Replace demandload with new demandimport
Matt Mackall
r3877:abaee83c default

The requested changes are too big and the content was truncated.
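For context: each extension in this changeset drops the old demandload() helper in favour of plain import statements, which the new demandimport module then defers globally. A minimal before/after sketch of the pattern, using module names taken from the bugzilla.py hunk below:

    # before: old demandload idiom (removed by this changeset)
    from mercurial.demandload import *
    demandload(globals(), 'mercurial:cmdutil,templater,util os re time')

    # after: ordinary imports; demandimport.enable() makes them demand-loaded
    from mercurial import cmdutil, templater, util
    import os, re, time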

@@ -0,0 +1,104 b''
1 # demandimport.py - global demand-loading of modules for Mercurial
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 '''
9 demandimport - automatic demandloading of modules
10
11 To enable this module, do:
12
13 import demandimport; demandimport.enable()
14
15 Imports of the following forms will be demand-loaded:
16
17 import a, b.c
18 import a.b as c
19 from a import b,c # a will be loaded immediately
20
21 These imports will not be delayed:
22
23 from a import *
24 b = __import__(a)
25 '''
26
27 _origimport = __import__
28
29 class _demandmod(object):
30 """module demand-loader and proxy"""
31 def __init__(self, name, globals, locals):
32 if '.' in name:
33 head, rest = name.split('.', 1)
34 after = [rest]
35 else:
36 head = name
37 after = []
38 self.__dict__["_data"] = (head, globals, locals, after)
39 self.__dict__["_module"] = None
40 def _extend(self, name):
41 """add to the list of submodules to load"""
42 self._data[3].append(name)
43 def _load(self):
44 if not self._module:
45 head, globals, locals, after = self._data
46 mod = _origimport(head, globals, locals)
47 # load submodules
48 for x in after:
49 hx = x
50 if '.' in x:
51 hx = x.split('.')[0]
52 if not hasattr(mod, hx):
53 setattr(mod, hx, _demandmod(x, mod.__dict__, mod.__dict__))
54 # are we in the locals dictionary still?
55 if locals and locals.get(head) == self:
56 locals[head] = mod
57 self.__dict__["_module"] = mod
58 def __repr__(self):
59 return "<unloaded module '%s'>" % self._data[0]
60 def __call__(self, *args, **kwargs):
61 raise TypeError("'unloaded module' object is not callable")
62 def __getattr__(self, attr):
63 self._load()
64 return getattr(self._module, attr)
65 def __setattr__(self, attr, val):
66 self._load()
67 setattr(self._module, attr, val)
68
69 def _demandimport(name, globals=None, locals=None, fromlist=None):
70 if not locals or name in ignore or fromlist == ('*',):
71 # these cases we can't really delay
72 return _origimport(name, globals, locals, fromlist)
73 elif not fromlist:
74 # import a [as b]
75 if '.' in name: # a.b
76 base, rest = name.split('.', 1)
77 # if a is already demand-loaded, add b to its submodule list
78 if base in locals:
79 if isinstance(locals[base], _demandmod):
80 locals[base]._extend(rest)
81 return locals[base]
82 return _demandmod(name, globals, locals)
83 else:
84 # from a import b,c,d
85 mod = _origimport(name, globals, locals)
86 # recurse down the module chain
87 for comp in name.split('.')[1:]:
88 mod = getattr(mod, comp)
89 for x in fromlist:
90 # set requested submodules for demand load
91 if not(hasattr(mod, x)):
92 setattr(mod, x, _demandmod(x, mod.__dict__, mod.__dict__))
93 return mod
94
95 ignore = []
96
97 def enable():
98 "enable global demand-loading of modules"
99 __builtins__["__import__"] = _demandimport
100
101 def disable():
102 "disable global demand-loading of modules"
103 __builtins__["__import__"] = _origimport
104
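A short usage sketch of the new module, based only on the docstring and the _demandmod proxy above (Python 2 style, matching the rest of this changeset): after enable(), a plain import binds a proxy object and the real module is only loaded on first attribute access.

    import demandimport
    demandimport.enable()   # installs _demandimport as the global __import__

    import os               # binds a _demandmod proxy; os is not imported yet
    print repr(os)          # "<unloaded module 'os'>"
    print os.getcwd()       # first attribute access calls _load(), then delegates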
@@ -1,179 +1,178 b''
1 # churn.py - create a graph showing who changed the most lines
1 # churn.py - create a graph showing who changed the most lines
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 #
8 #
9 # Aliases map file format is simple one alias per line in the following
9 # Aliases map file format is simple one alias per line in the following
10 # format:
10 # format:
11 #
11 #
12 # <alias email> <actual email>
12 # <alias email> <actual email>
13
13
14 from mercurial.demandload import *
14 import time, sys, signal, os
15 from mercurial.i18n import gettext as _
15 from mercurial.i18n import gettext as _
16 demandload(globals(), 'time sys signal os')
16 from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
17 demandload(globals(), 'mercurial:hg,mdiff,fancyopts,cmdutil,ui,util,templater,node')
18
17
19 def __gather(ui, repo, node1, node2):
18 def __gather(ui, repo, node1, node2):
20 def dirtywork(f, mmap1, mmap2):
19 def dirtywork(f, mmap1, mmap2):
21 lines = 0
20 lines = 0
22
21
23 to = mmap1 and repo.file(f).read(mmap1[f]) or None
22 to = mmap1 and repo.file(f).read(mmap1[f]) or None
24 tn = mmap2 and repo.file(f).read(mmap2[f]) or None
23 tn = mmap2 and repo.file(f).read(mmap2[f]) or None
25
24
26 diff = mdiff.unidiff(to, "", tn, "", f).split("\n")
25 diff = mdiff.unidiff(to, "", tn, "", f).split("\n")
27
26
28 for line in diff:
27 for line in diff:
29 if not line:
28 if not line:
30 continue # skip EOF
29 continue # skip EOF
31 if line.startswith(" "):
30 if line.startswith(" "):
32 continue # context line
31 continue # context line
33 if line.startswith("--- ") or line.startswith("+++ "):
32 if line.startswith("--- ") or line.startswith("+++ "):
34 continue # begining of diff
33 continue # begining of diff
35 if line.startswith("@@ "):
34 if line.startswith("@@ "):
36 continue # info line
35 continue # info line
37
36
38 # changed lines
37 # changed lines
39 lines += 1
38 lines += 1
40
39
41 return lines
40 return lines
42
41
43 ##
42 ##
44
43
45 lines = 0
44 lines = 0
46
45
47 changes = repo.status(node1, node2, None, util.always)[:5]
46 changes = repo.status(node1, node2, None, util.always)[:5]
48
47
49 modified, added, removed, deleted, unknown = changes
48 modified, added, removed, deleted, unknown = changes
50
49
51 who = repo.changelog.read(node2)[1]
50 who = repo.changelog.read(node2)[1]
52 who = templater.email(who) # get the email of the person
51 who = templater.email(who) # get the email of the person
53
52
54 mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
53 mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
55 mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
54 mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
56 for f in modified:
55 for f in modified:
57 lines += dirtywork(f, mmap1, mmap2)
56 lines += dirtywork(f, mmap1, mmap2)
58
57
59 for f in added:
58 for f in added:
60 lines += dirtywork(f, None, mmap2)
59 lines += dirtywork(f, None, mmap2)
61
60
62 for f in removed:
61 for f in removed:
63 lines += dirtywork(f, mmap1, None)
62 lines += dirtywork(f, mmap1, None)
64
63
65 for f in deleted:
64 for f in deleted:
66 lines += dirtywork(f, mmap1, mmap2)
65 lines += dirtywork(f, mmap1, mmap2)
67
66
68 for f in unknown:
67 for f in unknown:
69 lines += dirtywork(f, mmap1, mmap2)
68 lines += dirtywork(f, mmap1, mmap2)
70
69
71 return (who, lines)
70 return (who, lines)
72
71
73 def gather_stats(ui, repo, amap, revs=None, progress=False):
72 def gather_stats(ui, repo, amap, revs=None, progress=False):
74 stats = {}
73 stats = {}
75
74
76 cl = repo.changelog
75 cl = repo.changelog
77
76
78 if not revs:
77 if not revs:
79 revs = range(0, cl.count())
78 revs = range(0, cl.count())
80
79
81 nr_revs = len(revs)
80 nr_revs = len(revs)
82 cur_rev = 0
81 cur_rev = 0
83
82
84 for rev in revs:
83 for rev in revs:
85 cur_rev += 1 # next revision
84 cur_rev += 1 # next revision
86
85
87 node2 = cl.node(rev)
86 node2 = cl.node(rev)
88 node1 = cl.parents(node2)[0]
87 node1 = cl.parents(node2)[0]
89
88
90 if cl.parents(node2)[1] != node.nullid:
89 if cl.parents(node2)[1] != node.nullid:
91 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
90 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
92 continue
91 continue
93
92
94 who, lines = __gather(ui, repo, node1, node2)
93 who, lines = __gather(ui, repo, node1, node2)
95
94
96 # remap the owner if possible
95 # remap the owner if possible
97 if amap.has_key(who):
96 if amap.has_key(who):
98 ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
97 ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
99 who = amap[who]
98 who = amap[who]
100
99
101 if not stats.has_key(who):
100 if not stats.has_key(who):
102 stats[who] = 0
101 stats[who] = 0
103 stats[who] += lines
102 stats[who] += lines
104
103
105 ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))
104 ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))
106
105
107 if progress:
106 if progress:
108 if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
107 if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
109 ui.write("%d%%.." % (int(100.0*cur_rev/nr_revs),))
108 ui.write("%d%%.." % (int(100.0*cur_rev/nr_revs),))
110 sys.stdout.flush()
109 sys.stdout.flush()
111
110
112 if progress:
111 if progress:
113 ui.write("done\n")
112 ui.write("done\n")
114 sys.stdout.flush()
113 sys.stdout.flush()
115
114
116 return stats
115 return stats
117
116
118 def churn(ui, repo, **opts):
117 def churn(ui, repo, **opts):
119 "Graphs the number of lines changed"
118 "Graphs the number of lines changed"
120
119
121 def pad(s, l):
120 def pad(s, l):
122 if len(s) < l:
121 if len(s) < l:
123 return s + " " * (l-len(s))
122 return s + " " * (l-len(s))
124 return s[0:l]
123 return s[0:l]
125
124
126 def graph(n, maximum, width, char):
125 def graph(n, maximum, width, char):
127 n = int(n * width / float(maximum))
126 n = int(n * width / float(maximum))
128
127
129 return char * (n)
128 return char * (n)
130
129
131 def get_aliases(f):
130 def get_aliases(f):
132 aliases = {}
131 aliases = {}
133
132
134 for l in f.readlines():
133 for l in f.readlines():
135 l = l.strip()
134 l = l.strip()
136 alias, actual = l.split(" ")
135 alias, actual = l.split(" ")
137 aliases[alias] = actual
136 aliases[alias] = actual
138
137
139 return aliases
138 return aliases
140
139
141 amap = {}
140 amap = {}
142 aliases = opts.get('aliases')
141 aliases = opts.get('aliases')
143 if aliases:
142 if aliases:
144 try:
143 try:
145 f = open(aliases,"r")
144 f = open(aliases,"r")
146 except OSError, e:
145 except OSError, e:
147 print "Error: " + e
146 print "Error: " + e
148 return
147 return
149
148
150 amap = get_aliases(f)
149 amap = get_aliases(f)
151 f.close()
150 f.close()
152
151
153 revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
152 revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
154 revs.sort()
153 revs.sort()
155 stats = gather_stats(ui, repo, amap, revs, opts.get('progress'))
154 stats = gather_stats(ui, repo, amap, revs, opts.get('progress'))
156
155
157 # make a list of tuples (name, lines) and sort it in descending order
156 # make a list of tuples (name, lines) and sort it in descending order
158 ordered = stats.items()
157 ordered = stats.items()
159 ordered.sort(lambda x, y: cmp(y[1], x[1]))
158 ordered.sort(lambda x, y: cmp(y[1], x[1]))
160
159
161 maximum = ordered[0][1]
160 maximum = ordered[0][1]
162
161
163 ui.note("Assuming 80 character terminal\n")
162 ui.note("Assuming 80 character terminal\n")
164 width = 80 - 1
163 width = 80 - 1
165
164
166 for i in ordered:
165 for i in ordered:
167 person = i[0]
166 person = i[0]
168 lines = i[1]
167 lines = i[1]
169 print "%s %6d %s" % (pad(person, 20), lines,
168 print "%s %6d %s" % (pad(person, 20), lines,
170 graph(lines, maximum, width - 20 - 1 - 6 - 2 - 2, '*'))
169 graph(lines, maximum, width - 20 - 1 - 6 - 2 - 2, '*'))
171
170
172 cmdtable = {
171 cmdtable = {
173 "churn":
172 "churn":
174 (churn,
173 (churn,
175 [('r', 'rev', [], _('limit statistics to the specified revisions')),
174 [('r', 'rev', [], _('limit statistics to the specified revisions')),
176 ('', 'aliases', '', _('file with email aliases')),
175 ('', 'aliases', '', _('file with email aliases')),
177 ('', 'progress', None, _('show progress'))],
176 ('', 'progress', None, _('show progress'))],
178 'hg churn [-r revision range] [-a file] [--progress]'),
177 'hg churn [-r revision range] [-a file] [--progress]'),
179 }
178 }
@@ -1,12 +1,11 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # mercurial - scalable distributed SCM
3 # mercurial - scalable distributed SCM
4 #
4 #
5 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms
7 # This software may be used and distributed according to the terms
8 # of the GNU General Public License, incorporated herein by reference.
8 # of the GNU General Public License, incorporated herein by reference.
9
9
10 from mercurial import commands
10 import mercurial.commands
11
11 mercurial.commands.run()
12 commands.run()
@@ -1,124 +1,124 b''
1 # acl.py - changeset access control for mercurial
1 # acl.py - changeset access control for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # this hook allows to allow or deny access to parts of a repo when
8 # this hook allows to allow or deny access to parts of a repo when
9 # taking incoming changesets.
9 # taking incoming changesets.
10 #
10 #
11 # authorization is against local user name on system where hook is
11 # authorization is against local user name on system where hook is
12 # run, not committer of original changeset (since that is easy to
12 # run, not committer of original changeset (since that is easy to
13 # spoof).
13 # spoof).
14 #
14 #
15 # acl hook is best to use if you use hgsh to set up restricted shells
15 # acl hook is best to use if you use hgsh to set up restricted shells
16 # for authenticated users to only push to / pull from. not safe if
16 # for authenticated users to only push to / pull from. not safe if
17 # user has interactive shell access, because they can disable hook.
17 # user has interactive shell access, because they can disable hook.
18 # also not safe if remote users share one local account, because then
18 # also not safe if remote users share one local account, because then
19 # no way to tell remote users apart.
19 # no way to tell remote users apart.
20 #
20 #
21 # to use, configure acl extension in hgrc like this:
21 # to use, configure acl extension in hgrc like this:
22 #
22 #
23 # [extensions]
23 # [extensions]
24 # hgext.acl =
24 # hgext.acl =
25 #
25 #
26 # [hooks]
26 # [hooks]
27 # pretxnchangegroup.acl = python:hgext.acl.hook
27 # pretxnchangegroup.acl = python:hgext.acl.hook
28 #
28 #
29 # [acl]
29 # [acl]
30 # sources = serve # check if source of incoming changes in this list
30 # sources = serve # check if source of incoming changes in this list
31 # # ("serve" == ssh or http, "push", "pull", "bundle")
31 # # ("serve" == ssh or http, "push", "pull", "bundle")
32 #
32 #
33 # allow and deny lists have subtree pattern (default syntax is glob)
33 # allow and deny lists have subtree pattern (default syntax is glob)
34 # on left, user names on right. deny list checked before allow list.
34 # on left, user names on right. deny list checked before allow list.
35 #
35 #
36 # [acl.allow]
36 # [acl.allow]
37 # # if acl.allow not present, all users allowed by default
37 # # if acl.allow not present, all users allowed by default
38 # # empty acl.allow = no users allowed
38 # # empty acl.allow = no users allowed
39 # docs/** = doc_writer
39 # docs/** = doc_writer
40 # .hgtags = release_engineer
40 # .hgtags = release_engineer
41 #
41 #
42 # [acl.deny]
42 # [acl.deny]
43 # # if acl.deny not present, no users denied by default
43 # # if acl.deny not present, no users denied by default
44 # # empty acl.deny = all users allowed
44 # # empty acl.deny = all users allowed
45 # glob pattern = user4, user5
45 # glob pattern = user4, user5
46 # ** = user6
46 # ** = user6
47
47
48 from mercurial.demandload import *
49 from mercurial.i18n import gettext as _
48 from mercurial.i18n import gettext as _
50 from mercurial.node import *
49 from mercurial.node import *
51 demandload(globals(), 'getpass mercurial:util')
50 from mercurial import util
51 import getpass
52
52
53 class checker(object):
53 class checker(object):
54 '''acl checker.'''
54 '''acl checker.'''
55
55
56 def buildmatch(self, key):
56 def buildmatch(self, key):
57 '''return tuple of (match function, list enabled).'''
57 '''return tuple of (match function, list enabled).'''
58 if not self.ui.has_config(key):
58 if not self.ui.has_config(key):
59 self.ui.debug(_('acl: %s not enabled\n') % key)
59 self.ui.debug(_('acl: %s not enabled\n') % key)
60 return None, False
60 return None, False
61
61
62 thisuser = self.getuser()
62 thisuser = self.getuser()
63 pats = [pat for pat, users in self.ui.configitems(key)
63 pats = [pat for pat, users in self.ui.configitems(key)
64 if thisuser in users.replace(',', ' ').split()]
64 if thisuser in users.replace(',', ' ').split()]
65 self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
65 self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
66 (key, len(pats), thisuser))
66 (key, len(pats), thisuser))
67 if pats:
67 if pats:
68 match = util.matcher(self.repo.root, names=pats)[1]
68 match = util.matcher(self.repo.root, names=pats)[1]
69 else:
69 else:
70 match = util.never
70 match = util.never
71 return match, True
71 return match, True
72
72
73 def getuser(self):
73 def getuser(self):
74 '''return name of authenticated user.'''
74 '''return name of authenticated user.'''
75 return self.user
75 return self.user
76
76
77 def __init__(self, ui, repo):
77 def __init__(self, ui, repo):
78 self.ui = ui
78 self.ui = ui
79 self.repo = repo
79 self.repo = repo
80 self.user = getpass.getuser()
80 self.user = getpass.getuser()
81 cfg = self.ui.config('acl', 'config')
81 cfg = self.ui.config('acl', 'config')
82 if cfg:
82 if cfg:
83 self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
83 self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
84 self.allow, self.allowable = self.buildmatch('acl.allow')
84 self.allow, self.allowable = self.buildmatch('acl.allow')
85 self.deny, self.deniable = self.buildmatch('acl.deny')
85 self.deny, self.deniable = self.buildmatch('acl.deny')
86
86
87 def skipsource(self, source):
87 def skipsource(self, source):
88 '''true if incoming changes from this source should be skipped.'''
88 '''true if incoming changes from this source should be skipped.'''
89 ok_sources = self.ui.config('acl', 'sources', 'serve').split()
89 ok_sources = self.ui.config('acl', 'sources', 'serve').split()
90 return source not in ok_sources
90 return source not in ok_sources
91
91
92 def check(self, node):
92 def check(self, node):
93 '''return if access allowed, raise exception if not.'''
93 '''return if access allowed, raise exception if not.'''
94 files = self.repo.changelog.read(node)[3]
94 files = self.repo.changelog.read(node)[3]
95 if self.deniable:
95 if self.deniable:
96 for f in files:
96 for f in files:
97 if self.deny(f):
97 if self.deny(f):
98 self.ui.debug(_('acl: user %s denied on %s\n') %
98 self.ui.debug(_('acl: user %s denied on %s\n') %
99 (self.getuser(), f))
99 (self.getuser(), f))
100 raise util.Abort(_('acl: access denied for changeset %s') %
100 raise util.Abort(_('acl: access denied for changeset %s') %
101 short(node))
101 short(node))
102 if self.allowable:
102 if self.allowable:
103 for f in files:
103 for f in files:
104 if not self.allow(f):
104 if not self.allow(f):
105 self.ui.debug(_('acl: user %s not allowed on %s\n') %
105 self.ui.debug(_('acl: user %s not allowed on %s\n') %
106 (self.getuser(), f))
106 (self.getuser(), f))
107 raise util.Abort(_('acl: access denied for changeset %s') %
107 raise util.Abort(_('acl: access denied for changeset %s') %
108 short(node))
108 short(node))
109 self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
109 self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
110
110
111 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
111 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
112 if hooktype != 'pretxnchangegroup':
112 if hooktype != 'pretxnchangegroup':
113 raise util.Abort(_('config error - hook type "%s" cannot stop '
113 raise util.Abort(_('config error - hook type "%s" cannot stop '
114 'incoming changesets') % hooktype)
114 'incoming changesets') % hooktype)
115
115
116 c = checker(ui, repo)
116 c = checker(ui, repo)
117 if c.skipsource(source):
117 if c.skipsource(source):
118 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
118 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
119 return
119 return
120
120
121 start = repo.changelog.rev(bin(node))
121 start = repo.changelog.rev(bin(node))
122 end = repo.changelog.count()
122 end = repo.changelog.count()
123 for rev in xrange(start, end):
123 for rev in xrange(start, end):
124 c.check(repo.changelog.node(rev))
124 c.check(repo.changelog.node(rev))
@@ -1,312 +1,312 b''
1 # bugzilla.py - bugzilla integration for mercurial
1 # bugzilla.py - bugzilla integration for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to update comments of bugzilla bugs when changesets
8 # hook extension to update comments of bugzilla bugs when changesets
9 # that refer to bugs by id are seen. this hook does not change bug
9 # that refer to bugs by id are seen. this hook does not change bug
10 # status, only comments.
10 # status, only comments.
11 #
11 #
12 # to configure, add items to '[bugzilla]' section of hgrc.
12 # to configure, add items to '[bugzilla]' section of hgrc.
13 #
13 #
14 # to use, configure bugzilla extension and enable like this:
14 # to use, configure bugzilla extension and enable like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.bugzilla =
17 # hgext.bugzilla =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # run bugzilla hook on every change pulled or pushed in here
20 # # run bugzilla hook on every change pulled or pushed in here
21 # incoming.bugzilla = python:hgext.bugzilla.hook
21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 #
22 #
23 # config items:
23 # config items:
24 #
24 #
25 # section name is 'bugzilla'.
25 # section name is 'bugzilla'.
26 # [bugzilla]
26 # [bugzilla]
27 #
27 #
28 # REQUIRED:
28 # REQUIRED:
29 # host = bugzilla # mysql server where bugzilla database lives
29 # host = bugzilla # mysql server where bugzilla database lives
30 # password = ** # user's password
30 # password = ** # user's password
31 # version = 2.16 # version of bugzilla installed
31 # version = 2.16 # version of bugzilla installed
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # bzuser = ... # fallback bugzilla user name to record comments with
34 # bzuser = ... # fallback bugzilla user name to record comments with
35 # db = bugs # database to connect to
35 # db = bugs # database to connect to
36 # notify = ... # command to run to get bugzilla to send mail
36 # notify = ... # command to run to get bugzilla to send mail
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 # strip = 0 # number of slashes to strip for url paths
38 # strip = 0 # number of slashes to strip for url paths
39 # style = ... # style file to use when formatting comments
39 # style = ... # style file to use when formatting comments
40 # template = ... # template to use when formatting comments
40 # template = ... # template to use when formatting comments
41 # timeout = 5 # database connection timeout (seconds)
41 # timeout = 5 # database connection timeout (seconds)
42 # user = bugs # user to connect to database as
42 # user = bugs # user to connect to database as
43 # [web]
43 # [web]
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 #
45 #
46 # if hg committer names are not same as bugzilla user names, use
46 # if hg committer names are not same as bugzilla user names, use
47 # "usermap" feature to map from committer email to bugzilla user name.
47 # "usermap" feature to map from committer email to bugzilla user name.
48 # usermap can be in hgrc or separate config file.
48 # usermap can be in hgrc or separate config file.
49 #
49 #
50 # [bugzilla]
50 # [bugzilla]
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 # [usermap]
52 # [usermap]
53 # committer_email = bugzilla_user_name
53 # committer_email = bugzilla_user_name
54
54
55 from mercurial.demandload import *
56 from mercurial.i18n import gettext as _
55 from mercurial.i18n import gettext as _
57 from mercurial.node import *
56 from mercurial.node import *
58 demandload(globals(), 'mercurial:cmdutil,templater,util os re time')
57 from mercurial import cmdutil, templater, util
58 import os, re, time
59
59
60 MySQLdb = None
60 MySQLdb = None
61
61
62 def buglist(ids):
62 def buglist(ids):
63 return '(' + ','.join(map(str, ids)) + ')'
63 return '(' + ','.join(map(str, ids)) + ')'
64
64
65 class bugzilla_2_16(object):
65 class bugzilla_2_16(object):
66 '''support for bugzilla version 2.16.'''
66 '''support for bugzilla version 2.16.'''
67
67
68 def __init__(self, ui):
68 def __init__(self, ui):
69 self.ui = ui
69 self.ui = ui
70 host = self.ui.config('bugzilla', 'host', 'localhost')
70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 passwd = self.ui.config('bugzilla', 'password')
72 passwd = self.ui.config('bugzilla', 'password')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 usermap = self.ui.config('bugzilla', 'usermap')
75 usermap = self.ui.config('bugzilla', 'usermap')
76 if usermap:
76 if usermap:
77 self.ui.readsections(usermap, 'usermap')
77 self.ui.readsections(usermap, 'usermap')
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 (host, db, user, '*' * len(passwd)))
79 (host, db, user, '*' * len(passwd)))
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 db=db, connect_timeout=timeout)
81 db=db, connect_timeout=timeout)
82 self.cursor = self.conn.cursor()
82 self.cursor = self.conn.cursor()
83 self.run('select fieldid from fielddefs where name = "longdesc"')
83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 ids = self.cursor.fetchall()
84 ids = self.cursor.fetchall()
85 if len(ids) != 1:
85 if len(ids) != 1:
86 raise util.Abort(_('unknown database schema'))
86 raise util.Abort(_('unknown database schema'))
87 self.longdesc_id = ids[0][0]
87 self.longdesc_id = ids[0][0]
88 self.user_ids = {}
88 self.user_ids = {}
89
89
90 def run(self, *args, **kwargs):
90 def run(self, *args, **kwargs):
91 '''run a query.'''
91 '''run a query.'''
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 try:
93 try:
94 self.cursor.execute(*args, **kwargs)
94 self.cursor.execute(*args, **kwargs)
95 except MySQLdb.MySQLError, err:
95 except MySQLdb.MySQLError, err:
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 raise
97 raise
98
98
99 def filter_real_bug_ids(self, ids):
99 def filter_real_bug_ids(self, ids):
100 '''filter not-existing bug ids from list.'''
100 '''filter not-existing bug ids from list.'''
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 ids = [c[0] for c in self.cursor.fetchall()]
102 ids = [c[0] for c in self.cursor.fetchall()]
103 ids.sort()
103 ids.sort()
104 return ids
104 return ids
105
105
106 def filter_unknown_bug_ids(self, node, ids):
106 def filter_unknown_bug_ids(self, node, ids):
107 '''filter bug ids from list that already refer to this changeset.'''
107 '''filter bug ids from list that already refer to this changeset.'''
108
108
109 self.run('''select bug_id from longdescs where
109 self.run('''select bug_id from longdescs where
110 bug_id in %s and thetext like "%%%s%%"''' %
110 bug_id in %s and thetext like "%%%s%%"''' %
111 (buglist(ids), short(node)))
111 (buglist(ids), short(node)))
112 unknown = dict.fromkeys(ids)
112 unknown = dict.fromkeys(ids)
113 for (id,) in self.cursor.fetchall():
113 for (id,) in self.cursor.fetchall():
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 (id, short(node)))
115 (id, short(node)))
116 unknown.pop(id, None)
116 unknown.pop(id, None)
117 ids = unknown.keys()
117 ids = unknown.keys()
118 ids.sort()
118 ids.sort()
119 return ids
119 return ids
120
120
121 def notify(self, ids):
121 def notify(self, ids):
122 '''tell bugzilla to send mail.'''
122 '''tell bugzilla to send mail.'''
123
123
124 self.ui.status(_('telling bugzilla to send mail:\n'))
124 self.ui.status(_('telling bugzilla to send mail:\n'))
125 for id in ids:
125 for id in ids:
126 self.ui.status(_(' bug %s\n') % id)
126 self.ui.status(_(' bug %s\n') % id)
127 cmd = self.ui.config('bugzilla', 'notify',
127 cmd = self.ui.config('bugzilla', 'notify',
128 'cd /var/www/html/bugzilla && '
128 'cd /var/www/html/bugzilla && '
129 './processmail %s nobody@nowhere.com') % id
129 './processmail %s nobody@nowhere.com') % id
130 fp = os.popen('(%s) 2>&1' % cmd)
130 fp = os.popen('(%s) 2>&1' % cmd)
131 out = fp.read()
131 out = fp.read()
132 ret = fp.close()
132 ret = fp.close()
133 if ret:
133 if ret:
134 self.ui.warn(out)
134 self.ui.warn(out)
135 raise util.Abort(_('bugzilla notify command %s') %
135 raise util.Abort(_('bugzilla notify command %s') %
136 util.explain_exit(ret)[0])
136 util.explain_exit(ret)[0])
137 self.ui.status(_('done\n'))
137 self.ui.status(_('done\n'))
138
138
139 def get_user_id(self, user):
139 def get_user_id(self, user):
140 '''look up numeric bugzilla user id.'''
140 '''look up numeric bugzilla user id.'''
141 try:
141 try:
142 return self.user_ids[user]
142 return self.user_ids[user]
143 except KeyError:
143 except KeyError:
144 try:
144 try:
145 userid = int(user)
145 userid = int(user)
146 except ValueError:
146 except ValueError:
147 self.ui.note(_('looking up user %s\n') % user)
147 self.ui.note(_('looking up user %s\n') % user)
148 self.run('''select userid from profiles
148 self.run('''select userid from profiles
149 where login_name like %s''', user)
149 where login_name like %s''', user)
150 all = self.cursor.fetchall()
150 all = self.cursor.fetchall()
151 if len(all) != 1:
151 if len(all) != 1:
152 raise KeyError(user)
152 raise KeyError(user)
153 userid = int(all[0][0])
153 userid = int(all[0][0])
154 self.user_ids[user] = userid
154 self.user_ids[user] = userid
155 return userid
155 return userid
156
156
157 def map_committer(self, user):
157 def map_committer(self, user):
158 '''map name of committer to bugzilla user name.'''
158 '''map name of committer to bugzilla user name.'''
159 for committer, bzuser in self.ui.configitems('usermap'):
159 for committer, bzuser in self.ui.configitems('usermap'):
160 if committer.lower() == user.lower():
160 if committer.lower() == user.lower():
161 return bzuser
161 return bzuser
162 return user
162 return user
163
163
164 def add_comment(self, bugid, text, committer):
164 def add_comment(self, bugid, text, committer):
165 '''add comment to bug. try adding comment as committer of
165 '''add comment to bug. try adding comment as committer of
166 changeset, otherwise as default bugzilla user.'''
166 changeset, otherwise as default bugzilla user.'''
167 user = self.map_committer(committer)
167 user = self.map_committer(committer)
168 try:
168 try:
169 userid = self.get_user_id(user)
169 userid = self.get_user_id(user)
170 except KeyError:
170 except KeyError:
171 try:
171 try:
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 if not defaultuser:
173 if not defaultuser:
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 user)
175 user)
176 userid = self.get_user_id(defaultuser)
176 userid = self.get_user_id(defaultuser)
177 except KeyError:
177 except KeyError:
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 (user, defaultuser))
179 (user, defaultuser))
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 self.run('''insert into longdescs
181 self.run('''insert into longdescs
182 (bug_id, who, bug_when, thetext)
182 (bug_id, who, bug_when, thetext)
183 values (%s, %s, %s, %s)''',
183 values (%s, %s, %s, %s)''',
184 (bugid, userid, now, text))
184 (bugid, userid, now, text))
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 values (%s, %s, %s, %s)''',
186 values (%s, %s, %s, %s)''',
187 (bugid, userid, now, self.longdesc_id))
187 (bugid, userid, now, self.longdesc_id))
188
188
189 class bugzilla(object):
189 class bugzilla(object):
190 # supported versions of bugzilla. different versions have
190 # supported versions of bugzilla. different versions have
191 # different schemas.
191 # different schemas.
192 _versions = {
192 _versions = {
193 '2.16': bugzilla_2_16,
193 '2.16': bugzilla_2_16,
194 }
194 }
195
195
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198
198
199 _bz = None
199 _bz = None
200
200
201 def __init__(self, ui, repo):
201 def __init__(self, ui, repo):
202 self.ui = ui
202 self.ui = ui
203 self.repo = repo
203 self.repo = repo
204
204
205 def bz(self):
205 def bz(self):
206 '''return object that knows how to talk to bugzilla version in
206 '''return object that knows how to talk to bugzilla version in
207 use.'''
207 use.'''
208
208
209 if bugzilla._bz is None:
209 if bugzilla._bz is None:
210 bzversion = self.ui.config('bugzilla', 'version')
210 bzversion = self.ui.config('bugzilla', 'version')
211 try:
211 try:
212 bzclass = bugzilla._versions[bzversion]
212 bzclass = bugzilla._versions[bzversion]
213 except KeyError:
213 except KeyError:
214 raise util.Abort(_('bugzilla version %s not supported') %
214 raise util.Abort(_('bugzilla version %s not supported') %
215 bzversion)
215 bzversion)
216 bugzilla._bz = bzclass(self.ui)
216 bugzilla._bz = bzclass(self.ui)
217 return bugzilla._bz
217 return bugzilla._bz
218
218
219 def __getattr__(self, key):
219 def __getattr__(self, key):
220 return getattr(self.bz(), key)
220 return getattr(self.bz(), key)
221
221
222 _bug_re = None
222 _bug_re = None
223 _split_re = None
223 _split_re = None
224
224
225 def find_bug_ids(self, node, desc):
225 def find_bug_ids(self, node, desc):
226 '''find valid bug ids that are referred to in changeset
226 '''find valid bug ids that are referred to in changeset
227 comments and that do not already have references to this
227 comments and that do not already have references to this
228 changeset.'''
228 changeset.'''
229
229
230 if bugzilla._bug_re is None:
230 if bugzilla._bug_re is None:
231 bugzilla._bug_re = re.compile(
231 bugzilla._bug_re = re.compile(
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 re.IGNORECASE)
233 re.IGNORECASE)
234 bugzilla._split_re = re.compile(r'\D+')
234 bugzilla._split_re = re.compile(r'\D+')
235 start = 0
235 start = 0
236 ids = {}
236 ids = {}
237 while True:
237 while True:
238 m = bugzilla._bug_re.search(desc, start)
238 m = bugzilla._bug_re.search(desc, start)
239 if not m:
239 if not m:
240 break
240 break
241 start = m.end()
241 start = m.end()
242 for id in bugzilla._split_re.split(m.group(1)):
242 for id in bugzilla._split_re.split(m.group(1)):
243 if not id: continue
243 if not id: continue
244 ids[int(id)] = 1
244 ids[int(id)] = 1
245 ids = ids.keys()
245 ids = ids.keys()
246 if ids:
246 if ids:
247 ids = self.filter_real_bug_ids(ids)
247 ids = self.filter_real_bug_ids(ids)
248 if ids:
248 if ids:
249 ids = self.filter_unknown_bug_ids(node, ids)
249 ids = self.filter_unknown_bug_ids(node, ids)
250 return ids
250 return ids
251
251
252 def update(self, bugid, node, changes):
252 def update(self, bugid, node, changes):
253 '''update bugzilla bug with reference to changeset.'''
253 '''update bugzilla bug with reference to changeset.'''
254
254
255 def webroot(root):
255 def webroot(root):
256 '''strip leading prefix of repo root and turn into
256 '''strip leading prefix of repo root and turn into
257 url-safe path.'''
257 url-safe path.'''
258 count = int(self.ui.config('bugzilla', 'strip', 0))
258 count = int(self.ui.config('bugzilla', 'strip', 0))
259 root = util.pconvert(root)
259 root = util.pconvert(root)
260 while count > 0:
260 while count > 0:
261 c = root.find('/')
261 c = root.find('/')
262 if c == -1:
262 if c == -1:
263 break
263 break
264 root = root[c+1:]
264 root = root[c+1:]
265 count -= 1
265 count -= 1
266 return root
266 return root
267
267
268 mapfile = self.ui.config('bugzilla', 'style')
268 mapfile = self.ui.config('bugzilla', 'style')
269 tmpl = self.ui.config('bugzilla', 'template')
269 tmpl = self.ui.config('bugzilla', 'template')
270 t = cmdutil.changeset_templater(self.ui, self.repo,
270 t = cmdutil.changeset_templater(self.ui, self.repo,
271 False, mapfile, False)
271 False, mapfile, False)
272 if not mapfile and not tmpl:
272 if not mapfile and not tmpl:
273 tmpl = _('changeset {node|short} in repo {root} refers '
273 tmpl = _('changeset {node|short} in repo {root} refers '
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 if tmpl:
275 if tmpl:
276 tmpl = templater.parsestring(tmpl, quoted=False)
276 tmpl = templater.parsestring(tmpl, quoted=False)
277 t.use_template(tmpl)
277 t.use_template(tmpl)
278 self.ui.pushbuffer()
278 self.ui.pushbuffer()
279 t.show(changenode=node, changes=changes,
279 t.show(changenode=node, changes=changes,
280 bug=str(bugid),
280 bug=str(bugid),
281 hgweb=self.ui.config('web', 'baseurl'),
281 hgweb=self.ui.config('web', 'baseurl'),
282 root=self.repo.root,
282 root=self.repo.root,
283 webroot=webroot(self.repo.root))
283 webroot=webroot(self.repo.root))
284 data = self.ui.popbuffer()
284 data = self.ui.popbuffer()
285 self.add_comment(bugid, data, templater.email(changes[1]))
285 self.add_comment(bugid, data, templater.email(changes[1]))
286
286
287 def hook(ui, repo, hooktype, node=None, **kwargs):
287 def hook(ui, repo, hooktype, node=None, **kwargs):
288 '''add comment to bugzilla for each changeset that refers to a
288 '''add comment to bugzilla for each changeset that refers to a
289 bugzilla bug id. only add a comment once per bug, so same change
289 bugzilla bug id. only add a comment once per bug, so same change
290 seen multiple times does not fill bug with duplicate data.'''
290 seen multiple times does not fill bug with duplicate data.'''
291 try:
291 try:
292 import MySQLdb as mysql
292 import MySQLdb as mysql
293 global MySQLdb
293 global MySQLdb
294 MySQLdb = mysql
294 MySQLdb = mysql
295 except ImportError, err:
295 except ImportError, err:
296 raise util.Abort(_('python mysql support not available: %s') % err)
296 raise util.Abort(_('python mysql support not available: %s') % err)
297
297
298 if node is None:
298 if node is None:
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 hooktype)
300 hooktype)
301 try:
301 try:
302 bz = bugzilla(ui, repo)
302 bz = bugzilla(ui, repo)
303 bin_node = bin(node)
303 bin_node = bin(node)
304 changes = repo.changelog.read(bin_node)
304 changes = repo.changelog.read(bin_node)
305 ids = bz.find_bug_ids(bin_node, changes[4])
305 ids = bz.find_bug_ids(bin_node, changes[4])
306 if ids:
306 if ids:
307 for id in ids:
307 for id in ids:
308 bz.update(id, bin_node, changes)
308 bz.update(id, bin_node, changes)
309 bz.notify(ids)
309 bz.notify(ids)
310 except MySQLdb.MySQLError, err:
310 except MySQLdb.MySQLError, err:
311 raise util.Abort(_('database error: %s') % err[1])
311 raise util.Abort(_('database error: %s') % err[1])
312
312
@@ -1,187 +1,187 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # The `extdiff' Mercurial extension allows you to use external programs
8 # The `extdiff' Mercurial extension allows you to use external programs
9 # to compare revisions, or revision with working dir. The external diff
9 # to compare revisions, or revision with working dir. The external diff
10 # programs are called with a configurable set of options and two
10 # programs are called with a configurable set of options and two
11 # non-option arguments: paths to directories containing snapshots of
11 # non-option arguments: paths to directories containing snapshots of
12 # files to compare.
12 # files to compare.
13 #
13 #
14 # To enable this extension:
14 # To enable this extension:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.extdiff =
17 # hgext.extdiff =
18 #
18 #
19 # The `extdiff' extension also allows to configure new diff commands, so
19 # The `extdiff' extension also allows to configure new diff commands, so
20 # you do not need to type "hg extdiff -p kdiff3" always.
20 # you do not need to type "hg extdiff -p kdiff3" always.
21 #
21 #
22 # [extdiff]
22 # [extdiff]
23 # # add new command that runs GNU diff(1) in 'context diff' mode
23 # # add new command that runs GNU diff(1) in 'context diff' mode
24 # cmd.cdiff = gdiff
24 # cmd.cdiff = gdiff
25 # opts.cdiff = -Nprc5
25 # opts.cdiff = -Nprc5
26
26
27 # # add new command called vdiff, runs kdiff3
27 # # add new command called vdiff, runs kdiff3
28 # cmd.vdiff = kdiff3
28 # cmd.vdiff = kdiff3
29
29
30 # # add new command called meld, runs meld (no need to name twice)
30 # # add new command called meld, runs meld (no need to name twice)
31 # cmd.meld =
31 # cmd.meld =
32
32
33 # # add new command called vimdiff, runs gvimdiff with DirDiff plugin
33 # # add new command called vimdiff, runs gvimdiff with DirDiff plugin
34 # #(see http://www.vim.org/scripts/script.php?script_id=102)
34 # #(see http://www.vim.org/scripts/script.php?script_id=102)
35 # # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
35 # # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
36 # # your .vimrc
36 # # your .vimrc
37 # cmd.vimdiff = gvim
37 # cmd.vimdiff = gvim
38 # opts.vimdiff = -f '+next' '+execute "DirDiff" argv(0) argv(1)'
38 # opts.vimdiff = -f '+next' '+execute "DirDiff" argv(0) argv(1)'
39 #
39 #
40 # Each custom diff commands can have two parts: a `cmd' and an `opts'
40 # Each custom diff commands can have two parts: a `cmd' and an `opts'
41 # part. The cmd.xxx option defines the name of an executable program
41 # part. The cmd.xxx option defines the name of an executable program
42 # that will be run, and opts.xxx defines a set of command-line options
42 # that will be run, and opts.xxx defines a set of command-line options
43 # which will be inserted to the command between the program name and
43 # which will be inserted to the command between the program name and
44 # the files/directories to diff (i.e. the cdiff example above).
44 # the files/directories to diff (i.e. the cdiff example above).
45 #
45 #
46 # You can use -I/-X and list of file or directory names like normal
46 # You can use -I/-X and list of file or directory names like normal
47 # "hg diff" command. The `extdiff' extension makes snapshots of only
47 # "hg diff" command. The `extdiff' extension makes snapshots of only
48 # needed files, so running the external diff program will actually be
48 # needed files, so running the external diff program will actually be
49 # pretty fast (at least faster than having to compare the entire tree).
49 # pretty fast (at least faster than having to compare the entire tree).
50
50
51 from mercurial.demandload import demandload
52 from mercurial.i18n import gettext as _
51 from mercurial.i18n import gettext as _
53 from mercurial.node import *
52 from mercurial.node import *
54 demandload(globals(), 'mercurial:cmdutil,util os shutil tempfile')
53 from mercurial import cmdutil, util
54 import os, shutil, tempfile
55
55
56 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
56 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
57 def snapshot_node(files, node):
57 def snapshot_node(files, node):
58 '''snapshot files as of some revision'''
58 '''snapshot files as of some revision'''
59 changes = repo.changelog.read(node)
59 changes = repo.changelog.read(node)
60 mf = repo.manifest.read(changes[0])
60 mf = repo.manifest.read(changes[0])
61 dirname = '%s.%s' % (os.path.basename(repo.root), short(node))
61 dirname = '%s.%s' % (os.path.basename(repo.root), short(node))
62 base = os.path.join(tmproot, dirname)
62 base = os.path.join(tmproot, dirname)
63 os.mkdir(base)
63 os.mkdir(base)
64 if not ui.quiet:
64 if not ui.quiet:
65 ui.write_err(_('making snapshot of %d files from rev %s\n') %
65 ui.write_err(_('making snapshot of %d files from rev %s\n') %
66 (len(files), short(node)))
66 (len(files), short(node)))
67 for fn in files:
67 for fn in files:
68 if not fn in mf:
68 if not fn in mf:
69 # skipping new file after a merge ?
69 # skipping new file after a merge ?
70 continue
70 continue
71 wfn = util.pconvert(fn)
71 wfn = util.pconvert(fn)
72 ui.note(' %s\n' % wfn)
72 ui.note(' %s\n' % wfn)
73 dest = os.path.join(base, wfn)
73 dest = os.path.join(base, wfn)
74 destdir = os.path.dirname(dest)
74 destdir = os.path.dirname(dest)
75 if not os.path.isdir(destdir):
75 if not os.path.isdir(destdir):
76 os.makedirs(destdir)
76 os.makedirs(destdir)
77 repo.wwrite(wfn, repo.file(fn).read(mf[fn]), open(dest, 'w'))
77 repo.wwrite(wfn, repo.file(fn).read(mf[fn]), open(dest, 'w'))
78 return dirname
78 return dirname
79
79
80 def snapshot_wdir(files):
80 def snapshot_wdir(files):
81 '''snapshot files from working directory.
81 '''snapshot files from working directory.
82 if not using snapshot, -I/-X does not work and recursive diff
82 if not using snapshot, -I/-X does not work and recursive diff
83 in tools like kdiff3 and meld displays too many files.'''
83 in tools like kdiff3 and meld displays too many files.'''
84 dirname = os.path.basename(repo.root)
84 dirname = os.path.basename(repo.root)
85 base = os.path.join(tmproot, dirname)
85 base = os.path.join(tmproot, dirname)
86 os.mkdir(base)
86 os.mkdir(base)
87 if not ui.quiet:
87 if not ui.quiet:
88 ui.write_err(_('making snapshot of %d files from working dir\n') %
88 ui.write_err(_('making snapshot of %d files from working dir\n') %
89 (len(files)))
89 (len(files)))
90 for fn in files:
90 for fn in files:
91 wfn = util.pconvert(fn)
91 wfn = util.pconvert(fn)
92 ui.note(' %s\n' % wfn)
92 ui.note(' %s\n' % wfn)
93 dest = os.path.join(base, wfn)
93 dest = os.path.join(base, wfn)
94 destdir = os.path.dirname(dest)
94 destdir = os.path.dirname(dest)
95 if not os.path.isdir(destdir):
95 if not os.path.isdir(destdir):
96 os.makedirs(destdir)
96 os.makedirs(destdir)
97 fp = open(dest, 'w')
97 fp = open(dest, 'w')
98 for chunk in util.filechunkiter(repo.wopener(wfn)):
98 for chunk in util.filechunkiter(repo.wopener(wfn)):
99 fp.write(chunk)
99 fp.write(chunk)
100 return dirname
100 return dirname
101
101
102 node1, node2 = cmdutil.revpair(repo, opts['rev'])
102 node1, node2 = cmdutil.revpair(repo, opts['rev'])
103 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
103 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
104 modified, added, removed, deleted, unknown = repo.status(
104 modified, added, removed, deleted, unknown = repo.status(
105 node1, node2, files, match=matchfn)[:5]
105 node1, node2, files, match=matchfn)[:5]
106 if not (modified or added or removed):
106 if not (modified or added or removed):
107 return 0
107 return 0
108
108
109 tmproot = tempfile.mkdtemp(prefix='extdiff.')
109 tmproot = tempfile.mkdtemp(prefix='extdiff.')
110 try:
110 try:
111 dir1 = snapshot_node(modified + removed, node1)
111 dir1 = snapshot_node(modified + removed, node1)
112 if node2:
112 if node2:
113 dir2 = snapshot_node(modified + added, node2)
113 dir2 = snapshot_node(modified + added, node2)
114 else:
114 else:
115 dir2 = snapshot_wdir(modified + added)
115 dir2 = snapshot_wdir(modified + added)
116 cmdline = ('%s %s %s %s' %
116 cmdline = ('%s %s %s %s' %
117 (util.shellquote(diffcmd), ' '.join(diffopts),
117 (util.shellquote(diffcmd), ' '.join(diffopts),
118 util.shellquote(dir1), util.shellquote(dir2)))
118 util.shellquote(dir1), util.shellquote(dir2)))
119 ui.debug('running %r in %s\n' % (cmdline, tmproot))
119 ui.debug('running %r in %s\n' % (cmdline, tmproot))
120 util.system(cmdline, cwd=tmproot)
120 util.system(cmdline, cwd=tmproot)
121 return 1
121 return 1
122 finally:
122 finally:
123 ui.note(_('cleaning up temp directory\n'))
123 ui.note(_('cleaning up temp directory\n'))
124 shutil.rmtree(tmproot)
124 shutil.rmtree(tmproot)
125
125
126 def extdiff(ui, repo, *pats, **opts):
126 def extdiff(ui, repo, *pats, **opts):
127 '''use external program to diff repository (or selected files)
127 '''use external program to diff repository (or selected files)
128
128
129 Show differences between revisions for the specified files, using
129 Show differences between revisions for the specified files, using
130 an external program. The default program used is diff, with
130 an external program. The default program used is diff, with
131 default options "-Npru".
131 default options "-Npru".
132
132
133 To select a different program, use the -p option. The program
133 To select a different program, use the -p option. The program
134 will be passed the names of two directories to compare. To pass
134 will be passed the names of two directories to compare. To pass
135 additional options to the program, use the -o option. These will
135 additional options to the program, use the -o option. These will
136 be passed before the names of the directories to compare.
136 be passed before the names of the directories to compare.
137
137
138 When two revision arguments are given, then changes are
138 When two revision arguments are given, then changes are
139 shown between those revisions. If only one revision is
139 shown between those revisions. If only one revision is
140 specified then that revision is compared to the working
140 specified then that revision is compared to the working
141 directory, and, when no revisions are specified, the
141 directory, and, when no revisions are specified, the
142 working directory files are compared to its parent.'''
142 working directory files are compared to its parent.'''
143 program = opts['program'] or 'diff'
143 program = opts['program'] or 'diff'
144 if opts['program']:
144 if opts['program']:
145 option = opts['option']
145 option = opts['option']
146 else:
146 else:
147 option = opts['option'] or ['-Npru']
147 option = opts['option'] or ['-Npru']
148 return dodiff(ui, repo, program, option, pats, opts)
148 return dodiff(ui, repo, program, option, pats, opts)
149
149
150 cmdtable = {
150 cmdtable = {
151 "extdiff":
151 "extdiff":
152 (extdiff,
152 (extdiff,
153 [('p', 'program', '', _('comparison program to run')),
153 [('p', 'program', '', _('comparison program to run')),
154 ('o', 'option', [], _('pass option to comparison program')),
154 ('o', 'option', [], _('pass option to comparison program')),
155 ('r', 'rev', [], _('revision')),
155 ('r', 'rev', [], _('revision')),
156 ('I', 'include', [], _('include names matching the given patterns')),
156 ('I', 'include', [], _('include names matching the given patterns')),
157 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
157 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
158 _('hg extdiff [OPT]... [FILE]...')),
158 _('hg extdiff [OPT]... [FILE]...')),
159 }
159 }
160
160
161 def uisetup(ui):
161 def uisetup(ui):
162 for cmd, path in ui.configitems('extdiff'):
162 for cmd, path in ui.configitems('extdiff'):
163 if not cmd.startswith('cmd.'): continue
163 if not cmd.startswith('cmd.'): continue
164 cmd = cmd[4:]
164 cmd = cmd[4:]
165 if not path: path = cmd
165 if not path: path = cmd
166 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
166 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
167 diffopts = diffopts and [diffopts] or []
167 diffopts = diffopts and [diffopts] or []
168 def save(cmd, path, diffopts):
168 def save(cmd, path, diffopts):
169 '''use closure to save diff command to use'''
169 '''use closure to save diff command to use'''
170 def mydiff(ui, repo, *pats, **opts):
170 def mydiff(ui, repo, *pats, **opts):
171 return dodiff(ui, repo, path, diffopts, pats, opts)
171 return dodiff(ui, repo, path, diffopts, pats, opts)
172 mydiff.__doc__ = '''use %(path)r to diff repository (or selected files)
172 mydiff.__doc__ = '''use %(path)r to diff repository (or selected files)
173
173
174 Show differences between revisions for the specified
174 Show differences between revisions for the specified
175 files, using the %(path)r program.
175 files, using the %(path)r program.
176
176
177 When two revision arguments are given, then changes are
177 When two revision arguments are given, then changes are
178 shown between those revisions. If only one revision is
178 shown between those revisions. If only one revision is
179 specified then that revision is compared to the working
179 specified then that revision is compared to the working
180 directory, and, when no revisions are specified, the
180 directory, and, when no revisions are specified, the
181 working directory files are compared to its parent.''' % {
181 working directory files are compared to its parent.''' % {
182 'path': path,
182 'path': path,
183 }
183 }
184 return mydiff
184 return mydiff
185 cmdtable[cmd] = (save(cmd, path, diffopts),
185 cmdtable[cmd] = (save(cmd, path, diffopts),
186 cmdtable['extdiff'][1][1:],
186 cmdtable['extdiff'][1][1:],
187 _('hg %s [OPT]... [FILE]...') % cmd)
187 _('hg %s [OPT]... [FILE]...') % cmd)
@@ -1,99 +1,98 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.demandload import *
9 from mercurial.i18n import gettext as _
8 from mercurial.i18n import gettext as _
10 from mercurial.node import *
9 from mercurial.node import *
11 demandload(globals(), 'mercurial:commands,hg,node,util')
10 from mercurial import commands, hg, node, util
12
11
13 def fetch(ui, repo, source='default', **opts):
12 def fetch(ui, repo, source='default', **opts):
14 '''Pull changes from a remote repository, merge new changes if needed.
13 '''Pull changes from a remote repository, merge new changes if needed.
15
14
16 This finds all changes from the repository at the specified path
15 This finds all changes from the repository at the specified path
17 or URL and adds them to the local repository.
16 or URL and adds them to the local repository.
18
17
19 If the pulled changes add a new head, the head is automatically
18 If the pulled changes add a new head, the head is automatically
20 merged, and the result of the merge is committed. Otherwise, the
19 merged, and the result of the merge is committed. Otherwise, the
21 working directory is updated.'''
20 working directory is updated.'''
22
21
23 def postincoming(other, modheads):
22 def postincoming(other, modheads):
24 if modheads == 0:
23 if modheads == 0:
25 return 0
24 return 0
26 if modheads == 1:
25 if modheads == 1:
27 return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
26 return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
28 newheads = repo.heads(parent)
27 newheads = repo.heads(parent)
29 newchildren = [n for n in repo.heads(parent) if n != parent]
28 newchildren = [n for n in repo.heads(parent) if n != parent]
30 newparent = parent
29 newparent = parent
31 if newchildren:
30 if newchildren:
32 newparent = newchildren[0]
31 newparent = newchildren[0]
33 hg.clean(repo, newparent, wlock=wlock)
32 hg.clean(repo, newparent, wlock=wlock)
34 newheads = [n for n in repo.heads() if n != newparent]
33 newheads = [n for n in repo.heads() if n != newparent]
35 err = False
34 err = False
36 if newheads:
35 if newheads:
37 ui.status(_('merging with new head %d:%s\n') %
36 ui.status(_('merging with new head %d:%s\n') %
38 (repo.changelog.rev(newheads[0]), short(newheads[0])))
37 (repo.changelog.rev(newheads[0]), short(newheads[0])))
39 err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
38 err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
40 if not err and len(newheads) > 1:
39 if not err and len(newheads) > 1:
41 ui.status(_('not merging with %d other new heads '
40 ui.status(_('not merging with %d other new heads '
42 '(use "hg heads" and "hg merge" to merge them)') %
41 '(use "hg heads" and "hg merge" to merge them)') %
43 (len(newheads) - 1))
42 (len(newheads) - 1))
44 if not err:
43 if not err:
45 mod, add, rem = repo.status(wlock=wlock)[:3]
44 mod, add, rem = repo.status(wlock=wlock)[:3]
46 message = (commands.logmessage(opts) or
45 message = (commands.logmessage(opts) or
47 (_('Automated merge with %s') % other.url()))
46 (_('Automated merge with %s') % other.url()))
48 n = repo.commit(mod + add + rem, message,
47 n = repo.commit(mod + add + rem, message,
49 opts['user'], opts['date'], lock=lock, wlock=wlock,
48 opts['user'], opts['date'], lock=lock, wlock=wlock,
50 force_editor=opts.get('force_editor'))
49 force_editor=opts.get('force_editor'))
51 ui.status(_('new changeset %d:%s merges remote changes '
50 ui.status(_('new changeset %d:%s merges remote changes '
52 'with local\n') % (repo.changelog.rev(n),
51 'with local\n') % (repo.changelog.rev(n),
53 short(n)))
52 short(n)))
54 def pull():
53 def pull():
55 commands.setremoteconfig(ui, opts)
54 commands.setremoteconfig(ui, opts)
56
55
57 other = hg.repository(ui, ui.expandpath(source))
56 other = hg.repository(ui, ui.expandpath(source))
58 ui.status(_('pulling from %s\n') % ui.expandpath(source))
57 ui.status(_('pulling from %s\n') % ui.expandpath(source))
59 revs = None
58 revs = None
60 if opts['rev'] and not other.local():
59 if opts['rev'] and not other.local():
61 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
60 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
62 elif opts['rev']:
61 elif opts['rev']:
63 revs = [other.lookup(rev) for rev in opts['rev']]
62 revs = [other.lookup(rev) for rev in opts['rev']]
64 modheads = repo.pull(other, heads=revs, lock=lock)
63 modheads = repo.pull(other, heads=revs, lock=lock)
65 return postincoming(other, modheads)
64 return postincoming(other, modheads)
66
65
67 parent, p2 = repo.dirstate.parents()
66 parent, p2 = repo.dirstate.parents()
68 if parent != repo.changelog.tip():
67 if parent != repo.changelog.tip():
69 raise util.Abort(_('working dir not at tip '
68 raise util.Abort(_('working dir not at tip '
70 '(use "hg update" to check out tip)'))
69 '(use "hg update" to check out tip)'))
71 if p2 != nullid:
70 if p2 != nullid:
72 raise util.Abort(_('outstanding uncommitted merge'))
71 raise util.Abort(_('outstanding uncommitted merge'))
73 wlock = repo.wlock()
72 wlock = repo.wlock()
74 lock = repo.lock()
73 lock = repo.lock()
75 try:
74 try:
76 mod, add, rem = repo.status(wlock=wlock)[:3]
75 mod, add, rem = repo.status(wlock=wlock)[:3]
77 if mod or add or rem:
76 if mod or add or rem:
78 raise util.Abort(_('outstanding uncommitted changes'))
77 raise util.Abort(_('outstanding uncommitted changes'))
79 if len(repo.heads()) > 1:
78 if len(repo.heads()) > 1:
80 raise util.Abort(_('multiple heads in this repository '
79 raise util.Abort(_('multiple heads in this repository '
81 '(use "hg heads" and "hg merge" to merge)'))
80 '(use "hg heads" and "hg merge" to merge)'))
82 return pull()
81 return pull()
83 finally:
82 finally:
84 lock.release()
83 lock.release()
85 wlock.release()
84 wlock.release()
86
85
87 cmdtable = {
86 cmdtable = {
88 'fetch':
87 'fetch':
89 (fetch,
88 (fetch,
90 [('e', 'ssh', '', _('specify ssh command to use')),
89 [('e', 'ssh', '', _('specify ssh command to use')),
91 ('m', 'message', '', _('use <text> as commit message')),
90 ('m', 'message', '', _('use <text> as commit message')),
92 ('l', 'logfile', '', _('read the commit message from <file>')),
91 ('l', 'logfile', '', _('read the commit message from <file>')),
93 ('d', 'date', '', _('record datecode as commit date')),
92 ('d', 'date', '', _('record datecode as commit date')),
94 ('u', 'user', '', _('record user as committer')),
93 ('u', 'user', '', _('record user as committer')),
95 ('r', 'rev', [], _('a specific revision you would like to pull')),
94 ('r', 'rev', [], _('a specific revision you would like to pull')),
96 ('f', 'force-editor', None, _('edit commit message')),
95 ('f', 'force-editor', None, _('edit commit message')),
97 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
96 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
98 'hg fetch [SOURCE]'),
97 'hg fetch [SOURCE]'),
99 }
98 }
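A hedged usage sketch for the command table above (paths and revisions are placeholders):

    hg fetch ssh://hg.example.com/repo        # pull from a remote, then merge or update as needed
    hg fetch -r REV -m "sync with upstream" ../other-clone

If the pull creates exactly one new head, postincoming() merges it and commits with the -m/-l message, falling back to the automated "Automated merge with ..." message; -r is rejected for non-local repositories, as the check in pull() shows.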
@@ -1,299 +1,299 b''
1 # bisect extension for mercurial
1 # bisect extension for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
4 # Inspired by git bisect, extension skeleton taken from mq.py.
4 # Inspired by git bisect, extension skeleton taken from mq.py.
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from mercurial.i18n import gettext as _
9 from mercurial.i18n import gettext as _
10 from mercurial.demandload import demandload
10 from mercurial import hg, util, commands, cmdutil
11 demandload(globals(), "os sys sets mercurial:hg,util,commands,cmdutil")
11 import os, sys, sets
12
12
13 versionstr = "0.0.3"
13 versionstr = "0.0.3"
14
14
15 def lookup_rev(ui, repo, rev=None):
15 def lookup_rev(ui, repo, rev=None):
16 """returns rev or the checked-out revision if rev is None"""
16 """returns rev or the checked-out revision if rev is None"""
17 if not rev is None:
17 if not rev is None:
18 return repo.lookup(rev)
18 return repo.lookup(rev)
19 parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
19 parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
20 if len(parents) != 1:
20 if len(parents) != 1:
21 raise util.Abort(_("unexpected number of parents, "
21 raise util.Abort(_("unexpected number of parents, "
22 "please commit or revert"))
22 "please commit or revert"))
23 return parents.pop()
23 return parents.pop()
24
24
25 def check_clean(ui, repo):
25 def check_clean(ui, repo):
26 modified, added, removed, deleted, unknown = repo.status()[:5]
26 modified, added, removed, deleted, unknown = repo.status()[:5]
27 if modified or added or removed:
27 if modified or added or removed:
28 ui.warn("Repository is not clean, please commit or revert\n")
28 ui.warn("Repository is not clean, please commit or revert\n")
29 sys.exit(1)
29 sys.exit(1)
30
30
31 class bisect(object):
31 class bisect(object):
32 """dichotomic search in the DAG of changesets"""
32 """dichotomic search in the DAG of changesets"""
33 def __init__(self, ui, repo):
33 def __init__(self, ui, repo):
34 self.repo = repo
34 self.repo = repo
35 self.path = repo.join("bisect")
35 self.path = repo.join("bisect")
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.ui = ui
37 self.ui = ui
38 self.goodrevs = []
38 self.goodrevs = []
39 self.badrev = None
39 self.badrev = None
40 self.good_dirty = 0
40 self.good_dirty = 0
41 self.bad_dirty = 0
41 self.bad_dirty = 0
42 self.good_path = "good"
42 self.good_path = "good"
43 self.bad_path = "bad"
43 self.bad_path = "bad"
44
44
45 if os.path.exists(os.path.join(self.path, self.good_path)):
45 if os.path.exists(os.path.join(self.path, self.good_path)):
46 self.goodrevs = self.opener(self.good_path).read().splitlines()
46 self.goodrevs = self.opener(self.good_path).read().splitlines()
47 self.goodrevs = [hg.bin(x) for x in self.goodrevs]
47 self.goodrevs = [hg.bin(x) for x in self.goodrevs]
48 if os.path.exists(os.path.join(self.path, self.bad_path)):
48 if os.path.exists(os.path.join(self.path, self.bad_path)):
49 r = self.opener(self.bad_path).read().splitlines()
49 r = self.opener(self.bad_path).read().splitlines()
50 if r:
50 if r:
51 self.badrev = hg.bin(r.pop(0))
51 self.badrev = hg.bin(r.pop(0))
52
52
53 def write(self):
53 def write(self):
54 if not os.path.isdir(self.path):
54 if not os.path.isdir(self.path):
55 return
55 return
56 f = self.opener(self.good_path, "w")
56 f = self.opener(self.good_path, "w")
57 f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
57 f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
58 if len(self.goodrevs) > 0:
58 if len(self.goodrevs) > 0:
59 f.write("\n")
59 f.write("\n")
60 f = self.opener(self.bad_path, "w")
60 f = self.opener(self.bad_path, "w")
61 if self.badrev:
61 if self.badrev:
62 f.write(hg.hex(self.badrev) + "\n")
62 f.write(hg.hex(self.badrev) + "\n")
63
63
64 def init(self):
64 def init(self):
65 """start a new bisection"""
65 """start a new bisection"""
66 if os.path.isdir(self.path):
66 if os.path.isdir(self.path):
67 raise util.Abort(_("bisect directory already exists\n"))
67 raise util.Abort(_("bisect directory already exists\n"))
68 os.mkdir(self.path)
68 os.mkdir(self.path)
69 check_clean(self.ui, self.repo)
69 check_clean(self.ui, self.repo)
70 return 0
70 return 0
71
71
72 def reset(self):
72 def reset(self):
73 """finish a bisection"""
73 """finish a bisection"""
74 if os.path.isdir(self.path):
74 if os.path.isdir(self.path):
75 sl = [os.path.join(self.path, p)
75 sl = [os.path.join(self.path, p)
76 for p in [self.bad_path, self.good_path]]
76 for p in [self.bad_path, self.good_path]]
77 for s in sl:
77 for s in sl:
78 if os.path.exists(s):
78 if os.path.exists(s):
79 os.unlink(s)
79 os.unlink(s)
80 os.rmdir(self.path)
80 os.rmdir(self.path)
81 # Not sure about this
81 # Not sure about this
82 #self.ui.write("Going back to tip\n")
82 #self.ui.write("Going back to tip\n")
83 #self.repo.update(self.repo.changelog.tip())
83 #self.repo.update(self.repo.changelog.tip())
84 return 1
84 return 1
85
85
86 def num_ancestors(self, head=None, stop=None):
86 def num_ancestors(self, head=None, stop=None):
87 """
87 """
88 returns a dict with the mapping:
88 returns a dict with the mapping:
89 node -> number of ancestors (self included)
89 node -> number of ancestors (self included)
90 for all nodes that are ancestors of head and
90 for all nodes that are ancestors of head and
91 not in stop.
91 not in stop.
92 """
92 """
93 if head is None:
93 if head is None:
94 head = self.badrev
94 head = self.badrev
95 return self.__ancestors_and_nb_ancestors(head, stop)[1]
95 return self.__ancestors_and_nb_ancestors(head, stop)[1]
96
96
97 def ancestors(self, head=None, stop=None):
97 def ancestors(self, head=None, stop=None):
98 """
98 """
99 returns the set of the ancestors of head (self included)
99 returns the set of the ancestors of head (self included)
100 that are not in stop.
100 that are not in stop.
101 """
101 """
102 if head is None:
102 if head is None:
103 head = self.badrev
103 head = self.badrev
104 return self.__ancestors_and_nb_ancestors(head, stop)[0]
104 return self.__ancestors_and_nb_ancestors(head, stop)[0]
105
105
106 def __ancestors_and_nb_ancestors(self, head, stop=None):
106 def __ancestors_and_nb_ancestors(self, head, stop=None):
107 """
107 """
108 if stop is None then ancestors of goodrevs are used as
108 if stop is None then ancestors of goodrevs are used as
109 lower limit.
109 lower limit.
110
110
111 returns (anc, n_child) where anc is the set of the ancestors of head
111 returns (anc, n_child) where anc is the set of the ancestors of head
112 and n_child is a dictionary with the following mapping:
112 and n_child is a dictionary with the following mapping:
113 node -> number of ancestors (self included)
113 node -> number of ancestors (self included)
114 """
114 """
115 cl = self.repo.changelog
115 cl = self.repo.changelog
116 if not stop:
116 if not stop:
117 stop = sets.Set([])
117 stop = sets.Set([])
118 for i in xrange(len(self.goodrevs)-1, -1, -1):
118 for i in xrange(len(self.goodrevs)-1, -1, -1):
119 g = self.goodrevs[i]
119 g = self.goodrevs[i]
120 if g in stop:
120 if g in stop:
121 continue
121 continue
122 stop.update(cl.reachable(g))
122 stop.update(cl.reachable(g))
123 def num_children(a):
123 def num_children(a):
124 """
124 """
125 returns a dictionary with the following mapping
125 returns a dictionary with the following mapping
126 node -> [number of children, empty set]
126 node -> [number of children, empty set]
127 """
127 """
128 d = {a: [0, sets.Set([])]}
128 d = {a: [0, sets.Set([])]}
129 for i in xrange(cl.rev(a)+1):
129 for i in xrange(cl.rev(a)+1):
130 n = cl.node(i)
130 n = cl.node(i)
131 if not d.has_key(n):
131 if not d.has_key(n):
132 d[n] = [0, sets.Set([])]
132 d[n] = [0, sets.Set([])]
133 parents = [p for p in cl.parents(n) if p != hg.nullid]
133 parents = [p for p in cl.parents(n) if p != hg.nullid]
134 for p in parents:
134 for p in parents:
135 d[p][0] += 1
135 d[p][0] += 1
136 return d
136 return d
137
137
138 if head in stop:
138 if head in stop:
139 raise util.Abort(_("Inconsistent state, %s:%s is good and bad")
139 raise util.Abort(_("Inconsistent state, %s:%s is good and bad")
140 % (cl.rev(head), hg.short(head)))
140 % (cl.rev(head), hg.short(head)))
141 n_child = num_children(head)
141 n_child = num_children(head)
142 for i in xrange(cl.rev(head)+1):
142 for i in xrange(cl.rev(head)+1):
143 n = cl.node(i)
143 n = cl.node(i)
144 parents = [p for p in cl.parents(n) if p != hg.nullid]
144 parents = [p for p in cl.parents(n) if p != hg.nullid]
145 for p in parents:
145 for p in parents:
146 n_child[p][0] -= 1
146 n_child[p][0] -= 1
147 if not n in stop:
147 if not n in stop:
148 n_child[n][1].union_update(n_child[p][1])
148 n_child[n][1].union_update(n_child[p][1])
149 if n_child[p][0] == 0:
149 if n_child[p][0] == 0:
150 n_child[p] = len(n_child[p][1])
150 n_child[p] = len(n_child[p][1])
151 if not n in stop:
151 if not n in stop:
152 n_child[n][1].add(n)
152 n_child[n][1].add(n)
153 if n_child[n][0] == 0:
153 if n_child[n][0] == 0:
154 if n == head:
154 if n == head:
155 anc = n_child[n][1]
155 anc = n_child[n][1]
156 n_child[n] = len(n_child[n][1])
156 n_child[n] = len(n_child[n][1])
157 return anc, n_child
157 return anc, n_child
158
158
159 def next(self):
159 def next(self):
160 if not self.badrev:
160 if not self.badrev:
161 raise util.Abort(_("You should give at least one bad revision"))
161 raise util.Abort(_("You should give at least one bad revision"))
162 if not self.goodrevs:
162 if not self.goodrevs:
163 self.ui.warn(_("No good revision given\n"))
163 self.ui.warn(_("No good revision given\n"))
164 self.ui.warn(_("Marking the first revision as good\n"))
164 self.ui.warn(_("Marking the first revision as good\n"))
165 ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(
165 ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(
166 self.badrev)
166 self.badrev)
167 tot = len(ancestors)
167 tot = len(ancestors)
168 if tot == 1:
168 if tot == 1:
169 if ancestors.pop() != self.badrev:
169 if ancestors.pop() != self.badrev:
170 raise util.Abort(_("Could not find the first bad revision"))
170 raise util.Abort(_("Could not find the first bad revision"))
171 self.ui.write(_("The first bad revision is:\n"))
171 self.ui.write(_("The first bad revision is:\n"))
172 displayer = cmdutil.show_changeset(self.ui, self.repo, {})
172 displayer = cmdutil.show_changeset(self.ui, self.repo, {})
173 displayer.show(changenode=self.badrev)
173 displayer.show(changenode=self.badrev)
174 return None
174 return None
175 best_rev = None
175 best_rev = None
176 best_len = -1
176 best_len = -1
177 for n in ancestors:
177 for n in ancestors:
178 l = num_ancestors[n]
178 l = num_ancestors[n]
179 l = min(l, tot - l)
179 l = min(l, tot - l)
180 if l > best_len:
180 if l > best_len:
181 best_len = l
181 best_len = l
182 best_rev = n
182 best_rev = n
183 assert best_rev is not None
183 assert best_rev is not None
184 nb_tests = 0
184 nb_tests = 0
185 q, r = divmod(tot, 2)
185 q, r = divmod(tot, 2)
186 while q:
186 while q:
187 nb_tests += 1
187 nb_tests += 1
188 q, r = divmod(q, 2)
188 q, r = divmod(q, 2)
189 msg = _("Testing changeset %s:%s (%s changesets remaining, "
189 msg = _("Testing changeset %s:%s (%s changesets remaining, "
190 "~%s tests)\n") % (self.repo.changelog.rev(best_rev),
190 "~%s tests)\n") % (self.repo.changelog.rev(best_rev),
191 hg.short(best_rev), tot, nb_tests)
191 hg.short(best_rev), tot, nb_tests)
192 self.ui.write(msg)
192 self.ui.write(msg)
193 return best_rev
193 return best_rev
194
194
195 def autonext(self):
195 def autonext(self):
196 """find and update to the next revision to test"""
196 """find and update to the next revision to test"""
197 check_clean(self.ui, self.repo)
197 check_clean(self.ui, self.repo)
198 rev = self.next()
198 rev = self.next()
199 if rev is not None:
199 if rev is not None:
200 return hg.clean(self.repo, rev)
200 return hg.clean(self.repo, rev)
201
201
202 def good(self, rev):
202 def good(self, rev):
203 self.goodrevs.append(rev)
203 self.goodrevs.append(rev)
204
204
205 def autogood(self, rev=None):
205 def autogood(self, rev=None):
206 """mark revision as good and update to the next revision to test"""
206 """mark revision as good and update to the next revision to test"""
207 check_clean(self.ui, self.repo)
207 check_clean(self.ui, self.repo)
208 rev = lookup_rev(self.ui, self.repo, rev)
208 rev = lookup_rev(self.ui, self.repo, rev)
209 self.good(rev)
209 self.good(rev)
210 if self.badrev:
210 if self.badrev:
211 return self.autonext()
211 return self.autonext()
212
212
213 def bad(self, rev):
213 def bad(self, rev):
214 self.badrev = rev
214 self.badrev = rev
215
215
216 def autobad(self, rev=None):
216 def autobad(self, rev=None):
217 """mark revision as bad and update to the next revision to test"""
217 """mark revision as bad and update to the next revision to test"""
218 check_clean(self.ui, self.repo)
218 check_clean(self.ui, self.repo)
219 rev = lookup_rev(self.ui, self.repo, rev)
219 rev = lookup_rev(self.ui, self.repo, rev)
220 self.bad(rev)
220 self.bad(rev)
221 if self.goodrevs:
221 if self.goodrevs:
222 self.autonext()
222 self.autonext()
223
223
224 # should we put it in the class ?
224 # should we put it in the class ?
225 def test(ui, repo, rev):
225 def test(ui, repo, rev):
226 """test the bisection code"""
226 """test the bisection code"""
227 b = bisect(ui, repo)
227 b = bisect(ui, repo)
228 rev = repo.lookup(rev)
228 rev = repo.lookup(rev)
229 ui.write("testing with rev %s\n" % hg.hex(rev))
229 ui.write("testing with rev %s\n" % hg.hex(rev))
230 anc = b.ancestors()
230 anc = b.ancestors()
231 while len(anc) > 1:
231 while len(anc) > 1:
232 if not rev in anc:
232 if not rev in anc:
233 ui.warn("failure while bisecting\n")
233 ui.warn("failure while bisecting\n")
234 sys.exit(1)
234 sys.exit(1)
235 ui.write("it worked :)\n")
235 ui.write("it worked :)\n")
236 new_rev = b.next()
236 new_rev = b.next()
237 ui.write("choosing if good or bad\n")
237 ui.write("choosing if good or bad\n")
238 if rev in b.ancestors(head=new_rev):
238 if rev in b.ancestors(head=new_rev):
239 b.bad(new_rev)
239 b.bad(new_rev)
240 ui.write("it is bad\n")
240 ui.write("it is bad\n")
241 else:
241 else:
242 b.good(new_rev)
242 b.good(new_rev)
243 ui.write("it is good\n")
243 ui.write("it is good\n")
244 anc = b.ancestors()
244 anc = b.ancestors()
245 #repo.update(new_rev, force=True)
245 #repo.update(new_rev, force=True)
246 for v in anc:
246 for v in anc:
247 if v != rev:
247 if v != rev:
248 ui.warn("fail to found cset! :(\n")
248 ui.warn("fail to found cset! :(\n")
249 return 1
249 return 1
250 ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
250 ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
251 ui.write("Everything is ok :)\n")
251 ui.write("Everything is ok :)\n")
252 return 0
252 return 0
253
253
254 def bisect_run(ui, repo, cmd=None, *args):
254 def bisect_run(ui, repo, cmd=None, *args):
255 """bisect extension: dichotomic search in the DAG of changesets
255 """bisect extension: dichotomic search in the DAG of changesets
256 for subcommands see "hg bisect help\"
256 for subcommands see "hg bisect help\"
257 """
257 """
258 def help_(cmd=None, *args):
258 def help_(cmd=None, *args):
259 """show help for a given bisect subcommand or all subcommands"""
259 """show help for a given bisect subcommand or all subcommands"""
260 cmdtable = bisectcmdtable
260 cmdtable = bisectcmdtable
261 if cmd:
261 if cmd:
262 doc = cmdtable[cmd][0].__doc__
262 doc = cmdtable[cmd][0].__doc__
263 synopsis = cmdtable[cmd][2]
263 synopsis = cmdtable[cmd][2]
264 ui.write(synopsis + "\n")
264 ui.write(synopsis + "\n")
265 ui.write("\n" + doc + "\n")
265 ui.write("\n" + doc + "\n")
266 return
266 return
267 ui.write(_("list of subcommands for the bisect extension\n\n"))
267 ui.write(_("list of subcommands for the bisect extension\n\n"))
268 cmds = cmdtable.keys()
268 cmds = cmdtable.keys()
269 cmds.sort()
269 cmds.sort()
270 m = max([len(c) for c in cmds])
270 m = max([len(c) for c in cmds])
271 for cmd in cmds:
271 for cmd in cmds:
272 doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
272 doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
273 ui.write(" %-*s %s\n" % (m, cmd, doc))
273 ui.write(" %-*s %s\n" % (m, cmd, doc))
274
274
275 b = bisect(ui, repo)
275 b = bisect(ui, repo)
276 bisectcmdtable = {
276 bisectcmdtable = {
277 "init": (b.init, 0, _("hg bisect init")),
277 "init": (b.init, 0, _("hg bisect init")),
278 "bad": (b.autobad, 1, _("hg bisect bad [<rev>]")),
278 "bad": (b.autobad, 1, _("hg bisect bad [<rev>]")),
279 "good": (b.autogood, 1, _("hg bisect good [<rev>]")),
279 "good": (b.autogood, 1, _("hg bisect good [<rev>]")),
280 "next": (b.autonext, 0, _("hg bisect next")),
280 "next": (b.autonext, 0, _("hg bisect next")),
281 "reset": (b.reset, 0, _("hg bisect reset")),
281 "reset": (b.reset, 0, _("hg bisect reset")),
282 "help": (help_, 1, _("hg bisect help [<subcommand>]")),
282 "help": (help_, 1, _("hg bisect help [<subcommand>]")),
283 }
283 }
284
284
285 if not bisectcmdtable.has_key(cmd):
285 if not bisectcmdtable.has_key(cmd):
286 ui.warn(_("bisect: Unknown sub-command\n"))
286 ui.warn(_("bisect: Unknown sub-command\n"))
287 return help_()
287 return help_()
288 if len(args) > bisectcmdtable[cmd][1]:
288 if len(args) > bisectcmdtable[cmd][1]:
289 ui.warn(_("bisect: Too many arguments\n"))
289 ui.warn(_("bisect: Too many arguments\n"))
290 return help_()
290 return help_()
291 try:
291 try:
292 return bisectcmdtable[cmd][0](*args)
292 return bisectcmdtable[cmd][0](*args)
293 finally:
293 finally:
294 b.write()
294 b.write()
295
295
296 cmdtable = {
296 cmdtable = {
297 "bisect": (bisect_run, [], _("hg bisect [help|init|reset|next|good|bad]")),
297 "bisect": (bisect_run, [], _("hg bisect [help|init|reset|next|good|bad]")),
298 #"bisect-test": (test, [], "hg bisect-test rev"),
298 #"bisect-test": (test, [], "hg bisect-test rev"),
299 }
299 }
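The bisectcmdtable above wires the subcommands to the bisect object; a typical session (revision names are placeholders) might look like:

    hg bisect init
    hg bisect bad                 # the current revision is broken
    hg bisect good GOODREV        # a known-good revision; bisect updates to a midpoint
    ... build and test, then repeat "hg bisect good" / "hg bisect bad" ...
    hg bisect reset               # remove the state files when done

State is persisted by b.write() into the "good" and "bad" files under .hg/bisect, so a session survives across invocations, and the search ends once "The first bad revision is:" is printed.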
@@ -1,309 +1,308 b''
1 # Minimal support for git commands on an hg repository
1 # Minimal support for git commands on an hg repository
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.demandload import *
8 import time, sys, signal, os
9 demandload(globals(), 'time sys signal os')
9 from mercurial import hg, fancyopts, commands, ui, util, patch, revlog
10 demandload(globals(), 'mercurial:hg,fancyopts,commands,ui,util,patch,revlog')
11
10
12 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
11 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
13 """diff trees from two commits"""
12 """diff trees from two commits"""
14 def __difftree(repo, node1, node2, files=[]):
13 def __difftree(repo, node1, node2, files=[]):
15 if node2:
14 if node2:
16 change = repo.changelog.read(node2)
15 change = repo.changelog.read(node2)
17 mmap2 = repo.manifest.read(change[0])
16 mmap2 = repo.manifest.read(change[0])
18 status = repo.status(node1, node2, files=files)[:5]
17 status = repo.status(node1, node2, files=files)[:5]
19 modified, added, removed, deleted, unknown = status
18 modified, added, removed, deleted, unknown = status
20 else:
19 else:
21 status = repo.status(node1, files=files)[:5]
20 status = repo.status(node1, files=files)[:5]
22 modified, added, removed, deleted, unknown = status
21 modified, added, removed, deleted, unknown = status
23 if not node1:
22 if not node1:
24 node1 = repo.dirstate.parents()[0]
23 node1 = repo.dirstate.parents()[0]
25
24
26 change = repo.changelog.read(node1)
25 change = repo.changelog.read(node1)
27 mmap = repo.manifest.read(change[0])
26 mmap = repo.manifest.read(change[0])
28 empty = hg.short(hg.nullid)
27 empty = hg.short(hg.nullid)
29
28
30 for f in modified:
29 for f in modified:
31 # TODO get file permissions
30 # TODO get file permissions
32 print ":100664 100664 %s %s M\t%s\t%s" % (hg.short(mmap[f]),
31 print ":100664 100664 %s %s M\t%s\t%s" % (hg.short(mmap[f]),
33 hg.short(mmap2[f]),
32 hg.short(mmap2[f]),
34 f, f)
33 f, f)
35 for f in added:
34 for f in added:
36 print ":000000 100664 %s %s N\t%s\t%s" % (empty,
35 print ":000000 100664 %s %s N\t%s\t%s" % (empty,
37 hg.short(mmap2[f]),
36 hg.short(mmap2[f]),
38 f, f)
37 f, f)
39 for f in removed:
38 for f in removed:
40 print ":100664 000000 %s %s D\t%s\t%s" % (hg.short(mmap[f]),
39 print ":100664 000000 %s %s D\t%s\t%s" % (hg.short(mmap[f]),
41 empty,
40 empty,
42 f, f)
41 f, f)
43 ##
42 ##
44
43
45 while True:
44 while True:
46 if opts['stdin']:
45 if opts['stdin']:
47 try:
46 try:
48 line = raw_input().split(' ')
47 line = raw_input().split(' ')
49 node1 = line[0]
48 node1 = line[0]
50 if len(line) > 1:
49 if len(line) > 1:
51 node2 = line[1]
50 node2 = line[1]
52 else:
51 else:
53 node2 = None
52 node2 = None
54 except EOFError:
53 except EOFError:
55 break
54 break
56 node1 = repo.lookup(node1)
55 node1 = repo.lookup(node1)
57 if node2:
56 if node2:
58 node2 = repo.lookup(node2)
57 node2 = repo.lookup(node2)
59 else:
58 else:
60 node2 = node1
59 node2 = node1
61 node1 = repo.changelog.parents(node1)[0]
60 node1 = repo.changelog.parents(node1)[0]
62 if opts['patch']:
61 if opts['patch']:
63 if opts['pretty']:
62 if opts['pretty']:
64 catcommit(repo, node2, "")
63 catcommit(repo, node2, "")
65 patch.diff(repo, node1, node2,
64 patch.diff(repo, node1, node2,
66 files=files,
65 files=files,
67 opts=patch.diffopts(ui, {'git': True}))
66 opts=patch.diffopts(ui, {'git': True}))
68 else:
67 else:
69 __difftree(repo, node1, node2, files=files)
68 __difftree(repo, node1, node2, files=files)
70 if not opts['stdin']:
69 if not opts['stdin']:
71 break
70 break
72
71
73 def catcommit(repo, n, prefix, changes=None):
72 def catcommit(repo, n, prefix, changes=None):
74 nlprefix = '\n' + prefix;
73 nlprefix = '\n' + prefix;
75 (p1, p2) = repo.changelog.parents(n)
74 (p1, p2) = repo.changelog.parents(n)
76 (h, h1, h2) = map(hg.short, (n, p1, p2))
75 (h, h1, h2) = map(hg.short, (n, p1, p2))
77 (i1, i2) = map(repo.changelog.rev, (p1, p2))
76 (i1, i2) = map(repo.changelog.rev, (p1, p2))
78 if not changes:
77 if not changes:
79 changes = repo.changelog.read(n)
78 changes = repo.changelog.read(n)
80 print "tree %s" % (hg.short(changes[0]))
79 print "tree %s" % (hg.short(changes[0]))
81 if i1 != hg.nullrev: print "parent %s" % (h1)
80 if i1 != hg.nullrev: print "parent %s" % (h1)
82 if i2 != hg.nullrev: print "parent %s" % (h2)
81 if i2 != hg.nullrev: print "parent %s" % (h2)
83 date_ar = changes[2]
82 date_ar = changes[2]
84 date = int(float(date_ar[0]))
83 date = int(float(date_ar[0]))
85 lines = changes[4].splitlines()
84 lines = changes[4].splitlines()
86 if lines and lines[-1].startswith('committer:'):
85 if lines and lines[-1].startswith('committer:'):
87 committer = lines[-1].split(': ')[1].rstrip()
86 committer = lines[-1].split(': ')[1].rstrip()
88 else:
87 else:
89 committer = changes[1]
88 committer = changes[1]
90
89
91 print "author %s %s %s" % (changes[1], date, date_ar[1])
90 print "author %s %s %s" % (changes[1], date, date_ar[1])
92 print "committer %s %s %s" % (committer, date, date_ar[1])
91 print "committer %s %s %s" % (committer, date, date_ar[1])
93 print "revision %d" % repo.changelog.rev(n)
92 print "revision %d" % repo.changelog.rev(n)
94 print ""
93 print ""
95 if prefix != "":
94 if prefix != "":
96 print "%s%s" % (prefix, changes[4].replace('\n', nlprefix).strip())
95 print "%s%s" % (prefix, changes[4].replace('\n', nlprefix).strip())
97 else:
96 else:
98 print changes[4]
97 print changes[4]
99 if prefix:
98 if prefix:
100 sys.stdout.write('\0')
99 sys.stdout.write('\0')
101
100
102 def base(ui, repo, node1, node2):
101 def base(ui, repo, node1, node2):
103 """Output common ancestor information"""
102 """Output common ancestor information"""
104 node1 = repo.lookup(node1)
103 node1 = repo.lookup(node1)
105 node2 = repo.lookup(node2)
104 node2 = repo.lookup(node2)
106 n = repo.changelog.ancestor(node1, node2)
105 n = repo.changelog.ancestor(node1, node2)
107 print hg.short(n)
106 print hg.short(n)
108
107
109 def catfile(ui, repo, type=None, r=None, **opts):
108 def catfile(ui, repo, type=None, r=None, **opts):
110 """cat a specific revision"""
109 """cat a specific revision"""
111 # in stdin mode, every line except the commit is prefixed with two
110 # in stdin mode, every line except the commit is prefixed with two
112 # spaces. This way our caller can find the commit without magic
111 # spaces. This way our caller can find the commit without magic
113 # strings
112 # strings
114 #
113 #
115 prefix = ""
114 prefix = ""
116 if opts['stdin']:
115 if opts['stdin']:
117 try:
116 try:
118 (type, r) = raw_input().split(' ');
117 (type, r) = raw_input().split(' ');
119 prefix = " "
118 prefix = " "
120 except EOFError:
119 except EOFError:
121 return
120 return
122
121
123 else:
122 else:
124 if not type or not r:
123 if not type or not r:
125 ui.warn("cat-file: type or revision not supplied\n")
124 ui.warn("cat-file: type or revision not supplied\n")
126 commands.help_(ui, 'cat-file')
125 commands.help_(ui, 'cat-file')
127
126
128 while r:
127 while r:
129 if type != "commit":
128 if type != "commit":
130 sys.stderr.write("aborting hg cat-file only understands commits\n")
129 sys.stderr.write("aborting hg cat-file only understands commits\n")
131 sys.exit(1);
130 sys.exit(1);
132 n = repo.lookup(r)
131 n = repo.lookup(r)
133 catcommit(repo, n, prefix)
132 catcommit(repo, n, prefix)
134 if opts['stdin']:
133 if opts['stdin']:
135 try:
134 try:
136 (type, r) = raw_input().split(' ');
135 (type, r) = raw_input().split(' ');
137 except EOFError:
136 except EOFError:
138 break
137 break
139 else:
138 else:
140 break
139 break
141
140
142 # git rev-tree is a confusing thing. You can supply a number of
141 # git rev-tree is a confusing thing. You can supply a number of
143 # commit sha1s on the command line, and it walks the commit history
142 # commit sha1s on the command line, and it walks the commit history
144 # telling you which commits are reachable from the supplied ones via
143 # telling you which commits are reachable from the supplied ones via
145 # a bitmask based on arg position.
144 # a bitmask based on arg position.
146 # you can specify a commit to stop at by starting the sha1 with ^
145 # you can specify a commit to stop at by starting the sha1 with ^
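# Example (hypothetical revisions): for "hg debug-rev-list REV1 REV2", is_reachable()
# below gives each commit a mask with bit 0 set when it is reachable from REV1 and
# bit 1 set when it is reachable from REV2, so 1 = REV1 only, 2 = REV2 only,
# 3 = both; commits with mask 0 are not printed.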
147 def revtree(args, repo, full="tree", maxnr=0, parents=False):
146 def revtree(args, repo, full="tree", maxnr=0, parents=False):
148 def chlogwalk():
147 def chlogwalk():
149 ch = repo.changelog
148 ch = repo.changelog
150 count = ch.count()
149 count = ch.count()
151 i = count
150 i = count
152 l = [0] * 100
151 l = [0] * 100
153 chunk = 100
152 chunk = 100
154 while True:
153 while True:
155 if chunk > i:
154 if chunk > i:
156 chunk = i
155 chunk = i
157 i = 0
156 i = 0
158 else:
157 else:
159 i -= chunk
158 i -= chunk
160
159
161 for x in xrange(0, chunk):
160 for x in xrange(0, chunk):
162 if i + x >= count:
161 if i + x >= count:
163 l[chunk - x:] = [0] * (chunk - x)
162 l[chunk - x:] = [0] * (chunk - x)
164 break
163 break
165 if full != None:
164 if full != None:
166 l[x] = ch.read(ch.node(i + x))
165 l[x] = ch.read(ch.node(i + x))
167 else:
166 else:
168 l[x] = 1
167 l[x] = 1
169 for x in xrange(chunk-1, -1, -1):
168 for x in xrange(chunk-1, -1, -1):
170 if l[x] != 0:
169 if l[x] != 0:
171 yield (i + x, full != None and l[x] or None)
170 yield (i + x, full != None and l[x] or None)
172 if i == 0:
171 if i == 0:
173 break
172 break
174
173
175 # calculate and return the reachability bitmask for sha
174 # calculate and return the reachability bitmask for sha
176 def is_reachable(ar, reachable, sha):
175 def is_reachable(ar, reachable, sha):
177 if len(ar) == 0:
176 if len(ar) == 0:
178 return 1
177 return 1
179 mask = 0
178 mask = 0
180 for i in xrange(len(ar)):
179 for i in xrange(len(ar)):
181 if sha in reachable[i]:
180 if sha in reachable[i]:
182 mask |= 1 << i
181 mask |= 1 << i
183
182
184 return mask
183 return mask
185
184
186 reachable = []
185 reachable = []
187 stop_sha1 = []
186 stop_sha1 = []
188 want_sha1 = []
187 want_sha1 = []
189 count = 0
188 count = 0
190
189
191 # figure out which commits they are asking for and which ones they
190 # figure out which commits they are asking for and which ones they
192 # want us to stop on
191 # want us to stop on
193 for i in xrange(len(args)):
192 for i in xrange(len(args)):
194 if args[i].startswith('^'):
193 if args[i].startswith('^'):
195 s = repo.lookup(args[i][1:])
194 s = repo.lookup(args[i][1:])
196 stop_sha1.append(s)
195 stop_sha1.append(s)
197 want_sha1.append(s)
196 want_sha1.append(s)
198 elif args[i] != 'HEAD':
197 elif args[i] != 'HEAD':
199 want_sha1.append(repo.lookup(args[i]))
198 want_sha1.append(repo.lookup(args[i]))
200
199
201 # calculate the graph for the supplied commits
200 # calculate the graph for the supplied commits
202 for i in xrange(len(want_sha1)):
201 for i in xrange(len(want_sha1)):
203 reachable.append({});
202 reachable.append({});
204 n = want_sha1[i];
203 n = want_sha1[i];
205 visit = [n];
204 visit = [n];
206 reachable[i][n] = 1
205 reachable[i][n] = 1
207 while visit:
206 while visit:
208 n = visit.pop(0)
207 n = visit.pop(0)
209 if n in stop_sha1:
208 if n in stop_sha1:
210 continue
209 continue
211 for p in repo.changelog.parents(n):
210 for p in repo.changelog.parents(n):
212 if p not in reachable[i]:
211 if p not in reachable[i]:
213 reachable[i][p] = 1
212 reachable[i][p] = 1
214 visit.append(p)
213 visit.append(p)
215 if p in stop_sha1:
214 if p in stop_sha1:
216 continue
215 continue
217
216
218 # walk the repository looking for commits that are in our
217 # walk the repository looking for commits that are in our
219 # reachability graph
218 # reachability graph
220 for i, changes in chlogwalk():
219 for i, changes in chlogwalk():
221 n = repo.changelog.node(i)
220 n = repo.changelog.node(i)
222 mask = is_reachable(want_sha1, reachable, n)
221 mask = is_reachable(want_sha1, reachable, n)
223 if mask:
222 if mask:
224 parentstr = ""
223 parentstr = ""
225 if parents:
224 if parents:
226 pp = repo.changelog.parents(n)
225 pp = repo.changelog.parents(n)
227 if pp[0] != hg.nullid:
226 if pp[0] != hg.nullid:
228 parentstr += " " + hg.short(pp[0])
227 parentstr += " " + hg.short(pp[0])
229 if pp[1] != hg.nullid:
228 if pp[1] != hg.nullid:
230 parentstr += " " + hg.short(pp[1])
229 parentstr += " " + hg.short(pp[1])
231 if not full:
230 if not full:
232 print hg.short(n) + parentstr
231 print hg.short(n) + parentstr
233 elif full == "commit":
232 elif full == "commit":
234 print hg.short(n) + parentstr
233 print hg.short(n) + parentstr
235 catcommit(repo, n, ' ', changes)
234 catcommit(repo, n, ' ', changes)
236 else:
235 else:
237 (p1, p2) = repo.changelog.parents(n)
236 (p1, p2) = repo.changelog.parents(n)
238 (h, h1, h2) = map(hg.short, (n, p1, p2))
237 (h, h1, h2) = map(hg.short, (n, p1, p2))
239 (i1, i2) = map(repo.changelog.rev, (p1, p2))
238 (i1, i2) = map(repo.changelog.rev, (p1, p2))
240
239
241 date = changes[2][0]
240 date = changes[2][0]
242 print "%s %s:%s" % (date, h, mask),
241 print "%s %s:%s" % (date, h, mask),
243 mask = is_reachable(want_sha1, reachable, p1)
242 mask = is_reachable(want_sha1, reachable, p1)
244 if i1 != hg.nullrev and mask > 0:
243 if i1 != hg.nullrev and mask > 0:
245 print "%s:%s " % (h1, mask),
244 print "%s:%s " % (h1, mask),
246 mask = is_reachable(want_sha1, reachable, p2)
245 mask = is_reachable(want_sha1, reachable, p2)
247 if i2 != hg.nullrev and mask > 0:
246 if i2 != hg.nullrev and mask > 0:
248 print "%s:%s " % (h2, mask),
247 print "%s:%s " % (h2, mask),
249 print ""
248 print ""
250 if maxnr and count >= maxnr:
249 if maxnr and count >= maxnr:
251 break
250 break
252 count += 1
251 count += 1
253
252
254 def revparse(ui, repo, *revs, **opts):
253 def revparse(ui, repo, *revs, **opts):
255 """Parse given revisions"""
254 """Parse given revisions"""
256 def revstr(rev):
255 def revstr(rev):
257 if rev == 'HEAD':
256 if rev == 'HEAD':
258 rev = 'tip'
257 rev = 'tip'
259 return revlog.hex(repo.lookup(rev))
258 return revlog.hex(repo.lookup(rev))
260
259
261 for r in revs:
260 for r in revs:
262 revrange = r.split(':', 1)
261 revrange = r.split(':', 1)
263 ui.write('%s\n' % revstr(revrange[0]))
262 ui.write('%s\n' % revstr(revrange[0]))
264 if len(revrange) == 2:
263 if len(revrange) == 2:
265 ui.write('^%s\n' % revstr(revrange[1]))
264 ui.write('^%s\n' % revstr(revrange[1]))
266
265
267 # git rev-list tries to order things by date, and has the ability to stop
266 # git rev-list tries to order things by date, and has the ability to stop
268 # at a given commit without walking the whole repo. TODO add the stop
267 # at a given commit without walking the whole repo. TODO add the stop
269 # parameter
268 # parameter
270 def revlist(ui, repo, *revs, **opts):
269 def revlist(ui, repo, *revs, **opts):
271 """print revisions"""
270 """print revisions"""
272 if opts['header']:
271 if opts['header']:
273 full = "commit"
272 full = "commit"
274 else:
273 else:
275 full = None
274 full = None
276 copy = [x for x in revs]
275 copy = [x for x in revs]
277 revtree(copy, repo, full, opts['max_count'], opts['parents'])
276 revtree(copy, repo, full, opts['max_count'], opts['parents'])
278
277
279 def view(ui, repo, *etc, **opts):
278 def view(ui, repo, *etc, **opts):
280 "start interactive history viewer"
279 "start interactive history viewer"
281 os.chdir(repo.root)
280 os.chdir(repo.root)
282 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
281 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
283 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
282 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
284 ui.debug("running %s\n" % cmd)
283 ui.debug("running %s\n" % cmd)
285 os.system(cmd)
284 os.system(cmd)
286
285
287 cmdtable = {
286 cmdtable = {
288 "^view": (view,
287 "^view": (view,
289 [('l', 'limit', '', 'limit number of changes displayed')],
288 [('l', 'limit', '', 'limit number of changes displayed')],
290 'hg view [-l LIMIT] [REVRANGE]'),
289 'hg view [-l LIMIT] [REVRANGE]'),
291 "debug-diff-tree": (difftree, [('p', 'patch', None, 'generate patch'),
290 "debug-diff-tree": (difftree, [('p', 'patch', None, 'generate patch'),
292 ('r', 'recursive', None, 'recursive'),
291 ('r', 'recursive', None, 'recursive'),
293 ('P', 'pretty', None, 'pretty'),
292 ('P', 'pretty', None, 'pretty'),
294 ('s', 'stdin', None, 'stdin'),
293 ('s', 'stdin', None, 'stdin'),
295 ('C', 'copy', None, 'detect copies'),
294 ('C', 'copy', None, 'detect copies'),
296 ('S', 'search', "", 'search')],
295 ('S', 'search', "", 'search')],
297 "hg git-diff-tree [options] node1 node2 [files...]"),
296 "hg git-diff-tree [options] node1 node2 [files...]"),
298 "debug-cat-file": (catfile, [('s', 'stdin', None, 'stdin')],
297 "debug-cat-file": (catfile, [('s', 'stdin', None, 'stdin')],
299 "hg debug-cat-file [options] type file"),
298 "hg debug-cat-file [options] type file"),
300 "debug-merge-base": (base, [], "hg debug-merge-base node node"),
299 "debug-merge-base": (base, [], "hg debug-merge-base node node"),
301 'debug-rev-parse': (revparse,
300 'debug-rev-parse': (revparse,
302 [('', 'default', '', 'ignored')],
301 [('', 'default', '', 'ignored')],
303 "hg debug-rev-parse rev"),
302 "hg debug-rev-parse rev"),
304 "debug-rev-list": (revlist, [('H', 'header', None, 'header'),
303 "debug-rev-list": (revlist, [('H', 'header', None, 'header'),
305 ('t', 'topo-order', None, 'topo-order'),
304 ('t', 'topo-order', None, 'topo-order'),
306 ('p', 'parents', None, 'parents'),
305 ('p', 'parents', None, 'parents'),
307 ('n', 'max-count', 0, 'max-count')],
306 ('n', 'max-count', 0, 'max-count')],
308 "hg debug-rev-list [options] revs"),
307 "hg debug-rev-list [options] revs"),
309 }
308 }
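The table above is the plumbing behind the interactive viewer; illustrative invocations (revisions are placeholders):

    hg view -l 100                        # browse recent history in the viewer
    hg debug-rev-list --header -n 10 REV  # commit headers for up to 10 revisions
    hg debug-merge-base REV1 REV2         # print the common ancestor

The debug-* commands intentionally mirror git plumbing (rev-list, cat-file, merge-base) so that the viewer program named by the hgk.path setting can drive them.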
@@ -1,2191 +1,2189 b''
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
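The tasks listed in the docstring chain together; a short, illustrative session (patch and file names are placeholders):

    hg qinit                      # create .hg/patches
    hg qnew fix-bug.patch         # start a new patch on top of the applied stack
    (edit files)
    hg qrefresh                   # fold working-directory changes into the top patch
    hg qpop                       # unapply the top patch; it remains in the series
    hg qpush                      # reapply it

As the docstring notes, an applied patch exists both as a file under .hg/patches and as a changeset in the repository.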
32 from mercurial.demandload import *
33 from mercurial.i18n import gettext as _
32 from mercurial.i18n import gettext as _
34 from mercurial import commands
33 from mercurial import commands, cmdutil, hg, patch, revlog, util, changegroup
35 demandload(globals(), "os sys re struct traceback errno bz2")
34 import os, sys, re, struct, traceback, errno, bz2
36 demandload(globals(), "mercurial:cmdutil,hg,patch,revlog,util,changegroup")
37
35
38 commands.norepo += " qclone qversion"
36 commands.norepo += " qclone qversion"
39
37
40 class statusentry:
38 class statusentry:
41 def __init__(self, rev, name=None):
39 def __init__(self, rev, name=None):
42 if not name:
40 if not name:
43 fields = rev.split(':', 1)
41 fields = rev.split(':', 1)
44 if len(fields) == 2:
42 if len(fields) == 2:
45 self.rev, self.name = fields
43 self.rev, self.name = fields
46 else:
44 else:
47 self.rev, self.name = None, None
45 self.rev, self.name = None, None
48 else:
46 else:
49 self.rev, self.name = rev, name
47 self.rev, self.name = rev, name
50
48
51 def __str__(self):
49 def __str__(self):
52 return self.rev + ':' + self.name
50 return self.rev + ':' + self.name
53
51
54 class queue:
52 class queue:
55 def __init__(self, ui, path, patchdir=None):
53 def __init__(self, ui, path, patchdir=None):
56 self.basepath = path
54 self.basepath = path
57 self.path = patchdir or os.path.join(path, "patches")
55 self.path = patchdir or os.path.join(path, "patches")
58 self.opener = util.opener(self.path)
56 self.opener = util.opener(self.path)
59 self.ui = ui
57 self.ui = ui
60 self.applied = []
58 self.applied = []
61 self.full_series = []
59 self.full_series = []
62 self.applied_dirty = 0
60 self.applied_dirty = 0
63 self.series_dirty = 0
61 self.series_dirty = 0
64 self.series_path = "series"
62 self.series_path = "series"
65 self.status_path = "status"
63 self.status_path = "status"
66 self.guards_path = "guards"
64 self.guards_path = "guards"
67 self.active_guards = None
65 self.active_guards = None
68 self.guards_dirty = False
66 self.guards_dirty = False
69 self._diffopts = None
67 self._diffopts = None
70
68
71 if os.path.exists(self.join(self.series_path)):
69 if os.path.exists(self.join(self.series_path)):
72 self.full_series = self.opener(self.series_path).read().splitlines()
70 self.full_series = self.opener(self.series_path).read().splitlines()
73 self.parse_series()
71 self.parse_series()
74
72
75 if os.path.exists(self.join(self.status_path)):
73 if os.path.exists(self.join(self.status_path)):
76 lines = self.opener(self.status_path).read().splitlines()
74 lines = self.opener(self.status_path).read().splitlines()
77 self.applied = [statusentry(l) for l in lines]
75 self.applied = [statusentry(l) for l in lines]
78
76
79 def diffopts(self):
77 def diffopts(self):
80 if self._diffopts is None:
78 if self._diffopts is None:
81 self._diffopts = patch.diffopts(self.ui)
79 self._diffopts = patch.diffopts(self.ui)
82 return self._diffopts
80 return self._diffopts
83
81
84 def join(self, *p):
82 def join(self, *p):
85 return os.path.join(self.path, *p)
83 return os.path.join(self.path, *p)
86
84
87 def find_series(self, patch):
85 def find_series(self, patch):
88 pre = re.compile("(\s*)([^#]+)")
86 pre = re.compile("(\s*)([^#]+)")
89 index = 0
87 index = 0
90 for l in self.full_series:
88 for l in self.full_series:
91 m = pre.match(l)
89 m = pre.match(l)
92 if m:
90 if m:
93 s = m.group(2)
91 s = m.group(2)
94 s = s.rstrip()
92 s = s.rstrip()
95 if s == patch:
93 if s == patch:
96 return index
94 return index
97 index += 1
95 index += 1
98 return None
96 return None
99
97
100 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
98 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
101
99
102 def parse_series(self):
100 def parse_series(self):
103 self.series = []
101 self.series = []
104 self.series_guards = []
102 self.series_guards = []
105 for l in self.full_series:
103 for l in self.full_series:
106 h = l.find('#')
104 h = l.find('#')
107 if h == -1:
105 if h == -1:
108 patch = l
106 patch = l
109 comment = ''
107 comment = ''
110 elif h == 0:
108 elif h == 0:
111 continue
109 continue
112 else:
110 else:
113 patch = l[:h]
111 patch = l[:h]
114 comment = l[h:]
112 comment = l[h:]
115 patch = patch.strip()
113 patch = patch.strip()
116 if patch:
114 if patch:
117 if patch in self.series:
115 if patch in self.series:
118 raise util.Abort(_('%s appears more than once in %s') %
116 raise util.Abort(_('%s appears more than once in %s') %
119 (patch, self.join(self.series_path)))
117 (patch, self.join(self.series_path)))
120 self.series.append(patch)
118 self.series.append(patch)
121 self.series_guards.append(self.guard_re.findall(comment))
119 self.series_guards.append(self.guard_re.findall(comment))
122
120
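# Hypothetical series file illustrating the guard syntax handled above:
#   fix-build.patch #+linux
#   experimental.patch #-stable #+devel
# parse_series() splits each line at '#', and guard_re extracts ['+linux'] and
# ['-stable', '+devel'] into series_guards; pushable() later matches these
# against the active guards to decide whether a patch may be pushed.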
123 def check_guard(self, guard):
121 def check_guard(self, guard):
124 bad_chars = '# \t\r\n\f'
122 bad_chars = '# \t\r\n\f'
125 first = guard[0]
123 first = guard[0]
126 for c in '-+':
124 for c in '-+':
127 if first == c:
125 if first == c:
128 return (_('guard %r starts with invalid character: %r') %
126 return (_('guard %r starts with invalid character: %r') %
129 (guard, c))
127 (guard, c))
130 for c in bad_chars:
128 for c in bad_chars:
131 if c in guard:
129 if c in guard:
132 return _('invalid character in guard %r: %r') % (guard, c)
130 return _('invalid character in guard %r: %r') % (guard, c)
133
131
134 def set_active(self, guards):
132 def set_active(self, guards):
135 for guard in guards:
133 for guard in guards:
136 bad = self.check_guard(guard)
134 bad = self.check_guard(guard)
137 if bad:
135 if bad:
138 raise util.Abort(bad)
136 raise util.Abort(bad)
139 guards = dict.fromkeys(guards).keys()
137 guards = dict.fromkeys(guards).keys()
140 guards.sort()
138 guards.sort()
141 self.ui.debug('active guards: %s\n' % ' '.join(guards))
139 self.ui.debug('active guards: %s\n' % ' '.join(guards))
142 self.active_guards = guards
140 self.active_guards = guards
143 self.guards_dirty = True
141 self.guards_dirty = True
144
142
145 def active(self):
143 def active(self):
146 if self.active_guards is None:
144 if self.active_guards is None:
147 self.active_guards = []
145 self.active_guards = []
148 try:
146 try:
149 guards = self.opener(self.guards_path).read().split()
147 guards = self.opener(self.guards_path).read().split()
150 except IOError, err:
148 except IOError, err:
151 if err.errno != errno.ENOENT: raise
149 if err.errno != errno.ENOENT: raise
152 guards = []
150 guards = []
153 for i, guard in enumerate(guards):
151 for i, guard in enumerate(guards):
154 bad = self.check_guard(guard)
152 bad = self.check_guard(guard)
155 if bad:
153 if bad:
156 self.ui.warn('%s:%d: %s\n' %
154 self.ui.warn('%s:%d: %s\n' %
157 (self.join(self.guards_path), i + 1, bad))
155 (self.join(self.guards_path), i + 1, bad))
158 else:
156 else:
159 self.active_guards.append(guard)
157 self.active_guards.append(guard)
160 return self.active_guards
158 return self.active_guards
161
159
162 def set_guards(self, idx, guards):
160 def set_guards(self, idx, guards):
163 for g in guards:
161 for g in guards:
164 if len(g) < 2:
162 if len(g) < 2:
165 raise util.Abort(_('guard %r too short') % g)
163 raise util.Abort(_('guard %r too short') % g)
166 if g[0] not in '-+':
164 if g[0] not in '-+':
167 raise util.Abort(_('guard %r starts with invalid char') % g)
165 raise util.Abort(_('guard %r starts with invalid char') % g)
168 bad = self.check_guard(g[1:])
166 bad = self.check_guard(g[1:])
169 if bad:
167 if bad:
170 raise util.Abort(bad)
168 raise util.Abort(bad)
171 drop = self.guard_re.sub('', self.full_series[idx])
169 drop = self.guard_re.sub('', self.full_series[idx])
172 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
170 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
173 self.parse_series()
171 self.parse_series()
174 self.series_dirty = True
172 self.series_dirty = True
175
173
176 def pushable(self, idx):
174 def pushable(self, idx):
177 if isinstance(idx, str):
175 if isinstance(idx, str):
178 idx = self.series.index(idx)
176 idx = self.series.index(idx)
179 patchguards = self.series_guards[idx]
177 patchguards = self.series_guards[idx]
180 if not patchguards:
178 if not patchguards:
181 return True, None
179 return True, None
182 default = False
180 default = False
183 guards = self.active()
181 guards = self.active()
184 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
182 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
185 if exactneg:
183 if exactneg:
186 return False, exactneg[0]
184 return False, exactneg[0]
187 pos = [g for g in patchguards if g[0] == '+']
185 pos = [g for g in patchguards if g[0] == '+']
188 exactpos = [g for g in pos if g[1:] in guards]
186 exactpos = [g for g in pos if g[1:] in guards]
189 if pos:
187 if pos:
190 if exactpos:
188 if exactpos:
191 return True, exactpos[0]
189 return True, exactpos[0]
192 return False, pos
190 return False, pos
193 return True, ''
191 return True, ''
194
192
195 def explain_pushable(self, idx, all_patches=False):
193 def explain_pushable(self, idx, all_patches=False):
196 write = all_patches and self.ui.write or self.ui.warn
194 write = all_patches and self.ui.write or self.ui.warn
197 if all_patches or self.ui.verbose:
195 if all_patches or self.ui.verbose:
198 if isinstance(idx, str):
196 if isinstance(idx, str):
199 idx = self.series.index(idx)
197 idx = self.series.index(idx)
200 pushable, why = self.pushable(idx)
198 pushable, why = self.pushable(idx)
201 if all_patches and pushable:
199 if all_patches and pushable:
202 if why is None:
200 if why is None:
203 write(_('allowing %s - no guards in effect\n') %
201 write(_('allowing %s - no guards in effect\n') %
204 self.series[idx])
202 self.series[idx])
205 else:
203 else:
206 if not why:
204 if not why:
207 write(_('allowing %s - no matching negative guards\n') %
205 write(_('allowing %s - no matching negative guards\n') %
208 self.series[idx])
206 self.series[idx])
209 else:
207 else:
210 write(_('allowing %s - guarded by %r\n') %
208 write(_('allowing %s - guarded by %r\n') %
211 (self.series[idx], why))
209 (self.series[idx], why))
212 if not pushable:
210 if not pushable:
213 if why:
211 if why:
214 write(_('skipping %s - guarded by %r\n') %
212 write(_('skipping %s - guarded by %r\n') %
215 (self.series[idx], why))
213 (self.series[idx], why))
216 else:
214 else:
217 write(_('skipping %s - no matching guards\n') %
215 write(_('skipping %s - no matching guards\n') %
218 self.series[idx])
216 self.series[idx])
219
217
220 def save_dirty(self):
218 def save_dirty(self):
221 def write_list(items, path):
219 def write_list(items, path):
222 fp = self.opener(path, 'w')
220 fp = self.opener(path, 'w')
223 for i in items:
221 for i in items:
224 print >> fp, i
222 print >> fp, i
225 fp.close()
223 fp.close()
226 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
224 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
227 if self.series_dirty: write_list(self.full_series, self.series_path)
225 if self.series_dirty: write_list(self.full_series, self.series_path)
228 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
226 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
229
227
230 def readheaders(self, patch):
228 def readheaders(self, patch):
231 def eatdiff(lines):
229 def eatdiff(lines):
232 while lines:
230 while lines:
233 l = lines[-1]
231 l = lines[-1]
234 if (l.startswith("diff -") or
232 if (l.startswith("diff -") or
235 l.startswith("Index:") or
233 l.startswith("Index:") or
236 l.startswith("===========")):
234 l.startswith("===========")):
237 del lines[-1]
235 del lines[-1]
238 else:
236 else:
239 break
237 break
240 def eatempty(lines):
238 def eatempty(lines):
241 while lines:
239 while lines:
242 l = lines[-1]
240 l = lines[-1]
243 if re.match('\s*$', l):
241 if re.match('\s*$', l):
244 del lines[-1]
242 del lines[-1]
245 else:
243 else:
246 break
244 break
247
245
248 pf = self.join(patch)
246 pf = self.join(patch)
249 message = []
247 message = []
250 comments = []
248 comments = []
251 user = None
249 user = None
252 date = None
250 date = None
253 format = None
251 format = None
254 subject = None
252 subject = None
255 diffstart = 0
253 diffstart = 0
256
254
257 for line in file(pf):
255 for line in file(pf):
258 line = line.rstrip()
256 line = line.rstrip()
259 if line.startswith('diff --git'):
257 if line.startswith('diff --git'):
260 diffstart = 2
258 diffstart = 2
261 break
259 break
262 if diffstart:
260 if diffstart:
263 if line.startswith('+++ '):
261 if line.startswith('+++ '):
264 diffstart = 2
262 diffstart = 2
265 break
263 break
266 if line.startswith("--- "):
264 if line.startswith("--- "):
267 diffstart = 1
265 diffstart = 1
268 continue
266 continue
269 elif format == "hgpatch":
267 elif format == "hgpatch":
270 # parse values when importing the result of an hg export
268 # parse values when importing the result of an hg export
271 if line.startswith("# User "):
269 if line.startswith("# User "):
272 user = line[7:]
270 user = line[7:]
273 elif line.startswith("# Date "):
271 elif line.startswith("# Date "):
274 date = line[7:]
272 date = line[7:]
275 elif not line.startswith("# ") and line:
273 elif not line.startswith("# ") and line:
276 message.append(line)
274 message.append(line)
277 format = None
275 format = None
278 elif line == '# HG changeset patch':
276 elif line == '# HG changeset patch':
279 format = "hgpatch"
277 format = "hgpatch"
280 elif (format != "tagdone" and (line.startswith("Subject: ") or
278 elif (format != "tagdone" and (line.startswith("Subject: ") or
281 line.startswith("subject: "))):
279 line.startswith("subject: "))):
282 subject = line[9:]
280 subject = line[9:]
283 format = "tag"
281 format = "tag"
284 elif (format != "tagdone" and (line.startswith("From: ") or
282 elif (format != "tagdone" and (line.startswith("From: ") or
285 line.startswith("from: "))):
283 line.startswith("from: "))):
286 user = line[6:]
284 user = line[6:]
287 format = "tag"
285 format = "tag"
288 elif format == "tag" and line == "":
286 elif format == "tag" and line == "":
289 # when looking for tags (subject: from: etc) they
287 # when looking for tags (subject: from: etc) they
290 # end once you find a blank line in the source
288 # end once you find a blank line in the source
291 format = "tagdone"
289 format = "tagdone"
292 elif message or line:
290 elif message or line:
293 message.append(line)
291 message.append(line)
294 comments.append(line)
292 comments.append(line)
295
293
296 eatdiff(message)
294 eatdiff(message)
297 eatdiff(comments)
295 eatdiff(comments)
298 eatempty(message)
296 eatempty(message)
299 eatempty(comments)
297 eatempty(comments)
300
298
301 # make sure message isn't empty
299 # make sure message isn't empty
302 if format and format.startswith("tag") and subject:
300 if format and format.startswith("tag") and subject:
303 message.insert(0, "")
301 message.insert(0, "")
304 message.insert(0, subject)
302 message.insert(0, subject)
305 return (message, comments, user, date, diffstart > 1)
303 return (message, comments, user, date, diffstart > 1)
306
304
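# Illustrative sketch, not part of mq.py: the header layout readheaders()
# above expects. The file name, user and hashes below are invented for the
# example. For an 'hg export'-style patch, user and date come from the
# '# User' / '# Date' lines and the remaining non-'#' lines before the
# first diff header become the commit message; a mail-style patch instead
# supplies 'From:' and 'Subject:' lines, read until the first blank line.
example_export_patch = (
    "# HG changeset patch\n"
    "# User Jane Doe <jane@example.org>\n"
    "# Date 1167609600 0\n"
    "fix an off-by-one in frobnicate\n"
    "\n"
    "diff -r 0123456789ab -r ba9876543210 frob.py\n"
    "--- a/frob.py\n"
    "+++ b/frob.py\n"
)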
307 def printdiff(self, repo, node1, node2=None, files=None,
305 def printdiff(self, repo, node1, node2=None, files=None,
308 fp=None, changes=None, opts={}):
306 fp=None, changes=None, opts={}):
309 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
307 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
310
308
311 patch.diff(repo, node1, node2, fns, match=matchfn,
309 patch.diff(repo, node1, node2, fns, match=matchfn,
312 fp=fp, changes=changes, opts=self.diffopts())
310 fp=fp, changes=changes, opts=self.diffopts())
313
311
314 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
312 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
315 # first try just applying the patch
313 # first try just applying the patch
316 (err, n) = self.apply(repo, [ patch ], update_status=False,
314 (err, n) = self.apply(repo, [ patch ], update_status=False,
317 strict=True, merge=rev, wlock=wlock)
315 strict=True, merge=rev, wlock=wlock)
318
316
319 if err == 0:
317 if err == 0:
320 return (err, n)
318 return (err, n)
321
319
322 if n is None:
320 if n is None:
323 raise util.Abort(_("apply failed for patch %s") % patch)
321 raise util.Abort(_("apply failed for patch %s") % patch)
324
322
325 self.ui.warn("patch didn't work out, merging %s\n" % patch)
323 self.ui.warn("patch didn't work out, merging %s\n" % patch)
326
324
327 # apply failed, strip away that rev and merge.
325 # apply failed, strip away that rev and merge.
328 hg.clean(repo, head, wlock=wlock)
326 hg.clean(repo, head, wlock=wlock)
329 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
327 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
330
328
331 c = repo.changelog.read(rev)
329 c = repo.changelog.read(rev)
332 ret = hg.merge(repo, rev, wlock=wlock)
330 ret = hg.merge(repo, rev, wlock=wlock)
333 if ret:
331 if ret:
334 raise util.Abort(_("update returned %d") % ret)
332 raise util.Abort(_("update returned %d") % ret)
335 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
333 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
336 if n == None:
334 if n == None:
337 raise util.Abort(_("repo commit failed"))
335 raise util.Abort(_("repo commit failed"))
338 try:
336 try:
339 message, comments, user, date, patchfound = mergeq.readheaders(patch)
337 message, comments, user, date, patchfound = mergeq.readheaders(patch)
340 except:
338 except:
341 raise util.Abort(_("unable to read %s") % patch)
339 raise util.Abort(_("unable to read %s") % patch)
342
340
343 patchf = self.opener(patch, "w")
341 patchf = self.opener(patch, "w")
344 if comments:
342 if comments:
345 comments = "\n".join(comments) + '\n\n'
343 comments = "\n".join(comments) + '\n\n'
346 patchf.write(comments)
344 patchf.write(comments)
347 self.printdiff(repo, head, n, fp=patchf)
345 self.printdiff(repo, head, n, fp=patchf)
348 patchf.close()
346 patchf.close()
349 return (0, n)
347 return (0, n)
350
348
351 def qparents(self, repo, rev=None):
349 def qparents(self, repo, rev=None):
352 if rev is None:
350 if rev is None:
353 (p1, p2) = repo.dirstate.parents()
351 (p1, p2) = repo.dirstate.parents()
354 if p2 == revlog.nullid:
352 if p2 == revlog.nullid:
355 return p1
353 return p1
356 if len(self.applied) == 0:
354 if len(self.applied) == 0:
357 return None
355 return None
358 return revlog.bin(self.applied[-1].rev)
356 return revlog.bin(self.applied[-1].rev)
359 pp = repo.changelog.parents(rev)
357 pp = repo.changelog.parents(rev)
360 if pp[1] != revlog.nullid:
358 if pp[1] != revlog.nullid:
361 arevs = [ x.rev for x in self.applied ]
359 arevs = [ x.rev for x in self.applied ]
362 p0 = revlog.hex(pp[0])
360 p0 = revlog.hex(pp[0])
363 p1 = revlog.hex(pp[1])
361 p1 = revlog.hex(pp[1])
364 if p0 in arevs:
362 if p0 in arevs:
365 return pp[0]
363 return pp[0]
366 if p1 in arevs:
364 if p1 in arevs:
367 return pp[1]
365 return pp[1]
368 return pp[0]
366 return pp[0]
369
367
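# Illustrative sketch, not part of mq.py: the parent choice made by
# qparents() above for a merge revision, on a toy representation where
# None stands for the null parent. The parent that is itself an applied
# mq patch becomes the patch queue parent; otherwise the first parent is
# used. The names below are invented for the example.
def pick_queue_parent(parents, applied):
    p0, p1 = parents
    if p1 is not None:
        if p0 in applied:
            return p0
        if p1 in applied:
            return p1
    return p0

assert pick_queue_parent(('upstream', 'patch1'), {'patch1'}) == 'patch1'
assert pick_queue_parent(('upstream', None), {'patch1'}) == 'upstream'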
370 def mergepatch(self, repo, mergeq, series, wlock):
368 def mergepatch(self, repo, mergeq, series, wlock):
371 if len(self.applied) == 0:
369 if len(self.applied) == 0:
372 # each of the patches merged in will have two parents. This
370 # each of the patches merged in will have two parents. This
373 # can confuse the qrefresh, qdiff, and strip code because it
371 # can confuse the qrefresh, qdiff, and strip code because it
374 # needs to know which parent is actually in the patch queue.
372 # needs to know which parent is actually in the patch queue.
375 # so, we insert a merge marker with only one parent. This way
373 # so, we insert a merge marker with only one parent. This way
376 # the first patch in the queue is never a merge patch
374 # the first patch in the queue is never a merge patch
377 #
375 #
378 pname = ".hg.patches.merge.marker"
376 pname = ".hg.patches.merge.marker"
379 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
377 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
380 wlock=wlock)
378 wlock=wlock)
381 self.applied.append(statusentry(revlog.hex(n), pname))
379 self.applied.append(statusentry(revlog.hex(n), pname))
382 self.applied_dirty = 1
380 self.applied_dirty = 1
383
381
384 head = self.qparents(repo)
382 head = self.qparents(repo)
385
383
386 for patch in series:
384 for patch in series:
387 patch = mergeq.lookup(patch, strict=True)
385 patch = mergeq.lookup(patch, strict=True)
388 if not patch:
386 if not patch:
389 self.ui.warn("patch %s does not exist\n" % patch)
387 self.ui.warn("patch %s does not exist\n" % patch)
390 return (1, None)
388 return (1, None)
391 pushable, reason = self.pushable(patch)
389 pushable, reason = self.pushable(patch)
392 if not pushable:
390 if not pushable:
393 self.explain_pushable(patch, all_patches=True)
391 self.explain_pushable(patch, all_patches=True)
394 continue
392 continue
395 info = mergeq.isapplied(patch)
393 info = mergeq.isapplied(patch)
396 if not info:
394 if not info:
397 self.ui.warn("patch %s is not applied\n" % patch)
395 self.ui.warn("patch %s is not applied\n" % patch)
398 return (1, None)
396 return (1, None)
399 rev = revlog.bin(info[1])
397 rev = revlog.bin(info[1])
400 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
398 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
401 if head:
399 if head:
402 self.applied.append(statusentry(revlog.hex(head), patch))
400 self.applied.append(statusentry(revlog.hex(head), patch))
403 self.applied_dirty = 1
401 self.applied_dirty = 1
404 if err:
402 if err:
405 return (err, head)
403 return (err, head)
406 return (0, head)
404 return (0, head)
407
405
408 def patch(self, repo, patchfile):
406 def patch(self, repo, patchfile):
409 '''Apply patchfile to the working directory.
407 '''Apply patchfile to the working directory.
410 patchfile: file name of patch'''
408 patchfile: file name of patch'''
411 files = {}
409 files = {}
412 try:
410 try:
413 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
411 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
414 files=files)
412 files=files)
415 except Exception, inst:
413 except Exception, inst:
416 self.ui.note(str(inst) + '\n')
414 self.ui.note(str(inst) + '\n')
417 if not self.ui.verbose:
415 if not self.ui.verbose:
418 self.ui.warn("patch failed, unable to continue (try -v)\n")
416 self.ui.warn("patch failed, unable to continue (try -v)\n")
419 return (False, files, False)
417 return (False, files, False)
420
418
421 return (True, files, fuzz)
419 return (True, files, fuzz)
422
420
423 def apply(self, repo, series, list=False, update_status=True,
421 def apply(self, repo, series, list=False, update_status=True,
424 strict=False, patchdir=None, merge=None, wlock=None):
422 strict=False, patchdir=None, merge=None, wlock=None):
425 # TODO unify with commands.py
423 # TODO unify with commands.py
426 if not patchdir:
424 if not patchdir:
427 patchdir = self.path
425 patchdir = self.path
428 err = 0
426 err = 0
429 if not wlock:
427 if not wlock:
430 wlock = repo.wlock()
428 wlock = repo.wlock()
431 lock = repo.lock()
429 lock = repo.lock()
432 tr = repo.transaction()
430 tr = repo.transaction()
433 n = None
431 n = None
434 for patchname in series:
432 for patchname in series:
435 pushable, reason = self.pushable(patchname)
433 pushable, reason = self.pushable(patchname)
436 if not pushable:
434 if not pushable:
437 self.explain_pushable(patchname, all_patches=True)
435 self.explain_pushable(patchname, all_patches=True)
438 continue
436 continue
439 self.ui.warn("applying %s\n" % patchname)
437 self.ui.warn("applying %s\n" % patchname)
440 pf = os.path.join(patchdir, patchname)
438 pf = os.path.join(patchdir, patchname)
441
439
442 try:
440 try:
443 message, comments, user, date, patchfound = self.readheaders(patchname)
441 message, comments, user, date, patchfound = self.readheaders(patchname)
444 except:
442 except:
445 self.ui.warn("Unable to read %s\n" % patchname)
443 self.ui.warn("Unable to read %s\n" % patchname)
446 err = 1
444 err = 1
447 break
445 break
448
446
449 if not message:
447 if not message:
450 message = "imported patch %s\n" % patchname
448 message = "imported patch %s\n" % patchname
451 else:
449 else:
452 if list:
450 if list:
453 message.append("\nimported patch %s" % patchname)
451 message.append("\nimported patch %s" % patchname)
454 message = '\n'.join(message)
452 message = '\n'.join(message)
455
453
456 (patcherr, files, fuzz) = self.patch(repo, pf)
454 (patcherr, files, fuzz) = self.patch(repo, pf)
457 patcherr = not patcherr
455 patcherr = not patcherr
458
456
459 if merge and files:
457 if merge and files:
460 # Mark as merged and update dirstate parent info
458 # Mark as merged and update dirstate parent info
461 repo.dirstate.update(repo.dirstate.filterfiles(files.keys()), 'm')
459 repo.dirstate.update(repo.dirstate.filterfiles(files.keys()), 'm')
462 p1, p2 = repo.dirstate.parents()
460 p1, p2 = repo.dirstate.parents()
463 repo.dirstate.setparents(p1, merge)
461 repo.dirstate.setparents(p1, merge)
464 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
462 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
465 n = repo.commit(files, message, user, date, force=1, lock=lock,
463 n = repo.commit(files, message, user, date, force=1, lock=lock,
466 wlock=wlock)
464 wlock=wlock)
467
465
468 if n == None:
466 if n == None:
469 raise util.Abort(_("repo commit failed"))
467 raise util.Abort(_("repo commit failed"))
470
468
471 if update_status:
469 if update_status:
472 self.applied.append(statusentry(revlog.hex(n), patchname))
470 self.applied.append(statusentry(revlog.hex(n), patchname))
473
471
474 if patcherr:
472 if patcherr:
475 if not patchfound:
473 if not patchfound:
476 self.ui.warn("patch %s is empty\n" % patchname)
474 self.ui.warn("patch %s is empty\n" % patchname)
477 err = 0
475 err = 0
478 else:
476 else:
479 self.ui.warn("patch failed, rejects left in working dir\n")
477 self.ui.warn("patch failed, rejects left in working dir\n")
480 err = 1
478 err = 1
481 break
479 break
482
480
483 if fuzz and strict:
481 if fuzz and strict:
484 self.ui.warn("fuzz found when applying patch, stopping\n")
482 self.ui.warn("fuzz found when applying patch, stopping\n")
485 err = 1
483 err = 1
486 break
484 break
487 tr.close()
485 tr.close()
488 return (err, n)
486 return (err, n)
489
487
490 def delete(self, repo, patches, opts):
488 def delete(self, repo, patches, opts):
491 realpatches = []
489 realpatches = []
492 for patch in patches:
490 for patch in patches:
493 patch = self.lookup(patch, strict=True)
491 patch = self.lookup(patch, strict=True)
494 info = self.isapplied(patch)
492 info = self.isapplied(patch)
495 if info:
493 if info:
496 raise util.Abort(_("cannot delete applied patch %s") % patch)
494 raise util.Abort(_("cannot delete applied patch %s") % patch)
497 if patch not in self.series:
495 if patch not in self.series:
498 raise util.Abort(_("patch %s not in series file") % patch)
496 raise util.Abort(_("patch %s not in series file") % patch)
499 realpatches.append(patch)
497 realpatches.append(patch)
500
498
501 appliedbase = 0
499 appliedbase = 0
502 if opts.get('rev'):
500 if opts.get('rev'):
503 if not self.applied:
501 if not self.applied:
504 raise util.Abort(_('no patches applied'))
502 raise util.Abort(_('no patches applied'))
505 revs = cmdutil.revrange(repo, opts['rev'])
503 revs = cmdutil.revrange(repo, opts['rev'])
506 if len(revs) > 1 and revs[0] > revs[1]:
504 if len(revs) > 1 and revs[0] > revs[1]:
507 revs.reverse()
505 revs.reverse()
508 for rev in revs:
506 for rev in revs:
509 if appliedbase >= len(self.applied):
507 if appliedbase >= len(self.applied):
510 raise util.Abort(_("revision %d is not managed") % rev)
508 raise util.Abort(_("revision %d is not managed") % rev)
511
509
512 base = revlog.bin(self.applied[appliedbase].rev)
510 base = revlog.bin(self.applied[appliedbase].rev)
513 node = repo.changelog.node(rev)
511 node = repo.changelog.node(rev)
514 if node != base:
512 if node != base:
515 raise util.Abort(_("cannot delete revision %d above "
513 raise util.Abort(_("cannot delete revision %d above "
516 "applied patches") % rev)
514 "applied patches") % rev)
517 realpatches.append(self.applied[appliedbase].name)
515 realpatches.append(self.applied[appliedbase].name)
518 appliedbase += 1
516 appliedbase += 1
519
517
520 if not opts.get('keep'):
518 if not opts.get('keep'):
521 r = self.qrepo()
519 r = self.qrepo()
522 if r:
520 if r:
523 r.remove(realpatches, True)
521 r.remove(realpatches, True)
524 else:
522 else:
525 for p in realpatches:
523 for p in realpatches:
526 os.unlink(self.join(p))
524 os.unlink(self.join(p))
527
525
528 if appliedbase:
526 if appliedbase:
529 del self.applied[:appliedbase]
527 del self.applied[:appliedbase]
530 self.applied_dirty = 1
528 self.applied_dirty = 1
531 indices = [self.find_series(p) for p in realpatches]
529 indices = [self.find_series(p) for p in realpatches]
532 indices.sort()
530 indices.sort()
533 for i in indices[-1::-1]:
531 for i in indices[-1::-1]:
534 del self.full_series[i]
532 del self.full_series[i]
535 self.parse_series()
533 self.parse_series()
536 self.series_dirty = 1
534 self.series_dirty = 1
537
535
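# Illustrative sketch, not part of mq.py: the ordering check delete()
# above performs when a --rev range is given. Applied patches can only be
# removed from the bottom of the stack, so each requested revision must
# match the next entry at the bottom of self.applied. Names are invented
# for the example.
def deletable_prefix(revs, applied):
    return list(revs) == list(applied[:len(revs)])

applied_stack = ['rev-a', 'rev-b', 'rev-c']              # bottom -> top
assert deletable_prefix(['rev-a', 'rev-b'], applied_stack)
assert not deletable_prefix(['rev-b'], applied_stack)    # skips the bottom patch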
538 def check_toppatch(self, repo):
536 def check_toppatch(self, repo):
539 if len(self.applied) > 0:
537 if len(self.applied) > 0:
540 top = revlog.bin(self.applied[-1].rev)
538 top = revlog.bin(self.applied[-1].rev)
541 pp = repo.dirstate.parents()
539 pp = repo.dirstate.parents()
542 if top not in pp:
540 if top not in pp:
543 raise util.Abort(_("queue top not at same revision as working directory"))
541 raise util.Abort(_("queue top not at same revision as working directory"))
544 return top
542 return top
545 return None
543 return None
546 def check_localchanges(self, repo, force=False, refresh=True):
544 def check_localchanges(self, repo, force=False, refresh=True):
547 m, a, r, d = repo.status()[:4]
545 m, a, r, d = repo.status()[:4]
548 if m or a or r or d:
546 if m or a or r or d:
549 if not force:
547 if not force:
550 if refresh:
548 if refresh:
551 raise util.Abort(_("local changes found, refresh first"))
549 raise util.Abort(_("local changes found, refresh first"))
552 else:
550 else:
553 raise util.Abort(_("local changes found"))
551 raise util.Abort(_("local changes found"))
554 return m, a, r, d
552 return m, a, r, d
555 def new(self, repo, patch, msg=None, force=None):
553 def new(self, repo, patch, msg=None, force=None):
556 if os.path.exists(self.join(patch)):
554 if os.path.exists(self.join(patch)):
557 raise util.Abort(_('patch "%s" already exists') % patch)
555 raise util.Abort(_('patch "%s" already exists') % patch)
558 m, a, r, d = self.check_localchanges(repo, force)
556 m, a, r, d = self.check_localchanges(repo, force)
559 commitfiles = m + a + r
557 commitfiles = m + a + r
560 self.check_toppatch(repo)
558 self.check_toppatch(repo)
561 wlock = repo.wlock()
559 wlock = repo.wlock()
562 insert = self.full_series_end()
560 insert = self.full_series_end()
563 if msg:
561 if msg:
564 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
562 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
565 wlock=wlock)
563 wlock=wlock)
566 else:
564 else:
567 n = repo.commit(commitfiles,
565 n = repo.commit(commitfiles,
568 "New patch: %s" % patch, force=True, wlock=wlock)
566 "New patch: %s" % patch, force=True, wlock=wlock)
569 if n == None:
567 if n == None:
570 raise util.Abort(_("repo commit failed"))
568 raise util.Abort(_("repo commit failed"))
571 self.full_series[insert:insert] = [patch]
569 self.full_series[insert:insert] = [patch]
572 self.applied.append(statusentry(revlog.hex(n), patch))
570 self.applied.append(statusentry(revlog.hex(n), patch))
573 self.parse_series()
571 self.parse_series()
574 self.series_dirty = 1
572 self.series_dirty = 1
575 self.applied_dirty = 1
573 self.applied_dirty = 1
576 p = self.opener(patch, "w")
574 p = self.opener(patch, "w")
577 if msg:
575 if msg:
578 msg = msg + "\n"
576 msg = msg + "\n"
579 p.write(msg)
577 p.write(msg)
580 p.close()
578 p.close()
581 wlock = None
579 wlock = None
582 r = self.qrepo()
580 r = self.qrepo()
583 if r: r.add([patch])
581 if r: r.add([patch])
584 if commitfiles:
582 if commitfiles:
585 self.refresh(repo, short=True)
583 self.refresh(repo, short=True)
586
584
587 def strip(self, repo, rev, update=True, backup="all", wlock=None):
585 def strip(self, repo, rev, update=True, backup="all", wlock=None):
588 def limitheads(chlog, stop):
586 def limitheads(chlog, stop):
589 """return the list of all nodes that have no children"""
587 """return the list of all nodes that have no children"""
590 p = {}
588 p = {}
591 h = []
589 h = []
592 stoprev = 0
590 stoprev = 0
593 if stop in chlog.nodemap:
591 if stop in chlog.nodemap:
594 stoprev = chlog.rev(stop)
592 stoprev = chlog.rev(stop)
595
593
596 for r in xrange(chlog.count() - 1, -1, -1):
594 for r in xrange(chlog.count() - 1, -1, -1):
597 n = chlog.node(r)
595 n = chlog.node(r)
598 if n not in p:
596 if n not in p:
599 h.append(n)
597 h.append(n)
600 if n == stop:
598 if n == stop:
601 break
599 break
602 if r < stoprev:
600 if r < stoprev:
603 break
601 break
604 for pn in chlog.parents(n):
602 for pn in chlog.parents(n):
605 p[pn] = 1
603 p[pn] = 1
606 return h
604 return h
607
605
608 def bundle(cg):
606 def bundle(cg):
609 backupdir = repo.join("strip-backup")
607 backupdir = repo.join("strip-backup")
610 if not os.path.isdir(backupdir):
608 if not os.path.isdir(backupdir):
611 os.mkdir(backupdir)
609 os.mkdir(backupdir)
612 name = os.path.join(backupdir, "%s" % revlog.short(rev))
610 name = os.path.join(backupdir, "%s" % revlog.short(rev))
613 name = savename(name)
611 name = savename(name)
614 self.ui.warn("saving bundle to %s\n" % name)
612 self.ui.warn("saving bundle to %s\n" % name)
615 return changegroup.writebundle(cg, name, "HG10BZ")
613 return changegroup.writebundle(cg, name, "HG10BZ")
616
614
617 def stripall(rev, revnum):
615 def stripall(rev, revnum):
618 cl = repo.changelog
616 cl = repo.changelog
619 c = cl.read(rev)
617 c = cl.read(rev)
620 mm = repo.manifest.read(c[0])
618 mm = repo.manifest.read(c[0])
621 seen = {}
619 seen = {}
622
620
623 for x in xrange(revnum, cl.count()):
621 for x in xrange(revnum, cl.count()):
624 c = cl.read(cl.node(x))
622 c = cl.read(cl.node(x))
625 for f in c[3]:
623 for f in c[3]:
626 if f in seen:
624 if f in seen:
627 continue
625 continue
628 seen[f] = 1
626 seen[f] = 1
629 if f in mm:
627 if f in mm:
630 filerev = mm[f]
628 filerev = mm[f]
631 else:
629 else:
632 filerev = 0
630 filerev = 0
633 seen[f] = filerev
631 seen[f] = filerev
634 # we go in two steps here so the strip loop happens in a
632 # we go in two steps here so the strip loop happens in a
635 # sensible order. When stripping many files, this helps keep
633 # sensible order. When stripping many files, this helps keep
636 # our disk access patterns under control.
634 # our disk access patterns under control.
637 seen_list = seen.keys()
635 seen_list = seen.keys()
638 seen_list.sort()
636 seen_list.sort()
639 for f in seen_list:
637 for f in seen_list:
640 ff = repo.file(f)
638 ff = repo.file(f)
641 filerev = seen[f]
639 filerev = seen[f]
642 if filerev != 0:
640 if filerev != 0:
643 if filerev in ff.nodemap:
641 if filerev in ff.nodemap:
644 filerev = ff.rev(filerev)
642 filerev = ff.rev(filerev)
645 else:
643 else:
646 filerev = 0
644 filerev = 0
647 ff.strip(filerev, revnum)
645 ff.strip(filerev, revnum)
648
646
649 if not wlock:
647 if not wlock:
650 wlock = repo.wlock()
648 wlock = repo.wlock()
651 lock = repo.lock()
649 lock = repo.lock()
652 chlog = repo.changelog
650 chlog = repo.changelog
653 # TODO delete the undo files, and handle undo of merge sets
651 # TODO delete the undo files, and handle undo of merge sets
654 pp = chlog.parents(rev)
652 pp = chlog.parents(rev)
655 revnum = chlog.rev(rev)
653 revnum = chlog.rev(rev)
656
654
657 if update:
655 if update:
658 self.check_localchanges(repo, refresh=False)
656 self.check_localchanges(repo, refresh=False)
659 urev = self.qparents(repo, rev)
657 urev = self.qparents(repo, rev)
660 hg.clean(repo, urev, wlock=wlock)
658 hg.clean(repo, urev, wlock=wlock)
661 repo.dirstate.write()
659 repo.dirstate.write()
662
660
663 # save is a list of all the branches we are truncating away
661 # save is a list of all the branches we are truncating away
664 # that we actually want to keep. changegroup will be used
662 # that we actually want to keep. changegroup will be used
665 # to preserve them and add them back after the truncate
663 # to preserve them and add them back after the truncate
666 saveheads = []
664 saveheads = []
667 savebases = {}
665 savebases = {}
668
666
669 heads = limitheads(chlog, rev)
667 heads = limitheads(chlog, rev)
670 seen = {}
668 seen = {}
671
669
672 # search through all the heads, finding those where the revision
670 # search through all the heads, finding those where the revision
673 # we want to strip away is an ancestor. Also look for merges
671 # we want to strip away is an ancestor. Also look for merges
674 # that might be turned into new heads by the strip.
672 # that might be turned into new heads by the strip.
675 while heads:
673 while heads:
676 h = heads.pop()
674 h = heads.pop()
677 n = h
675 n = h
678 while True:
676 while True:
679 seen[n] = 1
677 seen[n] = 1
680 pp = chlog.parents(n)
678 pp = chlog.parents(n)
681 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
679 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
682 if pp[1] not in seen:
680 if pp[1] not in seen:
683 heads.append(pp[1])
681 heads.append(pp[1])
684 if pp[0] == revlog.nullid:
682 if pp[0] == revlog.nullid:
685 break
683 break
686 if chlog.rev(pp[0]) < revnum:
684 if chlog.rev(pp[0]) < revnum:
687 break
685 break
688 n = pp[0]
686 n = pp[0]
689 if n == rev:
687 if n == rev:
690 break
688 break
691 r = chlog.reachable(h, rev)
689 r = chlog.reachable(h, rev)
692 if rev not in r:
690 if rev not in r:
693 saveheads.append(h)
691 saveheads.append(h)
694 for x in r:
692 for x in r:
695 if chlog.rev(x) > revnum:
693 if chlog.rev(x) > revnum:
696 savebases[x] = 1
694 savebases[x] = 1
697
695
698 # create a changegroup for all the branches we need to keep
696 # create a changegroup for all the branches we need to keep
699 if backup == "all":
697 if backup == "all":
700 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
698 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
701 bundle(backupch)
699 bundle(backupch)
702 if saveheads:
700 if saveheads:
703 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
701 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
704 chgrpfile = bundle(backupch)
702 chgrpfile = bundle(backupch)
705
703
706 stripall(rev, revnum)
704 stripall(rev, revnum)
707
705
708 change = chlog.read(rev)
706 change = chlog.read(rev)
709 chlog.strip(revnum, revnum)
707 chlog.strip(revnum, revnum)
710 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
708 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
711 if saveheads:
709 if saveheads:
712 self.ui.status("adding branch\n")
710 self.ui.status("adding branch\n")
713 commands.unbundle(self.ui, repo, chgrpfile, update=False)
711 commands.unbundle(self.ui, repo, chgrpfile, update=False)
714 if backup != "strip":
712 if backup != "strip":
715 os.unlink(chgrpfile)
713 os.unlink(chgrpfile)
716
714
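# Illustrative sketch, not part of mq.py: why strip() above bundles some
# branches before truncating. On a toy DAG (node -> list of parents), any
# head that does not descend from the stripped revision would otherwise be
# destroyed by the truncate, so it is saved to a bundle and re-added
# afterwards. Node names are invented for the example.
toy_dag = {
    'a': [],            # root
    'b': ['a'],
    'c': ['b'],         # 'c' is the revision being stripped
    'd': ['a'],
    'e': ['d'],         # unrelated branch that must survive
}

def descends_from(dag, node, ancestor):
    stack, seen = [node], set()
    while stack:
        n = stack.pop()
        if n == ancestor:
            return True
        if n not in seen:
            seen.add(n)
            stack.extend(dag[n])
    return False

all_parents = set(p for ps in toy_dag.values() for p in ps)
heads = [n for n in toy_dag if n not in all_parents]
saveheads = sorted(h for h in heads if not descends_from(toy_dag, h, 'c'))
assert saveheads == ['e']   # 'e' gets bundled; stripping removes only 'c'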
717 def isapplied(self, patch):
715 def isapplied(self, patch):
718 """returns (index, rev, patch)"""
716 """returns (index, rev, patch)"""
719 for i in xrange(len(self.applied)):
717 for i in xrange(len(self.applied)):
720 a = self.applied[i]
718 a = self.applied[i]
721 if a.name == patch:
719 if a.name == patch:
722 return (i, a.rev, a.name)
720 return (i, a.rev, a.name)
723 return None
721 return None
724
722
725 # if the exact patch name does not exist, we try a few
723 # if the exact patch name does not exist, we try a few
726 # variations. If strict is passed, we try only #1
724 # variations. If strict is passed, we try only #1
727 #
725 #
728 # 1) a number to indicate an offset in the series file
726 # 1) a number to indicate an offset in the series file
729 # 2) a unique substring of the patch name was given
727 # 2) a unique substring of the patch name was given
730 # 3) patchname[-+]num to indicate an offset in the series file
728 # 3) patchname[-+]num to indicate an offset in the series file
731 def lookup(self, patch, strict=False):
729 def lookup(self, patch, strict=False):
732 patch = patch and str(patch)
730 patch = patch and str(patch)
733
731
734 def partial_name(s):
732 def partial_name(s):
735 if s in self.series:
733 if s in self.series:
736 return s
734 return s
737 matches = [x for x in self.series if s in x]
735 matches = [x for x in self.series if s in x]
738 if len(matches) > 1:
736 if len(matches) > 1:
739 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
737 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
740 for m in matches:
738 for m in matches:
741 self.ui.warn(' %s\n' % m)
739 self.ui.warn(' %s\n' % m)
742 return None
740 return None
743 if matches:
741 if matches:
744 return matches[0]
742 return matches[0]
745 if len(self.series) > 0 and len(self.applied) > 0:
743 if len(self.series) > 0 and len(self.applied) > 0:
746 if s == 'qtip':
744 if s == 'qtip':
747 return self.series[self.series_end(True)-1]
745 return self.series[self.series_end(True)-1]
748 if s == 'qbase':
746 if s == 'qbase':
749 return self.series[0]
747 return self.series[0]
750 return None
748 return None
751 if patch == None:
749 if patch == None:
752 return None
750 return None
753
751
754 # we don't want to return a partial match until we make
752 # we don't want to return a partial match until we make
755 # sure the file name passed in does not exist (checked below)
753 # sure the file name passed in does not exist (checked below)
756 res = partial_name(patch)
754 res = partial_name(patch)
757 if res and res == patch:
755 if res and res == patch:
758 return res
756 return res
759
757
760 if not os.path.isfile(self.join(patch)):
758 if not os.path.isfile(self.join(patch)):
761 try:
759 try:
762 sno = int(patch)
760 sno = int(patch)
763 except(ValueError, OverflowError):
761 except(ValueError, OverflowError):
764 pass
762 pass
765 else:
763 else:
766 if sno < len(self.series):
764 if sno < len(self.series):
767 return self.series[sno]
765 return self.series[sno]
768 if not strict:
766 if not strict:
769 # return any partial match made above
767 # return any partial match made above
770 if res:
768 if res:
771 return res
769 return res
772 minus = patch.rfind('-')
770 minus = patch.rfind('-')
773 if minus >= 0:
771 if minus >= 0:
774 res = partial_name(patch[:minus])
772 res = partial_name(patch[:minus])
775 if res:
773 if res:
776 i = self.series.index(res)
774 i = self.series.index(res)
777 try:
775 try:
778 off = int(patch[minus+1:] or 1)
776 off = int(patch[minus+1:] or 1)
779 except(ValueError, OverflowError):
777 except(ValueError, OverflowError):
780 pass
778 pass
781 else:
779 else:
782 if i - off >= 0:
780 if i - off >= 0:
783 return self.series[i - off]
781 return self.series[i - off]
784 plus = patch.rfind('+')
782 plus = patch.rfind('+')
785 if plus >= 0:
783 if plus >= 0:
786 res = partial_name(patch[:plus])
784 res = partial_name(patch[:plus])
787 if res:
785 if res:
788 i = self.series.index(res)
786 i = self.series.index(res)
789 try:
787 try:
790 off = int(patch[plus+1:] or 1)
788 off = int(patch[plus+1:] or 1)
791 except(ValueError, OverflowError):
789 except(ValueError, OverflowError):
792 pass
790 pass
793 else:
791 else:
794 if i + off < len(self.series):
792 if i + off < len(self.series):
795 return self.series[i + off]
793 return self.series[i + off]
796 raise util.Abort(_("patch %s not in series") % patch)
794 raise util.Abort(_("patch %s not in series") % patch)
797
795
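# Illustrative sketch, not part of mq.py: the name-resolution order that
# lookup() above implements, reduced to a toy series. An integer selects
# by position, a unique substring matches by name, and 'name-N'/'name+N'
# step backwards or forwards from a named patch. Patch names are invented
# for the example.
series = ['fix-build', 'feature-x', 'feature-x-tests', 'cleanup']

def toy_lookup(s):
    if s.isdigit():                          # 1) offset into the series file
        return series[int(s)]
    matches = [p for p in series if s in p]
    if len(matches) == 1:                    # 2) unique substring of a name
        return matches[0]
    for sep in ('-', '+'):                   # 3) patchname[-+]num offsets
        i = s.rfind(sep)
        if i > 0 and s[:i] in series and s[i + 1:].isdigit():
            step = int(s[i + 1:])
            idx = series.index(s[:i]) + (step if sep == '+' else -step)
            if 0 <= idx < len(series):
                return series[idx]
    return None

assert toy_lookup('0') == 'fix-build'
assert toy_lookup('clean') == 'cleanup'
assert toy_lookup('feature-x+1') == 'feature-x-tests'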
798 def push(self, repo, patch=None, force=False, list=False,
796 def push(self, repo, patch=None, force=False, list=False,
799 mergeq=None, wlock=None):
797 mergeq=None, wlock=None):
800 if not wlock:
798 if not wlock:
801 wlock = repo.wlock()
799 wlock = repo.wlock()
802 patch = self.lookup(patch)
800 patch = self.lookup(patch)
803 if patch and self.isapplied(patch):
801 if patch and self.isapplied(patch):
804 raise util.Abort(_("patch %s is already applied") % patch)
802 raise util.Abort(_("patch %s is already applied") % patch)
805 if self.series_end() == len(self.series):
803 if self.series_end() == len(self.series):
806 raise util.Abort(_("patch series fully applied"))
804 raise util.Abort(_("patch series fully applied"))
807 if not force:
805 if not force:
808 self.check_localchanges(repo)
806 self.check_localchanges(repo)
809
807
810 self.applied_dirty = 1;
808 self.applied_dirty = 1;
811 start = self.series_end()
809 start = self.series_end()
812 if start > 0:
810 if start > 0:
813 self.check_toppatch(repo)
811 self.check_toppatch(repo)
814 if not patch:
812 if not patch:
815 patch = self.series[start]
813 patch = self.series[start]
816 end = start + 1
814 end = start + 1
817 else:
815 else:
818 end = self.series.index(patch, start) + 1
816 end = self.series.index(patch, start) + 1
819 s = self.series[start:end]
817 s = self.series[start:end]
820 if mergeq:
818 if mergeq:
821 ret = self.mergepatch(repo, mergeq, s, wlock)
819 ret = self.mergepatch(repo, mergeq, s, wlock)
822 else:
820 else:
823 ret = self.apply(repo, s, list, wlock=wlock)
821 ret = self.apply(repo, s, list, wlock=wlock)
824 top = self.applied[-1].name
822 top = self.applied[-1].name
825 if ret[0]:
823 if ret[0]:
826 self.ui.write("Errors during apply, please fix and refresh %s\n" %
824 self.ui.write("Errors during apply, please fix and refresh %s\n" %
827 top)
825 top)
828 else:
826 else:
829 self.ui.write("Now at: %s\n" % top)
827 self.ui.write("Now at: %s\n" % top)
830 return ret[0]
828 return ret[0]
831
829
832 def pop(self, repo, patch=None, force=False, update=True, all=False,
830 def pop(self, repo, patch=None, force=False, update=True, all=False,
833 wlock=None):
831 wlock=None):
834 def getfile(f, rev):
832 def getfile(f, rev):
835 t = repo.file(f).read(rev)
833 t = repo.file(f).read(rev)
836 try:
834 try:
837 repo.wfile(f, "w").write(t)
835 repo.wfile(f, "w").write(t)
838 except IOError:
836 except IOError:
839 try:
837 try:
840 os.makedirs(os.path.dirname(repo.wjoin(f)))
838 os.makedirs(os.path.dirname(repo.wjoin(f)))
841 except OSError, err:
839 except OSError, err:
842 if err.errno != errno.EEXIST: raise
840 if err.errno != errno.EEXIST: raise
843 repo.wfile(f, "w").write(t)
841 repo.wfile(f, "w").write(t)
844
842
845 if not wlock:
843 if not wlock:
846 wlock = repo.wlock()
844 wlock = repo.wlock()
847 if patch:
845 if patch:
848 # index, rev, patch
846 # index, rev, patch
849 info = self.isapplied(patch)
847 info = self.isapplied(patch)
850 if not info:
848 if not info:
851 patch = self.lookup(patch)
849 patch = self.lookup(patch)
852 info = self.isapplied(patch)
850 info = self.isapplied(patch)
853 if not info:
851 if not info:
854 raise util.Abort(_("patch %s is not applied") % patch)
852 raise util.Abort(_("patch %s is not applied") % patch)
855 if len(self.applied) == 0:
853 if len(self.applied) == 0:
856 raise util.Abort(_("no patches applied"))
854 raise util.Abort(_("no patches applied"))
857
855
858 if not update:
856 if not update:
859 parents = repo.dirstate.parents()
857 parents = repo.dirstate.parents()
860 rr = [ revlog.bin(x.rev) for x in self.applied ]
858 rr = [ revlog.bin(x.rev) for x in self.applied ]
861 for p in parents:
859 for p in parents:
862 if p in rr:
860 if p in rr:
863 self.ui.warn("qpop: forcing dirstate update\n")
861 self.ui.warn("qpop: forcing dirstate update\n")
864 update = True
862 update = True
865
863
866 if not force and update:
864 if not force and update:
867 self.check_localchanges(repo)
865 self.check_localchanges(repo)
868
866
869 self.applied_dirty = 1;
867 self.applied_dirty = 1;
870 end = len(self.applied)
868 end = len(self.applied)
871 if not patch:
869 if not patch:
872 if all:
870 if all:
873 popi = 0
871 popi = 0
874 else:
872 else:
875 popi = len(self.applied) - 1
873 popi = len(self.applied) - 1
876 else:
874 else:
877 popi = info[0] + 1
875 popi = info[0] + 1
878 if popi >= end:
876 if popi >= end:
879 self.ui.warn("qpop: %s is already at the top\n" % patch)
877 self.ui.warn("qpop: %s is already at the top\n" % patch)
880 return
878 return
881 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
879 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
882
880
883 start = info[0]
881 start = info[0]
884 rev = revlog.bin(info[1])
882 rev = revlog.bin(info[1])
885
883
886 # we know there are no local changes, so we can make a simplified
884 # we know there are no local changes, so we can make a simplified
887 # form of hg.update.
885 # form of hg.update.
888 if update:
886 if update:
889 top = self.check_toppatch(repo)
887 top = self.check_toppatch(repo)
890 qp = self.qparents(repo, rev)
888 qp = self.qparents(repo, rev)
891 changes = repo.changelog.read(qp)
889 changes = repo.changelog.read(qp)
892 mmap = repo.manifest.read(changes[0])
890 mmap = repo.manifest.read(changes[0])
893 m, a, r, d, u = repo.status(qp, top)[:5]
891 m, a, r, d, u = repo.status(qp, top)[:5]
894 if d:
892 if d:
895 raise util.Abort("deletions found between repo revs")
893 raise util.Abort("deletions found between repo revs")
896 for f in m:
894 for f in m:
897 getfile(f, mmap[f])
895 getfile(f, mmap[f])
898 for f in r:
896 for f in r:
899 getfile(f, mmap[f])
897 getfile(f, mmap[f])
900 util.set_exec(repo.wjoin(f), mmap.execf(f))
898 util.set_exec(repo.wjoin(f), mmap.execf(f))
901 repo.dirstate.update(m + r, 'n')
899 repo.dirstate.update(m + r, 'n')
902 for f in a:
900 for f in a:
903 try:
901 try:
904 os.unlink(repo.wjoin(f))
902 os.unlink(repo.wjoin(f))
905 except OSError, e:
903 except OSError, e:
906 if e.errno != errno.ENOENT:
904 if e.errno != errno.ENOENT:
907 raise
905 raise
908 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
906 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
909 except: pass
907 except: pass
910 if a:
908 if a:
911 repo.dirstate.forget(a)
909 repo.dirstate.forget(a)
912 repo.dirstate.setparents(qp, revlog.nullid)
910 repo.dirstate.setparents(qp, revlog.nullid)
913 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
911 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
914 del self.applied[start:end]
912 del self.applied[start:end]
915 if len(self.applied):
913 if len(self.applied):
916 self.ui.write("Now at: %s\n" % self.applied[-1].name)
914 self.ui.write("Now at: %s\n" % self.applied[-1].name)
917 else:
915 else:
918 self.ui.write("Patch queue now empty\n")
916 self.ui.write("Patch queue now empty\n")
919
917
920 def diff(self, repo, pats, opts):
918 def diff(self, repo, pats, opts):
921 top = self.check_toppatch(repo)
919 top = self.check_toppatch(repo)
922 if not top:
920 if not top:
923 self.ui.write("No patches applied\n")
921 self.ui.write("No patches applied\n")
924 return
922 return
925 qp = self.qparents(repo, top)
923 qp = self.qparents(repo, top)
926 if opts.get('git'):
924 if opts.get('git'):
927 self.diffopts().git = True
925 self.diffopts().git = True
928 self.printdiff(repo, qp, files=pats, opts=opts)
926 self.printdiff(repo, qp, files=pats, opts=opts)
929
927
930 def refresh(self, repo, pats=None, **opts):
928 def refresh(self, repo, pats=None, **opts):
931 if len(self.applied) == 0:
929 if len(self.applied) == 0:
932 self.ui.write("No patches applied\n")
930 self.ui.write("No patches applied\n")
933 return 1
931 return 1
934 wlock = repo.wlock()
932 wlock = repo.wlock()
935 self.check_toppatch(repo)
933 self.check_toppatch(repo)
936 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
934 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
937 top = revlog.bin(top)
935 top = revlog.bin(top)
938 cparents = repo.changelog.parents(top)
936 cparents = repo.changelog.parents(top)
939 patchparent = self.qparents(repo, top)
937 patchparent = self.qparents(repo, top)
940 message, comments, user, date, patchfound = self.readheaders(patchfn)
938 message, comments, user, date, patchfound = self.readheaders(patchfn)
941
939
942 patchf = self.opener(patchfn, "w")
940 patchf = self.opener(patchfn, "w")
943 msg = opts.get('msg', '').rstrip()
941 msg = opts.get('msg', '').rstrip()
944 if msg:
942 if msg:
945 if comments:
943 if comments:
946 # Remove existing message.
944 # Remove existing message.
947 ci = 0
945 ci = 0
948 for mi in xrange(len(message)):
946 for mi in xrange(len(message)):
949 while message[mi] != comments[ci]:
947 while message[mi] != comments[ci]:
950 ci += 1
948 ci += 1
951 del comments[ci]
949 del comments[ci]
952 comments.append(msg)
950 comments.append(msg)
953 if comments:
951 if comments:
954 comments = "\n".join(comments) + '\n\n'
952 comments = "\n".join(comments) + '\n\n'
955 patchf.write(comments)
953 patchf.write(comments)
956
954
957 if opts.get('git'):
955 if opts.get('git'):
958 self.diffopts().git = True
956 self.diffopts().git = True
959 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
957 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
960 tip = repo.changelog.tip()
958 tip = repo.changelog.tip()
961 if top == tip:
959 if top == tip:
962 # if the top of our patch queue is also the tip, there is an
960 # if the top of our patch queue is also the tip, there is an
963 # optimization here. We update the dirstate in place and strip
961 # optimization here. We update the dirstate in place and strip
964 # off the tip commit. Then just commit the current directory
962 # off the tip commit. Then just commit the current directory
965 # tree. We can also send repo.commit the list of files
963 # tree. We can also send repo.commit the list of files
966 # changed to speed up the diff
964 # changed to speed up the diff
967 #
965 #
968 # in short mode, we only diff the files included in the
966 # in short mode, we only diff the files included in the
969 # patch already
967 # patch already
970 #
968 #
971 # this should really read:
969 # this should really read:
972 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
970 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
973 # but we do it backwards to take advantage of manifest/chlog
971 # but we do it backwards to take advantage of manifest/chlog
974 # caching against the next repo.status call
972 # caching against the next repo.status call
975 #
973 #
976 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
974 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
977 changes = repo.changelog.read(tip)
975 changes = repo.changelog.read(tip)
978 man = repo.manifest.read(changes[0])
976 man = repo.manifest.read(changes[0])
979 aaa = aa[:]
977 aaa = aa[:]
980 if opts.get('short'):
978 if opts.get('short'):
981 filelist = mm + aa + dd
979 filelist = mm + aa + dd
982 else:
980 else:
983 filelist = None
981 filelist = None
984 m, a, r, d, u = repo.status(files=filelist)[:5]
982 m, a, r, d, u = repo.status(files=filelist)[:5]
985
983
986 # we might end up with files that were added between tip and
984 # we might end up with files that were added between tip and
987 # the dirstate parent, but then changed in the local dirstate.
985 # the dirstate parent, but then changed in the local dirstate.
988 # in this case, we want them to only show up in the added section
986 # in this case, we want them to only show up in the added section
989 for x in m:
987 for x in m:
990 if x not in aa:
988 if x not in aa:
991 mm.append(x)
989 mm.append(x)
992 # we might end up with files added by the local dirstate that
990 # we might end up with files added by the local dirstate that
993 # were deleted by the patch. In this case, they should only
991 # were deleted by the patch. In this case, they should only
994 # show up in the changed section.
992 # show up in the changed section.
995 for x in a:
993 for x in a:
996 if x in dd:
994 if x in dd:
997 del dd[dd.index(x)]
995 del dd[dd.index(x)]
998 mm.append(x)
996 mm.append(x)
999 else:
997 else:
1000 aa.append(x)
998 aa.append(x)
1001 # make sure any files deleted in the local dirstate
999 # make sure any files deleted in the local dirstate
1002 # are not in the add or change column of the patch
1000 # are not in the add or change column of the patch
1003 forget = []
1001 forget = []
1004 for x in d + r:
1002 for x in d + r:
1005 if x in aa:
1003 if x in aa:
1006 del aa[aa.index(x)]
1004 del aa[aa.index(x)]
1007 forget.append(x)
1005 forget.append(x)
1008 continue
1006 continue
1009 elif x in mm:
1007 elif x in mm:
1010 del mm[mm.index(x)]
1008 del mm[mm.index(x)]
1011 dd.append(x)
1009 dd.append(x)
1012
1010
1013 m = util.unique(mm)
1011 m = util.unique(mm)
1014 r = util.unique(dd)
1012 r = util.unique(dd)
1015 a = util.unique(aa)
1013 a = util.unique(aa)
1016 filelist = filter(matchfn, util.unique(m + r + a))
1014 filelist = filter(matchfn, util.unique(m + r + a))
1017 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1015 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1018 fp=patchf, changes=(m, a, r, [], u),
1016 fp=patchf, changes=(m, a, r, [], u),
1019 opts=self.diffopts())
1017 opts=self.diffopts())
1020 patchf.close()
1018 patchf.close()
1021
1019
1022 repo.dirstate.setparents(*cparents)
1020 repo.dirstate.setparents(*cparents)
1023 copies = {}
1021 copies = {}
1024 for dst in a:
1022 for dst in a:
1025 src = repo.dirstate.copied(dst)
1023 src = repo.dirstate.copied(dst)
1026 if src is None:
1024 if src is None:
1027 continue
1025 continue
1028 copies.setdefault(src, []).append(dst)
1026 copies.setdefault(src, []).append(dst)
1029 repo.dirstate.update(a, 'a')
1027 repo.dirstate.update(a, 'a')
1030 # remember the copies between patchparent and tip
1028 # remember the copies between patchparent and tip
1031 # this may be slow, so don't do it if we're not tracking copies
1029 # this may be slow, so don't do it if we're not tracking copies
1032 if self.diffopts().git:
1030 if self.diffopts().git:
1033 for dst in aaa:
1031 for dst in aaa:
1034 f = repo.file(dst)
1032 f = repo.file(dst)
1035 src = f.renamed(man[dst])
1033 src = f.renamed(man[dst])
1036 if src:
1034 if src:
1037 copies[src[0]] = copies.get(dst, [])
1035 copies[src[0]] = copies.get(dst, [])
1038 if dst in a:
1036 if dst in a:
1039 copies[src[0]].append(dst)
1037 copies[src[0]].append(dst)
1040 # we can't copy a file created by the patch itself
1038 # we can't copy a file created by the patch itself
1041 if dst in copies:
1039 if dst in copies:
1042 del copies[dst]
1040 del copies[dst]
1043 for src, dsts in copies.iteritems():
1041 for src, dsts in copies.iteritems():
1044 for dst in dsts:
1042 for dst in dsts:
1045 repo.dirstate.copy(src, dst)
1043 repo.dirstate.copy(src, dst)
1046 repo.dirstate.update(r, 'r')
1044 repo.dirstate.update(r, 'r')
1047 # if the patch excludes a modified file, mark that file with mtime=0
1045 # if the patch excludes a modified file, mark that file with mtime=0
1048 # so status can see it.
1046 # so status can see it.
1049 mm = []
1047 mm = []
1050 for i in xrange(len(m)-1, -1, -1):
1048 for i in xrange(len(m)-1, -1, -1):
1051 if not matchfn(m[i]):
1049 if not matchfn(m[i]):
1052 mm.append(m[i])
1050 mm.append(m[i])
1053 del m[i]
1051 del m[i]
1054 repo.dirstate.update(m, 'n')
1052 repo.dirstate.update(m, 'n')
1055 repo.dirstate.update(mm, 'n', st_mtime=0)
1053 repo.dirstate.update(mm, 'n', st_mtime=0)
1056 repo.dirstate.forget(forget)
1054 repo.dirstate.forget(forget)
1057
1055
1058 if not msg:
1056 if not msg:
1059 if not message:
1057 if not message:
1060 message = "patch queue: %s\n" % patchfn
1058 message = "patch queue: %s\n" % patchfn
1061 else:
1059 else:
1062 message = "\n".join(message)
1060 message = "\n".join(message)
1063 else:
1061 else:
1064 message = msg
1062 message = msg
1065
1063
1066 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1064 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1067 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1065 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1068 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1066 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1069 self.applied_dirty = 1
1067 self.applied_dirty = 1
1070 else:
1068 else:
1071 self.printdiff(repo, patchparent, fp=patchf)
1069 self.printdiff(repo, patchparent, fp=patchf)
1072 patchf.close()
1070 patchf.close()
1073 added = repo.status()[1]
1071 added = repo.status()[1]
1074 for a in added:
1072 for a in added:
1075 f = repo.wjoin(a)
1073 f = repo.wjoin(a)
1076 try:
1074 try:
1077 os.unlink(f)
1075 os.unlink(f)
1078 except OSError, e:
1076 except OSError, e:
1079 if e.errno != errno.ENOENT:
1077 if e.errno != errno.ENOENT:
1080 raise
1078 raise
1081 try: os.removedirs(os.path.dirname(f))
1079 try: os.removedirs(os.path.dirname(f))
1082 except: pass
1080 except: pass
1083 # forget the file copies in the dirstate
1081 # forget the file copies in the dirstate
1084 # push should re-add the files later on
1082 # push should re-add the files later on
1085 repo.dirstate.forget(added)
1083 repo.dirstate.forget(added)
1086 self.pop(repo, force=True, wlock=wlock)
1084 self.pop(repo, force=True, wlock=wlock)
1087 self.push(repo, force=True, wlock=wlock)
1085 self.push(repo, force=True, wlock=wlock)
1088
1086
1089 def init(self, repo, create=False):
1087 def init(self, repo, create=False):
1090 if os.path.isdir(self.path):
1088 if os.path.isdir(self.path):
1091 raise util.Abort(_("patch queue directory already exists"))
1089 raise util.Abort(_("patch queue directory already exists"))
1092 os.mkdir(self.path)
1090 os.mkdir(self.path)
1093 if create:
1091 if create:
1094 return self.qrepo(create=True)
1092 return self.qrepo(create=True)
1095
1093
1096 def unapplied(self, repo, patch=None):
1094 def unapplied(self, repo, patch=None):
1097 if patch and patch not in self.series:
1095 if patch and patch not in self.series:
1098 raise util.Abort(_("patch %s is not in series file") % patch)
1096 raise util.Abort(_("patch %s is not in series file") % patch)
1099 if not patch:
1097 if not patch:
1100 start = self.series_end()
1098 start = self.series_end()
1101 else:
1099 else:
1102 start = self.series.index(patch) + 1
1100 start = self.series.index(patch) + 1
1103 unapplied = []
1101 unapplied = []
1104 for i in xrange(start, len(self.series)):
1102 for i in xrange(start, len(self.series)):
1105 pushable, reason = self.pushable(i)
1103 pushable, reason = self.pushable(i)
1106 if pushable:
1104 if pushable:
1107 unapplied.append((i, self.series[i]))
1105 unapplied.append((i, self.series[i]))
1108 self.explain_pushable(i)
1106 self.explain_pushable(i)
1109 return unapplied
1107 return unapplied
1110
1108
1111 def qseries(self, repo, missing=None, start=0, length=0, status=None,
1109 def qseries(self, repo, missing=None, start=0, length=0, status=None,
1112 summary=False):
1110 summary=False):
1113 def displayname(patchname):
1111 def displayname(patchname):
1114 if summary:
1112 if summary:
1115 msg = self.readheaders(patchname)[0]
1113 msg = self.readheaders(patchname)[0]
1116 msg = msg and ': ' + msg[0] or ': '
1114 msg = msg and ': ' + msg[0] or ': '
1117 else:
1115 else:
1118 msg = ''
1116 msg = ''
1119 return '%s%s' % (patchname, msg)
1117 return '%s%s' % (patchname, msg)
1120
1118
1121 def pname(i):
1119 def pname(i):
1122 if status == 'A':
1120 if status == 'A':
1123 return self.applied[i].name
1121 return self.applied[i].name
1124 else:
1122 else:
1125 return self.series[i]
1123 return self.series[i]
1126
1124
1127 applied = dict.fromkeys([p.name for p in self.applied])
1125 applied = dict.fromkeys([p.name for p in self.applied])
1128 if not length:
1126 if not length:
1129 length = len(self.series) - start
1127 length = len(self.series) - start
1130 if not missing:
1128 if not missing:
1131 for i in xrange(start, start+length):
1129 for i in xrange(start, start+length):
1132 pfx = ''
1130 pfx = ''
1133 patch = pname(i)
1131 patch = pname(i)
1134 if self.ui.verbose:
1132 if self.ui.verbose:
1135 if patch in applied:
1133 if patch in applied:
1136 stat = 'A'
1134 stat = 'A'
1137 elif self.pushable(i)[0]:
1135 elif self.pushable(i)[0]:
1138 stat = 'U'
1136 stat = 'U'
1139 else:
1137 else:
1140 stat = 'G'
1138 stat = 'G'
1141 pfx = '%d %s ' % (i, stat)
1139 pfx = '%d %s ' % (i, stat)
1142 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1140 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1143 else:
1141 else:
1144 msng_list = []
1142 msng_list = []
1145 for root, dirs, files in os.walk(self.path):
1143 for root, dirs, files in os.walk(self.path):
1146 d = root[len(self.path) + 1:]
1144 d = root[len(self.path) + 1:]
1147 for f in files:
1145 for f in files:
1148 fl = os.path.join(d, f)
1146 fl = os.path.join(d, f)
1149 if (fl not in self.series and
1147 if (fl not in self.series and
1150 fl not in (self.status_path, self.series_path)
1148 fl not in (self.status_path, self.series_path)
1151 and not fl.startswith('.')):
1149 and not fl.startswith('.')):
1152 msng_list.append(fl)
1150 msng_list.append(fl)
1153 msng_list.sort()
1151 msng_list.sort()
1154 for x in msng_list:
1152 for x in msng_list:
1155 pfx = self.ui.verbose and ('D ') or ''
1153 pfx = self.ui.verbose and ('D ') or ''
1156 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1154 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1157
1155
1158 def issaveline(self, l):
1156 def issaveline(self, l):
1159 if l.name == '.hg.patches.save.line':
1157 if l.name == '.hg.patches.save.line':
1160 return True
1158 return True
1161
1159
1162 def qrepo(self, create=False):
1160 def qrepo(self, create=False):
1163 if create or os.path.isdir(self.join(".hg")):
1161 if create or os.path.isdir(self.join(".hg")):
1164 return hg.repository(self.ui, path=self.path, create=create)
1162 return hg.repository(self.ui, path=self.path, create=create)
1165
1163
1166 def restore(self, repo, rev, delete=None, qupdate=None):
1164 def restore(self, repo, rev, delete=None, qupdate=None):
1167 c = repo.changelog.read(rev)
1165 c = repo.changelog.read(rev)
1168 desc = c[4].strip()
1166 desc = c[4].strip()
1169 lines = desc.splitlines()
1167 lines = desc.splitlines()
1170 i = 0
1168 i = 0
1171 datastart = None
1169 datastart = None
1172 series = []
1170 series = []
1173 applied = []
1171 applied = []
1174 qpp = None
1172 qpp = None
1175 for i in xrange(0, len(lines)):
1173 for i in xrange(0, len(lines)):
1176 if lines[i] == 'Patch Data:':
1174 if lines[i] == 'Patch Data:':
1177 datastart = i + 1
1175 datastart = i + 1
1178 elif lines[i].startswith('Dirstate:'):
1176 elif lines[i].startswith('Dirstate:'):
1179 l = lines[i].rstrip()
1177 l = lines[i].rstrip()
1180 l = l[10:].split(' ')
1178 l = l[10:].split(' ')
1181 qpp = [ hg.bin(x) for x in l ]
1179 qpp = [ hg.bin(x) for x in l ]
1182 elif datastart != None:
1180 elif datastart != None:
1183 l = lines[i].rstrip()
1181 l = lines[i].rstrip()
1184 se = statusentry(l)
1182 se = statusentry(l)
1185 file_ = se.name
1183 file_ = se.name
1186 if se.rev:
1184 if se.rev:
1187 applied.append(se)
1185 applied.append(se)
1188 else:
1186 else:
1189 series.append(file_)
1187 series.append(file_)
1190 if datastart == None:
1188 if datastart == None:
1191 self.ui.warn("No saved patch data found\n")
1189 self.ui.warn("No saved patch data found\n")
1192 return 1
1190 return 1
1193 self.ui.warn("restoring status: %s\n" % lines[0])
1191 self.ui.warn("restoring status: %s\n" % lines[0])
1194 self.full_series = series
1192 self.full_series = series
1195 self.applied = applied
1193 self.applied = applied
1196 self.parse_series()
1194 self.parse_series()
1197 self.series_dirty = 1
1195 self.series_dirty = 1
1198 self.applied_dirty = 1
1196 self.applied_dirty = 1
1199 heads = repo.changelog.heads()
1197 heads = repo.changelog.heads()
1200 if delete:
1198 if delete:
1201 if rev not in heads:
1199 if rev not in heads:
1202 self.ui.warn("save entry has children, leaving it alone\n")
1200 self.ui.warn("save entry has children, leaving it alone\n")
1203 else:
1201 else:
1204 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1202 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1205 pp = repo.dirstate.parents()
1203 pp = repo.dirstate.parents()
1206 if rev in pp:
1204 if rev in pp:
1207 update = True
1205 update = True
1208 else:
1206 else:
1209 update = False
1207 update = False
1210 self.strip(repo, rev, update=update, backup='strip')
1208 self.strip(repo, rev, update=update, backup='strip')
1211 if qpp:
1209 if qpp:
1212 self.ui.warn("saved queue repository parents: %s %s\n" %
1210 self.ui.warn("saved queue repository parents: %s %s\n" %
1213 (hg.short(qpp[0]), hg.short(qpp[1])))
1211 (hg.short(qpp[0]), hg.short(qpp[1])))
1214 if qupdate:
1212 if qupdate:
1215 print "queue directory updating"
1213 print "queue directory updating"
1216 r = self.qrepo()
1214 r = self.qrepo()
1217 if not r:
1215 if not r:
1218 self.ui.warn("Unable to load queue repository\n")
1216 self.ui.warn("Unable to load queue repository\n")
1219 return 1
1217 return 1
1220 hg.clean(r, qpp[0])
1218 hg.clean(r, qpp[0])
1221
1219
1222 def save(self, repo, msg=None):
1220 def save(self, repo, msg=None):
1223 if len(self.applied) == 0:
1221 if len(self.applied) == 0:
1224 self.ui.warn("save: no patches applied, exiting\n")
1222 self.ui.warn("save: no patches applied, exiting\n")
1225 return 1
1223 return 1
1226 if self.issaveline(self.applied[-1]):
1224 if self.issaveline(self.applied[-1]):
1227 self.ui.warn("status is already saved\n")
1225 self.ui.warn("status is already saved\n")
1228 return 1
1226 return 1
1229
1227
1230 ar = [ ':' + x for x in self.full_series ]
1228 ar = [ ':' + x for x in self.full_series ]
1231 if not msg:
1229 if not msg:
1232 msg = "hg patches saved state"
1230 msg = "hg patches saved state"
1233 else:
1231 else:
1234 msg = "hg patches: " + msg.rstrip('\r\n')
1232 msg = "hg patches: " + msg.rstrip('\r\n')
1235 r = self.qrepo()
1233 r = self.qrepo()
1236 if r:
1234 if r:
1237 pp = r.dirstate.parents()
1235 pp = r.dirstate.parents()
1238 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1236 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1239 msg += "\n\nPatch Data:\n"
1237 msg += "\n\nPatch Data:\n"
1240 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1238 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1241 "\n".join(ar) + '\n' or "")
1239 "\n".join(ar) + '\n' or "")
1242 n = repo.commit(None, text, user=None, force=1)
1240 n = repo.commit(None, text, user=None, force=1)
1243 if not n:
1241 if not n:
1244 self.ui.warn("repo commit failed\n")
1242 self.ui.warn("repo commit failed\n")
1245 return 1
1243 return 1
1246 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1244 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1247 self.applied_dirty = 1
1245 self.applied_dirty = 1
1248
1246
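# Illustrative sketch (not part of mq): the commit message that queue.save()
# above builds and that queue.restore() earlier parses back.  "Dirstate:" is
# only emitted when a versioned patch queue exists; applied entries are the
# str() of a statusentry, roughly "<rev hex>:<patch name>", and unapplied
# series entries are written with an empty rev field (":<patch name>").
def sketch_save_message(msg, parent_hexes, applied, series):
    text = msg
    if parent_hexes:
        text += "\nDirstate: %s %s" % parent_hexes
    text += "\n\nPatch Data:\n"
    text += "\n".join(applied) + "\n"
    if series:
        text += "\n".join(":" + name for name in series) + "\n"
    return text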
1249 def full_series_end(self):
1247 def full_series_end(self):
1250 if len(self.applied) > 0:
1248 if len(self.applied) > 0:
1251 p = self.applied[-1].name
1249 p = self.applied[-1].name
1252 end = self.find_series(p)
1250 end = self.find_series(p)
1253 if end == None:
1251 if end == None:
1254 return len(self.full_series)
1252 return len(self.full_series)
1255 return end + 1
1253 return end + 1
1256 return 0
1254 return 0
1257
1255
1258 def series_end(self, all_patches=False):
1256 def series_end(self, all_patches=False):
1259 end = 0
1257 end = 0
1260 def next(start):
1258 def next(start):
1261 if all_patches:
1259 if all_patches:
1262 return start
1260 return start
1263 i = start
1261 i = start
1264 while i < len(self.series):
1262 while i < len(self.series):
1265 p, reason = self.pushable(i)
1263 p, reason = self.pushable(i)
1266 if p:
1264 if p:
1267 break
1265 break
1268 self.explain_pushable(i)
1266 self.explain_pushable(i)
1269 i += 1
1267 i += 1
1270 return i
1268 return i
1271 if len(self.applied) > 0:
1269 if len(self.applied) > 0:
1272 p = self.applied[-1].name
1270 p = self.applied[-1].name
1273 try:
1271 try:
1274 end = self.series.index(p)
1272 end = self.series.index(p)
1275 except ValueError:
1273 except ValueError:
1276 return 0
1274 return 0
1277 return next(end + 1)
1275 return next(end + 1)
1278 return next(end)
1276 return next(end)
1279
1277
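# Toy model (not mq's API) of the series_end() logic above for the default
# all_patches=False case: find the index of the first series entry past the
# applied stack, skipping entries that are not currently pushable.
def toy_series_end(series, applied, pushable=lambda name: True):
    if applied:
        try:
            i = series.index(applied[-1]) + 1
        except ValueError:
            return 0
    else:
        i = 0
    while i < len(series) and not pushable(series[i]):
        i += 1
    return i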
1280 def appliedname(self, index):
1278 def appliedname(self, index):
1281 pname = self.applied[index].name
1279 pname = self.applied[index].name
1282 if not self.ui.verbose:
1280 if not self.ui.verbose:
1283 p = pname
1281 p = pname
1284 else:
1282 else:
1285 p = str(self.series.index(pname)) + " " + pname
1283 p = str(self.series.index(pname)) + " " + pname
1286 return p
1284 return p
1287
1285
1288 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1286 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1289 force=None, git=False):
1287 force=None, git=False):
1290 def checkseries(patchname):
1288 def checkseries(patchname):
1291 if patchname in self.series:
1289 if patchname in self.series:
1292 raise util.Abort(_('patch %s is already in the series file')
1290 raise util.Abort(_('patch %s is already in the series file')
1293 % patchname)
1291 % patchname)
1294 def checkfile(patchname):
1292 def checkfile(patchname):
1295 if not force and os.path.exists(self.join(patchname)):
1293 if not force and os.path.exists(self.join(patchname)):
1296 raise util.Abort(_('patch "%s" already exists')
1294 raise util.Abort(_('patch "%s" already exists')
1297 % patchname)
1295 % patchname)
1298
1296
1299 if rev:
1297 if rev:
1300 if files:
1298 if files:
1301 raise util.Abort(_('option "-r" not valid when importing '
1299 raise util.Abort(_('option "-r" not valid when importing '
1302 'files'))
1300 'files'))
1303 rev = cmdutil.revrange(repo, rev)
1301 rev = cmdutil.revrange(repo, rev)
1304 rev.sort(lambda x, y: cmp(y, x))
1302 rev.sort(lambda x, y: cmp(y, x))
1305 if (len(files) > 1 or len(rev) > 1) and patchname:
1303 if (len(files) > 1 or len(rev) > 1) and patchname:
1306 raise util.Abort(_('option "-n" not valid when importing multiple '
1304 raise util.Abort(_('option "-n" not valid when importing multiple '
1307 'patches'))
1305 'patches'))
1308 i = 0
1306 i = 0
1309 added = []
1307 added = []
1310 if rev:
1308 if rev:
1311 # If mq patches are applied, we can only import revisions
1309 # If mq patches are applied, we can only import revisions
1312 # that form a linear path to qbase.
1310 # that form a linear path to qbase.
1313 # Otherwise, they should form a linear path to a head.
1311 # Otherwise, they should form a linear path to a head.
1314 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1312 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1315 if len(heads) > 1:
1313 if len(heads) > 1:
1316 raise util.Abort(_('revision %d is the root of more than one '
1314 raise util.Abort(_('revision %d is the root of more than one '
1317 'branch') % rev[-1])
1315 'branch') % rev[-1])
1318 if self.applied:
1316 if self.applied:
1319 base = revlog.hex(repo.changelog.node(rev[0]))
1317 base = revlog.hex(repo.changelog.node(rev[0]))
1320 if base in [n.rev for n in self.applied]:
1318 if base in [n.rev for n in self.applied]:
1321 raise util.Abort(_('revision %d is already managed')
1319 raise util.Abort(_('revision %d is already managed')
1322 % rev[0])
1320 % rev[0])
1323 if heads != [revlog.bin(self.applied[-1].rev)]:
1321 if heads != [revlog.bin(self.applied[-1].rev)]:
1324 raise util.Abort(_('revision %d is not the parent of '
1322 raise util.Abort(_('revision %d is not the parent of '
1325 'the queue') % rev[0])
1323 'the queue') % rev[0])
1326 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1324 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1327 lastparent = repo.changelog.parentrevs(base)[0]
1325 lastparent = repo.changelog.parentrevs(base)[0]
1328 else:
1326 else:
1329 if heads != [repo.changelog.node(rev[0])]:
1327 if heads != [repo.changelog.node(rev[0])]:
1330 raise util.Abort(_('revision %d has unmanaged children')
1328 raise util.Abort(_('revision %d has unmanaged children')
1331 % rev[0])
1329 % rev[0])
1332 lastparent = None
1330 lastparent = None
1333
1331
1334 if git:
1332 if git:
1335 self.diffopts().git = True
1333 self.diffopts().git = True
1336
1334
1337 for r in rev:
1335 for r in rev:
1338 p1, p2 = repo.changelog.parentrevs(r)
1336 p1, p2 = repo.changelog.parentrevs(r)
1339 n = repo.changelog.node(r)
1337 n = repo.changelog.node(r)
1340 if p2 != revlog.nullrev:
1338 if p2 != revlog.nullrev:
1341 raise util.Abort(_('cannot import merge revision %d') % r)
1339 raise util.Abort(_('cannot import merge revision %d') % r)
1342 if lastparent and lastparent != r:
1340 if lastparent and lastparent != r:
1343 raise util.Abort(_('revision %d is not the parent of %d')
1341 raise util.Abort(_('revision %d is not the parent of %d')
1344 % (r, lastparent))
1342 % (r, lastparent))
1345 lastparent = p1
1343 lastparent = p1
1346
1344
1347 if not patchname:
1345 if not patchname:
1348 patchname = '%d.diff' % r
1346 patchname = '%d.diff' % r
1349 checkseries(patchname)
1347 checkseries(patchname)
1350 checkfile(patchname)
1348 checkfile(patchname)
1351 self.full_series.insert(0, patchname)
1349 self.full_series.insert(0, patchname)
1352
1350
1353 patchf = self.opener(patchname, "w")
1351 patchf = self.opener(patchname, "w")
1354 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1352 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1355 patchf.close()
1353 patchf.close()
1356
1354
1357 se = statusentry(revlog.hex(n), patchname)
1355 se = statusentry(revlog.hex(n), patchname)
1358 self.applied.insert(0, se)
1356 self.applied.insert(0, se)
1359
1357
1360 added.append(patchname)
1358 added.append(patchname)
1361 patchname = None
1359 patchname = None
1362 self.parse_series()
1360 self.parse_series()
1363 self.applied_dirty = 1
1361 self.applied_dirty = 1
1364
1362
1365 for filename in files:
1363 for filename in files:
1366 if existing:
1364 if existing:
1367 if filename == '-':
1365 if filename == '-':
1368 raise util.Abort(_('-e is incompatible with import from -'))
1366 raise util.Abort(_('-e is incompatible with import from -'))
1369 if not patchname:
1367 if not patchname:
1370 patchname = filename
1368 patchname = filename
1371 if not os.path.isfile(self.join(patchname)):
1369 if not os.path.isfile(self.join(patchname)):
1372 raise util.Abort(_("patch %s does not exist") % patchname)
1370 raise util.Abort(_("patch %s does not exist") % patchname)
1373 else:
1371 else:
1374 try:
1372 try:
1375 if filename == '-':
1373 if filename == '-':
1376 if not patchname:
1374 if not patchname:
1377 raise util.Abort(_('need --name to import a patch from -'))
1375 raise util.Abort(_('need --name to import a patch from -'))
1378 text = sys.stdin.read()
1376 text = sys.stdin.read()
1379 else:
1377 else:
1380 text = file(filename).read()
1378 text = file(filename).read()
1381 except IOError:
1379 except IOError:
1382 raise util.Abort(_("unable to read %s") % patchname)
1380 raise util.Abort(_("unable to read %s") % patchname)
1383 if not patchname:
1381 if not patchname:
1384 patchname = os.path.basename(filename)
1382 patchname = os.path.basename(filename)
1385 checkfile(patchname)
1383 checkfile(patchname)
1386 patchf = self.opener(patchname, "w")
1384 patchf = self.opener(patchname, "w")
1387 patchf.write(text)
1385 patchf.write(text)
1388 checkseries(patchname)
1386 checkseries(patchname)
1389 index = self.full_series_end() + i
1387 index = self.full_series_end() + i
1390 self.full_series[index:index] = [patchname]
1388 self.full_series[index:index] = [patchname]
1391 self.parse_series()
1389 self.parse_series()
1392 self.ui.warn("adding %s to series file\n" % patchname)
1390 self.ui.warn("adding %s to series file\n" % patchname)
1393 i += 1
1391 i += 1
1394 added.append(patchname)
1392 added.append(patchname)
1395 patchname = None
1393 patchname = None
1396 self.series_dirty = 1
1394 self.series_dirty = 1
1397 qrepo = self.qrepo()
1395 qrepo = self.qrepo()
1398 if qrepo:
1396 if qrepo:
1399 qrepo.add(added)
1397 qrepo.add(added)
1400
1398
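# Toy version (illustration only) of the per-revision loop in queue.qimport()
# above: revisions imported with --rev must form a linear chain with no merge
# changesets, each revision being the first parent of the previously handled
# one.  -1 stands in for revlog.nullrev here.
def toy_check_linear(parentrevs, revs):
    # revs is sorted newest-first, as in qimport's rev.sort() call
    lastparent = None
    for r in revs:
        p1, p2 = parentrevs(r)
        if p2 != -1:
            raise ValueError('cannot import merge revision %d' % r)
        if lastparent is not None and lastparent != r:
            raise ValueError('revision %d is not the parent of %d'
                             % (r, lastparent))
        lastparent = p1
    return True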
1401 def delete(ui, repo, *patches, **opts):
1399 def delete(ui, repo, *patches, **opts):
1402 """remove patches from queue
1400 """remove patches from queue
1403
1401
1404 With --rev, mq will stop managing the named revisions. The
1402 With --rev, mq will stop managing the named revisions. The
1405 patches must be applied and at the base of the stack. This option
1403 patches must be applied and at the base of the stack. This option
1406 is useful when the patches have been applied upstream.
1404 is useful when the patches have been applied upstream.
1407
1405
1408 Otherwise, the patches must not be applied.
1406 Otherwise, the patches must not be applied.
1409
1407
1410 With --keep, the patch files are preserved in the patch directory."""
1408 With --keep, the patch files are preserved in the patch directory."""
1411 q = repo.mq
1409 q = repo.mq
1412 q.delete(repo, patches, opts)
1410 q.delete(repo, patches, opts)
1413 q.save_dirty()
1411 q.save_dirty()
1414 return 0
1412 return 0
1415
1413
1416 def applied(ui, repo, patch=None, **opts):
1414 def applied(ui, repo, patch=None, **opts):
1417 """print the patches already applied"""
1415 """print the patches already applied"""
1418 q = repo.mq
1416 q = repo.mq
1419 if patch:
1417 if patch:
1420 if patch not in q.series:
1418 if patch not in q.series:
1421 raise util.Abort(_("patch %s is not in series file") % patch)
1419 raise util.Abort(_("patch %s is not in series file") % patch)
1422 end = q.series.index(patch) + 1
1420 end = q.series.index(patch) + 1
1423 else:
1421 else:
1424 end = len(q.applied)
1422 end = len(q.applied)
1425 if not end:
1423 if not end:
1426 return
1424 return
1427
1425
1428 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1426 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1429
1427
1430 def unapplied(ui, repo, patch=None, **opts):
1428 def unapplied(ui, repo, patch=None, **opts):
1431 """print the patches not yet applied"""
1429 """print the patches not yet applied"""
1432 q = repo.mq
1430 q = repo.mq
1433 if patch:
1431 if patch:
1434 if patch not in q.series:
1432 if patch not in q.series:
1435 raise util.Abort(_("patch %s is not in series file") % patch)
1433 raise util.Abort(_("patch %s is not in series file") % patch)
1436 start = q.series.index(patch) + 1
1434 start = q.series.index(patch) + 1
1437 else:
1435 else:
1438 start = q.series_end()
1436 start = q.series_end()
1439 q.qseries(repo, start=start, summary=opts.get('summary'))
1437 q.qseries(repo, start=start, summary=opts.get('summary'))
1440
1438
1441 def qimport(ui, repo, *filename, **opts):
1439 def qimport(ui, repo, *filename, **opts):
1442 """import a patch
1440 """import a patch
1443
1441
1444 The patch will have the same name as its source file unless you
1442 The patch will have the same name as its source file unless you
1445 give it a new one with --name.
1443 give it a new one with --name.
1446
1444
1447 You can register an existing patch inside the patch directory
1445 You can register an existing patch inside the patch directory
1448 with the --existing flag.
1446 with the --existing flag.
1449
1447
1450 With --force, an existing patch of the same name will be overwritten.
1448 With --force, an existing patch of the same name will be overwritten.
1451
1449
1452 An existing changeset may be placed under mq control with --rev
1450 An existing changeset may be placed under mq control with --rev
1453 (e.g. qimport --rev tip -n patch will place tip under mq control).
1451 (e.g. qimport --rev tip -n patch will place tip under mq control).
1454 With --git, patches imported with --rev will use the git diff
1452 With --git, patches imported with --rev will use the git diff
1455 format.
1453 format.
1456 """
1454 """
1457 q = repo.mq
1455 q = repo.mq
1458 q.qimport(repo, filename, patchname=opts['name'],
1456 q.qimport(repo, filename, patchname=opts['name'],
1459 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1457 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1460 git=opts['git'])
1458 git=opts['git'])
1461 q.save_dirty()
1459 q.save_dirty()
1462 return 0
1460 return 0
1463
1461
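import os   # already imported by this module; repeated so the sketch stands alone

# Sketch of how qimport picks a patch name when --name is not given, mirroring
# queue.qimport() above; the helper name is made up for illustration and is
# not part of the extension.
def toy_default_patchname(rev=None, filename=None, existing=False):
    if rev is not None:
        return '%d.diff' % rev            # --rev imports
    if filename == '-':
        raise ValueError('need --name to import a patch from -')
    if existing:
        return filename                   # --existing keeps the given name
    return os.path.basename(filename)     # plain file import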
1464 def init(ui, repo, **opts):
1462 def init(ui, repo, **opts):
1465 """init a new queue repository
1463 """init a new queue repository
1466
1464
1467 The queue repository is unversioned by default. If -c is
1465 The queue repository is unversioned by default. If -c is
1468 specified, qinit will create a separate nested repository
1466 specified, qinit will create a separate nested repository
1469 for patches. Use qcommit to commit changes to this queue
1467 for patches. Use qcommit to commit changes to this queue
1470 repository."""
1468 repository."""
1471 q = repo.mq
1469 q = repo.mq
1472 r = q.init(repo, create=opts['create_repo'])
1470 r = q.init(repo, create=opts['create_repo'])
1473 q.save_dirty()
1471 q.save_dirty()
1474 if r:
1472 if r:
1475 fp = r.wopener('.hgignore', 'w')
1473 fp = r.wopener('.hgignore', 'w')
1476 print >> fp, 'syntax: glob'
1474 print >> fp, 'syntax: glob'
1477 print >> fp, 'status'
1475 print >> fp, 'status'
1478 print >> fp, 'guards'
1476 print >> fp, 'guards'
1479 fp.close()
1477 fp.close()
1480 r.wopener('series', 'w').close()
1478 r.wopener('series', 'w').close()
1481 r.add(['.hgignore', 'series'])
1479 r.add(['.hgignore', 'series'])
1482 return 0
1480 return 0
1483
1481
1484 def clone(ui, source, dest=None, **opts):
1482 def clone(ui, source, dest=None, **opts):
1485 '''clone main and patch repositories at the same time
1483 '''clone main and patch repositories at the same time
1486
1484
1487 If source is local, destination will have no patches applied. If
1485 If source is local, destination will have no patches applied. If
1488 source is remote, this command cannot check whether patches are
1486 source is remote, this command cannot check whether patches are
1489 applied in source, so it cannot guarantee that patches are not
1487 applied in source, so it cannot guarantee that patches are not
1490 applied in destination. If you clone a remote repository, make
1488 applied in destination. If you clone a remote repository, make
1491 sure it has no patches applied before cloning.
1489 sure it has no patches applied before cloning.
1492
1490
1493 Source patch repository is looked for in <src>/.hg/patches by
1491 Source patch repository is looked for in <src>/.hg/patches by
1494 default. Use -p <url> to change.
1492 default. Use -p <url> to change.
1495 '''
1493 '''
1496 commands.setremoteconfig(ui, opts)
1494 commands.setremoteconfig(ui, opts)
1497 if dest is None:
1495 if dest is None:
1498 dest = hg.defaultdest(source)
1496 dest = hg.defaultdest(source)
1499 sr = hg.repository(ui, ui.expandpath(source))
1497 sr = hg.repository(ui, ui.expandpath(source))
1500 qbase, destrev = None, None
1498 qbase, destrev = None, None
1501 if sr.local():
1499 if sr.local():
1502 reposetup(ui, sr)
1500 reposetup(ui, sr)
1503 if sr.mq.applied:
1501 if sr.mq.applied:
1504 qbase = revlog.bin(sr.mq.applied[0].rev)
1502 qbase = revlog.bin(sr.mq.applied[0].rev)
1505 if not hg.islocal(dest):
1503 if not hg.islocal(dest):
1506 destrev = sr.parents(qbase)[0]
1504 destrev = sr.parents(qbase)[0]
1507 ui.note(_('cloning main repo\n'))
1505 ui.note(_('cloning main repo\n'))
1508 sr, dr = hg.clone(ui, sr, dest,
1506 sr, dr = hg.clone(ui, sr, dest,
1509 pull=opts['pull'],
1507 pull=opts['pull'],
1510 rev=destrev,
1508 rev=destrev,
1511 update=False,
1509 update=False,
1512 stream=opts['uncompressed'])
1510 stream=opts['uncompressed'])
1513 ui.note(_('cloning patch repo\n'))
1511 ui.note(_('cloning patch repo\n'))
1514 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1512 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1515 dr.url() + '/.hg/patches',
1513 dr.url() + '/.hg/patches',
1516 pull=opts['pull'],
1514 pull=opts['pull'],
1517 update=not opts['noupdate'],
1515 update=not opts['noupdate'],
1518 stream=opts['uncompressed'])
1516 stream=opts['uncompressed'])
1519 if dr.local():
1517 if dr.local():
1520 if qbase:
1518 if qbase:
1521 ui.note(_('stripping applied patches from destination repo\n'))
1519 ui.note(_('stripping applied patches from destination repo\n'))
1522 reposetup(ui, dr)
1520 reposetup(ui, dr)
1523 dr.mq.strip(dr, qbase, update=False, backup=None)
1521 dr.mq.strip(dr, qbase, update=False, backup=None)
1524 if not opts['noupdate']:
1522 if not opts['noupdate']:
1525 ui.note(_('updating destination repo\n'))
1523 ui.note(_('updating destination repo\n'))
1526 hg.update(dr, dr.changelog.tip())
1524 hg.update(dr, dr.changelog.tip())
1527
1525
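# Sketch (names are illustrative, not mq's API) of where qclone above looks
# for the two patch repositories: -p/--patches overrides the source location,
# otherwise both default to <repo>/.hg/patches.
def toy_patch_repo_urls(source_url, dest_url, patches_opt=None):
    src_patches = patches_opt or source_url + '/.hg/patches'
    dst_patches = dest_url + '/.hg/patches'
    return src_patches, dst_patches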
1528 def commit(ui, repo, *pats, **opts):
1526 def commit(ui, repo, *pats, **opts):
1529 """commit changes in the queue repository"""
1527 """commit changes in the queue repository"""
1530 q = repo.mq
1528 q = repo.mq
1531 r = q.qrepo()
1529 r = q.qrepo()
1532 if not r: raise util.Abort('no queue repository')
1530 if not r: raise util.Abort('no queue repository')
1533 commands.commit(r.ui, r, *pats, **opts)
1531 commands.commit(r.ui, r, *pats, **opts)
1534
1532
1535 def series(ui, repo, **opts):
1533 def series(ui, repo, **opts):
1536 """print the entire series file"""
1534 """print the entire series file"""
1537 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1535 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1538 return 0
1536 return 0
1539
1537
1540 def top(ui, repo, **opts):
1538 def top(ui, repo, **opts):
1541 """print the name of the current patch"""
1539 """print the name of the current patch"""
1542 q = repo.mq
1540 q = repo.mq
1543 t = len(q.applied)
1541 t = len(q.applied)
1544 if t:
1542 if t:
1545 return q.qseries(repo, start=t-1, length=1, status='A',
1543 return q.qseries(repo, start=t-1, length=1, status='A',
1546 summary=opts.get('summary'))
1544 summary=opts.get('summary'))
1547 else:
1545 else:
1548 ui.write("No patches applied\n")
1546 ui.write("No patches applied\n")
1549 return 1
1547 return 1
1550
1548
1551 def next(ui, repo, **opts):
1549 def next(ui, repo, **opts):
1552 """print the name of the next patch"""
1550 """print the name of the next patch"""
1553 q = repo.mq
1551 q = repo.mq
1554 end = q.series_end()
1552 end = q.series_end()
1555 if end == len(q.series):
1553 if end == len(q.series):
1556 ui.write("All patches applied\n")
1554 ui.write("All patches applied\n")
1557 return 1
1555 return 1
1558 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1556 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1559
1557
1560 def prev(ui, repo, **opts):
1558 def prev(ui, repo, **opts):
1561 """print the name of the previous patch"""
1559 """print the name of the previous patch"""
1562 q = repo.mq
1560 q = repo.mq
1563 l = len(q.applied)
1561 l = len(q.applied)
1564 if l == 1:
1562 if l == 1:
1565 ui.write("Only one patch applied\n")
1563 ui.write("Only one patch applied\n")
1566 return 1
1564 return 1
1567 if not l:
1565 if not l:
1568 ui.write("No patches applied\n")
1566 ui.write("No patches applied\n")
1569 return 1
1567 return 1
1570 return q.qseries(repo, start=l-2, length=1, status='A',
1568 return q.qseries(repo, start=l-2, length=1, status='A',
1571 summary=opts.get('summary'))
1569 summary=opts.get('summary'))
1572
1570
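# Rough illustration of the index arithmetic behind qtop/qnext/qprev above:
# "top" is the last applied patch, "next" is the series entry at series_end(),
# and "prev" is the applied patch just below the top.
def toy_top_next_prev(series, applied, series_end):
    top = applied[-1] if applied else None
    nxt = series[series_end] if series_end < len(series) else None
    prev = applied[-2] if len(applied) > 1 else None
    return top, nxt, prev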
1573 def new(ui, repo, patch, **opts):
1571 def new(ui, repo, patch, **opts):
1574 """create a new patch
1572 """create a new patch
1575
1573
1576 qnew creates a new patch on top of the currently-applied patch
1574 qnew creates a new patch on top of the currently-applied patch
1577 (if any). It will refuse to run if there are any outstanding
1575 (if any). It will refuse to run if there are any outstanding
1578 changes unless -f is specified, in which case the patch will
1576 changes unless -f is specified, in which case the patch will
1579 be initialised with them.
1577 be initialised with them.
1580
1578
1581 -e, -m or -l set the patch header as well as the commit message.
1579 -e, -m or -l set the patch header as well as the commit message.
1582 If none is specified, the patch header is empty and the
1580 If none is specified, the patch header is empty and the
1583 commit message is 'New patch: PATCH'"""
1581 commit message is 'New patch: PATCH'"""
1584 q = repo.mq
1582 q = repo.mq
1585 message = commands.logmessage(opts)
1583 message = commands.logmessage(opts)
1586 if opts['edit']:
1584 if opts['edit']:
1587 message = ui.edit(message, ui.username())
1585 message = ui.edit(message, ui.username())
1588 q.new(repo, patch, msg=message, force=opts['force'])
1586 q.new(repo, patch, msg=message, force=opts['force'])
1589 q.save_dirty()
1587 q.save_dirty()
1590 return 0
1588 return 0
1591
1589
1592 def refresh(ui, repo, *pats, **opts):
1590 def refresh(ui, repo, *pats, **opts):
1593 """update the current patch
1591 """update the current patch
1594
1592
1595 If any file patterns are provided, the refreshed patch will contain only
1593 If any file patterns are provided, the refreshed patch will contain only
1596 the modifications that match those patterns; the remaining modifications
1594 the modifications that match those patterns; the remaining modifications
1597 will remain in the working directory.
1595 will remain in the working directory.
1598 """
1596 """
1599 q = repo.mq
1597 q = repo.mq
1600 message = commands.logmessage(opts)
1598 message = commands.logmessage(opts)
1601 if opts['edit']:
1599 if opts['edit']:
1602 if message:
1600 if message:
1603 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1601 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1604 patch = q.applied[-1].name
1602 patch = q.applied[-1].name
1605 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1603 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1606 message = ui.edit('\n'.join(message), user or ui.username())
1604 message = ui.edit('\n'.join(message), user or ui.username())
1607 ret = q.refresh(repo, pats, msg=message, **opts)
1605 ret = q.refresh(repo, pats, msg=message, **opts)
1608 q.save_dirty()
1606 q.save_dirty()
1609 return ret
1607 return ret
1610
1608
1611 def diff(ui, repo, *pats, **opts):
1609 def diff(ui, repo, *pats, **opts):
1612 """diff of the current patch"""
1610 """diff of the current patch"""
1613 repo.mq.diff(repo, pats, opts)
1611 repo.mq.diff(repo, pats, opts)
1614 return 0
1612 return 0
1615
1613
1616 def fold(ui, repo, *files, **opts):
1614 def fold(ui, repo, *files, **opts):
1617 """fold the named patches into the current patch
1615 """fold the named patches into the current patch
1618
1616
1619 Patches must not yet be applied. Each patch will be successively
1617 Patches must not yet be applied. Each patch will be successively
1620 applied to the current patch in the order given. If all the
1618 applied to the current patch in the order given. If all the
1621 patches apply successfully, the current patch will be refreshed
1619 patches apply successfully, the current patch will be refreshed
1622 with the new cumulative patch, and the folded patches will
1620 with the new cumulative patch, and the folded patches will
1623 be deleted. With -k/--keep, the folded patch files will not
1621 be deleted. With -k/--keep, the folded patch files will not
1624 be removed afterwards.
1622 be removed afterwards.
1625
1623
1626 The header for each folded patch will be concatenated with
1624 The header for each folded patch will be concatenated with
1627 the current patch header, separated by a line of '* * *'."""
1625 the current patch header, separated by a line of '* * *'."""
1628
1626
1629 q = repo.mq
1627 q = repo.mq
1630
1628
1631 if not files:
1629 if not files:
1632 raise util.Abort(_('qfold requires at least one patch name'))
1630 raise util.Abort(_('qfold requires at least one patch name'))
1633 if not q.check_toppatch(repo):
1631 if not q.check_toppatch(repo):
1634 raise util.Abort(_('No patches applied'))
1632 raise util.Abort(_('No patches applied'))
1635
1633
1636 message = commands.logmessage(opts)
1634 message = commands.logmessage(opts)
1637 if opts['edit']:
1635 if opts['edit']:
1638 if message:
1636 if message:
1639 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1637 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1640
1638
1641 parent = q.lookup('qtip')
1639 parent = q.lookup('qtip')
1642 patches = []
1640 patches = []
1643 messages = []
1641 messages = []
1644 for f in files:
1642 for f in files:
1645 p = q.lookup(f)
1643 p = q.lookup(f)
1646 if p in patches or p == parent:
1644 if p in patches or p == parent:
1647 ui.warn(_('Skipping already folded patch %s\n') % p)
1645 ui.warn(_('Skipping already folded patch %s\n') % p)
1648 if q.isapplied(p):
1646 if q.isapplied(p):
1649 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1647 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1650 patches.append(p)
1648 patches.append(p)
1651
1649
1652 for p in patches:
1650 for p in patches:
1653 if not message:
1651 if not message:
1654 messages.append(q.readheaders(p)[0])
1652 messages.append(q.readheaders(p)[0])
1655 pf = q.join(p)
1653 pf = q.join(p)
1656 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1654 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1657 if not patchsuccess:
1655 if not patchsuccess:
1658 raise util.Abort(_('Error folding patch %s') % p)
1656 raise util.Abort(_('Error folding patch %s') % p)
1659 patch.updatedir(ui, repo, files)
1657 patch.updatedir(ui, repo, files)
1660
1658
1661 if not message:
1659 if not message:
1662 message, comments, user = q.readheaders(parent)[0:3]
1660 message, comments, user = q.readheaders(parent)[0:3]
1663 for msg in messages:
1661 for msg in messages:
1664 message.append('* * *')
1662 message.append('* * *')
1665 message.extend(msg)
1663 message.extend(msg)
1666 message = '\n'.join(message)
1664 message = '\n'.join(message)
1667
1665
1668 if opts['edit']:
1666 if opts['edit']:
1669 message = ui.edit(message, user or ui.username())
1667 message = ui.edit(message, user or ui.username())
1670
1668
1671 q.refresh(repo, msg=message)
1669 q.refresh(repo, msg=message)
1672 q.delete(repo, patches, opts)
1670 q.delete(repo, patches, opts)
1673 q.save_dirty()
1671 q.save_dirty()
1674
1672
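# Sketch of the message concatenation qfold performs above: the target patch's
# header lines come first, then each folded patch's header, each separated by
# a '* * *' line.
def toy_fold_message(parent_lines, folded_messages):
    lines = list(parent_lines)
    for msg in folded_messages:
        lines.append('* * *')
        lines.extend(msg)
    return '\n'.join(lines)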
1675 def guard(ui, repo, *args, **opts):
1673 def guard(ui, repo, *args, **opts):
1676 '''set or print guards for a patch
1674 '''set or print guards for a patch
1677
1675
1678 Guards control whether a patch can be pushed. A patch with no
1676 Guards control whether a patch can be pushed. A patch with no
1679 guards is always pushed. A patch with a positive guard ("+foo") is
1677 guards is always pushed. A patch with a positive guard ("+foo") is
1680 pushed only if the qselect command has activated it. A patch with
1678 pushed only if the qselect command has activated it. A patch with
1681 a negative guard ("-foo") is never pushed if the qselect command
1679 a negative guard ("-foo") is never pushed if the qselect command
1682 has activated it.
1680 has activated it.
1683
1681
1684 With no arguments, print the currently active guards.
1682 With no arguments, print the currently active guards.
1685 With arguments, set guards for the named patch.
1683 With arguments, set guards for the named patch.
1686
1684
1687 To set a negative guard "-foo" on the topmost patch ("--" is needed so
1685 To set a negative guard "-foo" on the topmost patch ("--" is needed so
1688 hg will not interpret "-foo" as an option):
1686 hg will not interpret "-foo" as an option):
1689 hg qguard -- -foo
1687 hg qguard -- -foo
1690
1688
1691 To set guards on another patch:
1689 To set guards on another patch:
1692 hg qguard other.patch +2.6.17 -stable
1690 hg qguard other.patch +2.6.17 -stable
1693 '''
1691 '''
1694 def status(idx):
1692 def status(idx):
1695 guards = q.series_guards[idx] or ['unguarded']
1693 guards = q.series_guards[idx] or ['unguarded']
1696 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1694 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1697 q = repo.mq
1695 q = repo.mq
1698 patch = None
1696 patch = None
1699 args = list(args)
1697 args = list(args)
1700 if opts['list']:
1698 if opts['list']:
1701 if args or opts['none']:
1699 if args or opts['none']:
1702 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1700 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1703 for i in xrange(len(q.series)):
1701 for i in xrange(len(q.series)):
1704 status(i)
1702 status(i)
1705 return
1703 return
1706 if not args or args[0][0:1] in '-+':
1704 if not args or args[0][0:1] in '-+':
1707 if not q.applied:
1705 if not q.applied:
1708 raise util.Abort(_('no patches applied'))
1706 raise util.Abort(_('no patches applied'))
1709 patch = q.applied[-1].name
1707 patch = q.applied[-1].name
1710 if patch is None and args[0][0:1] not in '-+':
1708 if patch is None and args[0][0:1] not in '-+':
1711 patch = args.pop(0)
1709 patch = args.pop(0)
1712 if patch is None:
1710 if patch is None:
1713 raise util.Abort(_('no patch to work with'))
1711 raise util.Abort(_('no patch to work with'))
1714 if args or opts['none']:
1712 if args or opts['none']:
1715 q.set_guards(q.find_series(patch), args)
1713 q.set_guards(q.find_series(patch), args)
1716 q.save_dirty()
1714 q.save_dirty()
1717 else:
1715 else:
1718 status(q.series.index(q.lookup(patch)))
1716 status(q.series.index(q.lookup(patch)))
1719
1717
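# Illustrative guess at the on-disk form of guards, inferred from the guard_re
# handling in rename() later in this file: each guard is kept in the series
# file as a ' #'-prefixed suffix after the patch name, e.g.
# "other.patch #+2.6.17 #-stable".
def toy_series_line(name, guards):
    return name + ''.join(' #' + g for g in guards)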
1720 def header(ui, repo, patch=None):
1718 def header(ui, repo, patch=None):
1721 """Print the header of the topmost or specified patch"""
1719 """Print the header of the topmost or specified patch"""
1722 q = repo.mq
1720 q = repo.mq
1723
1721
1724 if patch:
1722 if patch:
1725 patch = q.lookup(patch)
1723 patch = q.lookup(patch)
1726 else:
1724 else:
1727 if not q.applied:
1725 if not q.applied:
1728 ui.write('No patches applied\n')
1726 ui.write('No patches applied\n')
1729 return 1
1727 return 1
1730 patch = q.lookup('qtip')
1728 patch = q.lookup('qtip')
1731 message = repo.mq.readheaders(patch)[0]
1729 message = repo.mq.readheaders(patch)[0]
1732
1730
1733 ui.write('\n'.join(message) + '\n')
1731 ui.write('\n'.join(message) + '\n')
1734
1732
1735 def lastsavename(path):
1733 def lastsavename(path):
1736 (directory, base) = os.path.split(path)
1734 (directory, base) = os.path.split(path)
1737 names = os.listdir(directory)
1735 names = os.listdir(directory)
1738 namere = re.compile("%s.([0-9]+)" % base)
1736 namere = re.compile("%s.([0-9]+)" % base)
1739 maxindex = None
1737 maxindex = None
1740 maxname = None
1738 maxname = None
1741 for f in names:
1739 for f in names:
1742 m = namere.match(f)
1740 m = namere.match(f)
1743 if m:
1741 if m:
1744 index = int(m.group(1))
1742 index = int(m.group(1))
1745 if maxindex == None or index > maxindex:
1743 if maxindex == None or index > maxindex:
1746 maxindex = index
1744 maxindex = index
1747 maxname = f
1745 maxname = f
1748 if maxname:
1746 if maxname:
1749 return (os.path.join(directory, maxname), maxindex)
1747 return (os.path.join(directory, maxname), maxindex)
1750 return (None, None)
1748 return (None, None)
1751
1749
1752 def savename(path):
1750 def savename(path):
1753 (last, index) = lastsavename(path)
1751 (last, index) = lastsavename(path)
1754 if last is None:
1752 if last is None:
1755 index = 0
1753 index = 0
1756 newpath = path + ".%d" % (index + 1)
1754 newpath = path + ".%d" % (index + 1)
1757 return newpath
1755 return newpath
1758
1756
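import re   # already imported by this module; repeated so the sketch stands alone

# Toy version of lastsavename()/savename() above: saved queues sit next to the
# patch directory as "<base>.1", "<base>.2", ...; the next save name simply
# increments the highest existing suffix.
def toy_next_savename(base, existing_names):
    pat = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    indexes = [int(m.group(1)) for m in map(pat.match, existing_names) if m]
    return "%s.%d" % (base, (max(indexes) if indexes else 0) + 1)

# e.g. toy_next_savename('patches', ['patches', 'patches.1', 'patches.2'])
# -> 'patches.3'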
1759 def push(ui, repo, patch=None, **opts):
1757 def push(ui, repo, patch=None, **opts):
1760 """push the next patch onto the stack"""
1758 """push the next patch onto the stack"""
1761 q = repo.mq
1759 q = repo.mq
1762 mergeq = None
1760 mergeq = None
1763
1761
1764 if opts['all']:
1762 if opts['all']:
1765 if not q.series:
1763 if not q.series:
1766 raise util.Abort(_('no patches in series'))
1764 raise util.Abort(_('no patches in series'))
1767 patch = q.series[-1]
1765 patch = q.series[-1]
1768 if opts['merge']:
1766 if opts['merge']:
1769 if opts['name']:
1767 if opts['name']:
1770 newpath = opts['name']
1768 newpath = opts['name']
1771 else:
1769 else:
1772 newpath, i = lastsavename(q.path)
1770 newpath, i = lastsavename(q.path)
1773 if not newpath:
1771 if not newpath:
1774 ui.warn("no saved queues found, please use -n\n")
1772 ui.warn("no saved queues found, please use -n\n")
1775 return 1
1773 return 1
1776 mergeq = queue(ui, repo.join(""), newpath)
1774 mergeq = queue(ui, repo.join(""), newpath)
1777 ui.warn("merging with queue at: %s\n" % mergeq.path)
1775 ui.warn("merging with queue at: %s\n" % mergeq.path)
1778 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1776 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1779 mergeq=mergeq)
1777 mergeq=mergeq)
1780 q.save_dirty()
1778 q.save_dirty()
1781 return ret
1779 return ret
1782
1780
1783 def pop(ui, repo, patch=None, **opts):
1781 def pop(ui, repo, patch=None, **opts):
1784 """pop the current patch off the stack"""
1782 """pop the current patch off the stack"""
1785 localupdate = True
1783 localupdate = True
1786 if opts['name']:
1784 if opts['name']:
1787 q = queue(ui, repo.join(""), repo.join(opts['name']))
1785 q = queue(ui, repo.join(""), repo.join(opts['name']))
1788 ui.warn('using patch queue: %s\n' % q.path)
1786 ui.warn('using patch queue: %s\n' % q.path)
1789 localupdate = False
1787 localupdate = False
1790 else:
1788 else:
1791 q = repo.mq
1789 q = repo.mq
1792 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1790 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1793 q.save_dirty()
1791 q.save_dirty()
1794 return 0
1792 return 0
1795
1793
1796 def rename(ui, repo, patch, name=None, **opts):
1794 def rename(ui, repo, patch, name=None, **opts):
1797 """rename a patch
1795 """rename a patch
1798
1796
1799 With one argument, renames the current patch to PATCH1.
1797 With one argument, renames the current patch to PATCH1.
1800 With two arguments, renames PATCH1 to PATCH2."""
1798 With two arguments, renames PATCH1 to PATCH2."""
1801
1799
1802 q = repo.mq
1800 q = repo.mq
1803
1801
1804 if not name:
1802 if not name:
1805 name = patch
1803 name = patch
1806 patch = None
1804 patch = None
1807
1805
1808 if patch:
1806 if patch:
1809 patch = q.lookup(patch)
1807 patch = q.lookup(patch)
1810 else:
1808 else:
1811 if not q.applied:
1809 if not q.applied:
1812 ui.write(_('No patches applied\n'))
1810 ui.write(_('No patches applied\n'))
1813 return
1811 return
1814 patch = q.lookup('qtip')
1812 patch = q.lookup('qtip')
1815 absdest = q.join(name)
1813 absdest = q.join(name)
1816 if os.path.isdir(absdest):
1814 if os.path.isdir(absdest):
1817 name = os.path.join(name, os.path.basename(patch))
1815 name = os.path.join(name, os.path.basename(patch))
1818 absdest = q.join(name)
1816 absdest = q.join(name)
1819 if os.path.exists(absdest):
1817 if os.path.exists(absdest):
1820 raise util.Abort(_('%s already exists') % absdest)
1818 raise util.Abort(_('%s already exists') % absdest)
1821
1819
1822 if name in q.series:
1820 if name in q.series:
1823 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1821 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1824
1822
1825 if ui.verbose:
1823 if ui.verbose:
1826 ui.write('Renaming %s to %s\n' % (patch, name))
1824 ui.write('Renaming %s to %s\n' % (patch, name))
1827 i = q.find_series(patch)
1825 i = q.find_series(patch)
1828 guards = q.guard_re.findall(q.full_series[i])
1826 guards = q.guard_re.findall(q.full_series[i])
1829 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1827 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1830 q.parse_series()
1828 q.parse_series()
1831 q.series_dirty = 1
1829 q.series_dirty = 1
1832
1830
1833 info = q.isapplied(patch)
1831 info = q.isapplied(patch)
1834 if info:
1832 if info:
1835 q.applied[info[0]] = statusentry(info[1], name)
1833 q.applied[info[0]] = statusentry(info[1], name)
1836 q.applied_dirty = 1
1834 q.applied_dirty = 1
1837
1835
1838 util.rename(q.join(patch), absdest)
1836 util.rename(q.join(patch), absdest)
1839 r = q.qrepo()
1837 r = q.qrepo()
1840 if r:
1838 if r:
1841 wlock = r.wlock()
1839 wlock = r.wlock()
1842 if r.dirstate.state(name) == 'r':
1840 if r.dirstate.state(name) == 'r':
1843 r.undelete([name], wlock)
1841 r.undelete([name], wlock)
1844 r.copy(patch, name, wlock)
1842 r.copy(patch, name, wlock)
1845 r.remove([patch], False, wlock)
1843 r.remove([patch], False, wlock)
1846
1844
1847 q.save_dirty()
1845 q.save_dirty()
1848
1846
1849 def restore(ui, repo, rev, **opts):
1847 def restore(ui, repo, rev, **opts):
1850 """restore the queue state saved by a rev"""
1848 """restore the queue state saved by a rev"""
1851 rev = repo.lookup(rev)
1849 rev = repo.lookup(rev)
1852 q = repo.mq
1850 q = repo.mq
1853 q.restore(repo, rev, delete=opts['delete'],
1851 q.restore(repo, rev, delete=opts['delete'],
1854 qupdate=opts['update'])
1852 qupdate=opts['update'])
1855 q.save_dirty()
1853 q.save_dirty()
1856 return 0
1854 return 0
1857
1855
1858 def save(ui, repo, **opts):
1856 def save(ui, repo, **opts):
1859 """save current queue state"""
1857 """save current queue state"""
1860 q = repo.mq
1858 q = repo.mq
1861 message = commands.logmessage(opts)
1859 message = commands.logmessage(opts)
1862 ret = q.save(repo, msg=message)
1860 ret = q.save(repo, msg=message)
1863 if ret:
1861 if ret:
1864 return ret
1862 return ret
1865 q.save_dirty()
1863 q.save_dirty()
1866 if opts['copy']:
1864 if opts['copy']:
1867 path = q.path
1865 path = q.path
1868 if opts['name']:
1866 if opts['name']:
1869 newpath = os.path.join(q.basepath, opts['name'])
1867 newpath = os.path.join(q.basepath, opts['name'])
1870 if os.path.exists(newpath):
1868 if os.path.exists(newpath):
1871 if not os.path.isdir(newpath):
1869 if not os.path.isdir(newpath):
1872 raise util.Abort(_('destination %s exists and is not '
1870 raise util.Abort(_('destination %s exists and is not '
1873 'a directory') % newpath)
1871 'a directory') % newpath)
1874 if not opts['force']:
1872 if not opts['force']:
1875 raise util.Abort(_('destination %s exists, '
1873 raise util.Abort(_('destination %s exists, '
1876 'use -f to force') % newpath)
1874 'use -f to force') % newpath)
1877 else:
1875 else:
1878 newpath = savename(path)
1876 newpath = savename(path)
1879 ui.warn("copy %s to %s\n" % (path, newpath))
1877 ui.warn("copy %s to %s\n" % (path, newpath))
1880 util.copyfiles(path, newpath)
1878 util.copyfiles(path, newpath)
1881 if opts['empty']:
1879 if opts['empty']:
1882 try:
1880 try:
1883 os.unlink(q.join(q.status_path))
1881 os.unlink(q.join(q.status_path))
1884 except:
1882 except:
1885 pass
1883 pass
1886 return 0
1884 return 0
1887
1885
1888 def strip(ui, repo, rev, **opts):
1886 def strip(ui, repo, rev, **opts):
1889 """strip a revision and all later revs on the same branch"""
1887 """strip a revision and all later revs on the same branch"""
1890 rev = repo.lookup(rev)
1888 rev = repo.lookup(rev)
1891 backup = 'all'
1889 backup = 'all'
1892 if opts['backup']:
1890 if opts['backup']:
1893 backup = 'strip'
1891 backup = 'strip'
1894 elif opts['nobackup']:
1892 elif opts['nobackup']:
1895 backup = 'none'
1893 backup = 'none'
1896 update = repo.dirstate.parents()[0] != revlog.nullid
1894 update = repo.dirstate.parents()[0] != revlog.nullid
1897 repo.mq.strip(repo, rev, backup=backup, update=update)
1895 repo.mq.strip(repo, rev, backup=backup, update=update)
1898 return 0
1896 return 0
1899
1897
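# Sketch of the backup-mode selection in strip() above: the default is 'all',
# --backup selects 'strip', --nobackup selects 'none', and --backup wins over
# --nobackup because of the elif ordering.
def toy_strip_backup(backup_opt, nobackup_opt):
    if backup_opt:
        return 'strip'
    if nobackup_opt:
        return 'none'
    return 'all'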
1900 def select(ui, repo, *args, **opts):
1898 def select(ui, repo, *args, **opts):
1901 '''set or print guarded patches to push
1899 '''set or print guarded patches to push
1902
1900
1903 Use the qguard command to set or print guards on a patch, then use
1901 Use the qguard command to set or print guards on a patch, then use
1904 qselect to tell mq which guards to use. A patch will be pushed if it
1902 qselect to tell mq which guards to use. A patch will be pushed if it
1905 has no guards, or if any of its positive guards matches the currently
1903 has no guards, or if any of its positive guards matches the currently
1906 selected guard; it will not be pushed if any negative guard matches it.
1904 selected guard; it will not be pushed if any negative guard matches it.
1907 For example:
1905 For example:
1908
1906
1909 qguard foo.patch -stable (negative guard)
1907 qguard foo.patch -stable (negative guard)
1910 qguard bar.patch +stable (positive guard)
1908 qguard bar.patch +stable (positive guard)
1911 qselect stable
1909 qselect stable
1912
1910
1913 This activates the "stable" guard. mq will skip foo.patch (because
1911 This activates the "stable" guard. mq will skip foo.patch (because
1914 it has a negative match) but push bar.patch (because it
1912 it has a negative match) but push bar.patch (because it
1915 has a positive match).
1913 has a positive match).
1916
1914
1917 With no arguments, prints the currently active guards.
1915 With no arguments, prints the currently active guards.
1918 With one argument, sets the active guard.
1916 With one argument, sets the active guard.
1919
1917
1920 Use -n/--none to deactivate guards (no other arguments needed).
1918 Use -n/--none to deactivate guards (no other arguments needed).
1921 When no guards are active, patches with positive guards are skipped
1919 When no guards are active, patches with positive guards are skipped
1922 and patches with negative guards are pushed.
1920 and patches with negative guards are pushed.
1923
1921
1924 qselect can change the guards on applied patches. It does not pop
1922 qselect can change the guards on applied patches. It does not pop
1925 guarded patches by default. Use --pop to pop back to the last applied
1923 guarded patches by default. Use --pop to pop back to the last applied
1926 patch that is not guarded. Use --reapply (which implies --pop) to push
1924 patch that is not guarded. Use --reapply (which implies --pop) to push
1927 back to the current patch afterwards, but skip guarded patches.
1925 back to the current patch afterwards, but skip guarded patches.
1928
1926
1929 Use -s/--series to print a list of all guards in the series file (no
1927 Use -s/--series to print a list of all guards in the series file (no
1930 other arguments needed). Use -v for more information.'''
1928 other arguments needed). Use -v for more information.'''
1931
1929
1932 q = repo.mq
1930 q = repo.mq
1933 guards = q.active()
1931 guards = q.active()
1934 if args or opts['none']:
1932 if args or opts['none']:
1935 old_unapplied = q.unapplied(repo)
1933 old_unapplied = q.unapplied(repo)
1936 old_guarded = [i for i in xrange(len(q.applied)) if
1934 old_guarded = [i for i in xrange(len(q.applied)) if
1937 not q.pushable(i)[0]]
1935 not q.pushable(i)[0]]
1938 q.set_active(args)
1936 q.set_active(args)
1939 q.save_dirty()
1937 q.save_dirty()
1940 if not args:
1938 if not args:
1941 ui.status(_('guards deactivated\n'))
1939 ui.status(_('guards deactivated\n'))
1942 if not opts['pop'] and not opts['reapply']:
1940 if not opts['pop'] and not opts['reapply']:
1943 unapplied = q.unapplied(repo)
1941 unapplied = q.unapplied(repo)
1944 guarded = [i for i in xrange(len(q.applied))
1942 guarded = [i for i in xrange(len(q.applied))
1945 if not q.pushable(i)[0]]
1943 if not q.pushable(i)[0]]
1946 if len(unapplied) != len(old_unapplied):
1944 if len(unapplied) != len(old_unapplied):
1947 ui.status(_('number of unguarded, unapplied patches has '
1945 ui.status(_('number of unguarded, unapplied patches has '
1948 'changed from %d to %d\n') %
1946 'changed from %d to %d\n') %
1949 (len(old_unapplied), len(unapplied)))
1947 (len(old_unapplied), len(unapplied)))
1950 if len(guarded) != len(old_guarded):
1948 if len(guarded) != len(old_guarded):
1951 ui.status(_('number of guarded, applied patches has changed '
1949 ui.status(_('number of guarded, applied patches has changed '
1952 'from %d to %d\n') %
1950 'from %d to %d\n') %
1953 (len(old_guarded), len(guarded)))
1951 (len(old_guarded), len(guarded)))
1954 elif opts['series']:
1952 elif opts['series']:
1955 guards = {}
1953 guards = {}
1956 noguards = 0
1954 noguards = 0
1957 for gs in q.series_guards:
1955 for gs in q.series_guards:
1958 if not gs:
1956 if not gs:
1959 noguards += 1
1957 noguards += 1
1960 for g in gs:
1958 for g in gs:
1961 guards.setdefault(g, 0)
1959 guards.setdefault(g, 0)
1962 guards[g] += 1
1960 guards[g] += 1
1963 if ui.verbose:
1961 if ui.verbose:
1964 guards['NONE'] = noguards
1962 guards['NONE'] = noguards
1965 guards = guards.items()
1963 guards = guards.items()
1966 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1964 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1967 if guards:
1965 if guards:
1968 ui.note(_('guards in series file:\n'))
1966 ui.note(_('guards in series file:\n'))
1969 for guard, count in guards:
1967 for guard, count in guards:
1970 ui.note('%2d ' % count)
1968 ui.note('%2d ' % count)
1971 ui.write(guard, '\n')
1969 ui.write(guard, '\n')
1972 else:
1970 else:
1973 ui.note(_('no guards in series file\n'))
1971 ui.note(_('no guards in series file\n'))
1974 else:
1972 else:
1975 if guards:
1973 if guards:
1976 ui.note(_('active guards:\n'))
1974 ui.note(_('active guards:\n'))
1977 for g in guards:
1975 for g in guards:
1978 ui.write(g, '\n')
1976 ui.write(g, '\n')
1979 else:
1977 else:
1980 ui.write(_('no active guards\n'))
1978 ui.write(_('no active guards\n'))
1981 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1979 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1982 popped = False
1980 popped = False
1983 if opts['pop'] or opts['reapply']:
1981 if opts['pop'] or opts['reapply']:
1984 for i in xrange(len(q.applied)):
1982 for i in xrange(len(q.applied)):
1985 pushable, reason = q.pushable(i)
1983 pushable, reason = q.pushable(i)
1986 if not pushable:
1984 if not pushable:
1987 ui.status(_('popping guarded patches\n'))
1985 ui.status(_('popping guarded patches\n'))
1988 popped = True
1986 popped = True
1989 if i == 0:
1987 if i == 0:
1990 q.pop(repo, all=True)
1988 q.pop(repo, all=True)
1991 else:
1989 else:
1992 q.pop(repo, i-1)
1990 q.pop(repo, i-1)
1993 break
1991 break
1994 if popped:
1992 if popped:
1995 try:
1993 try:
1996 if reapply:
1994 if reapply:
1997 ui.status(_('reapplying unguarded patches\n'))
1995 ui.status(_('reapplying unguarded patches\n'))
1998 q.push(repo, reapply)
1996 q.push(repo, reapply)
1999 finally:
1997 finally:
2000 q.save_dirty()
1998 q.save_dirty()
2001
1999
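# Toy model of the guard semantics described in the qselect docstring above
# (this is not the extension's pushable() implementation): a patch is pushed
# when no negative guard is active and either it has no positive guards or at
# least one positive guard is active.
def toy_pushable(guards, active):
    positives = [g[1:] for g in guards if g.startswith('+')]
    negatives = [g[1:] for g in guards if g.startswith('-')]
    if any(n in active for n in negatives):
        return False
    if positives and not any(p in active for p in positives):
        return False
    return True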
2002 def reposetup(ui, repo):
2000 def reposetup(ui, repo):
2003 class mqrepo(repo.__class__):
2001 class mqrepo(repo.__class__):
2004 def abort_if_wdir_patched(self, errmsg, force=False):
2002 def abort_if_wdir_patched(self, errmsg, force=False):
2005 if self.mq.applied and not force:
2003 if self.mq.applied and not force:
2006 parent = revlog.hex(self.dirstate.parents()[0])
2004 parent = revlog.hex(self.dirstate.parents()[0])
2007 if parent in [s.rev for s in self.mq.applied]:
2005 if parent in [s.rev for s in self.mq.applied]:
2008 raise util.Abort(errmsg)
2006 raise util.Abort(errmsg)
2009
2007
2010 def commit(self, *args, **opts):
2008 def commit(self, *args, **opts):
2011 if len(args) >= 6:
2009 if len(args) >= 6:
2012 force = args[5]
2010 force = args[5]
2013 else:
2011 else:
2014 force = opts.get('force')
2012 force = opts.get('force')
2015 self.abort_if_wdir_patched(
2013 self.abort_if_wdir_patched(
2016 _('cannot commit over an applied mq patch'),
2014 _('cannot commit over an applied mq patch'),
2017 force)
2015 force)
2018
2016
2019 return super(mqrepo, self).commit(*args, **opts)
2017 return super(mqrepo, self).commit(*args, **opts)
2020
2018
2021 def push(self, remote, force=False, revs=None):
2019 def push(self, remote, force=False, revs=None):
2022 if self.mq.applied and not force:
2020 if self.mq.applied and not force:
2023 raise util.Abort(_('source has mq patches applied'))
2021 raise util.Abort(_('source has mq patches applied'))
2024 return super(mqrepo, self).push(remote, force, revs)
2022 return super(mqrepo, self).push(remote, force, revs)
2025
2023
2026 def tags(self):
2024 def tags(self):
2027 if self.tagscache:
2025 if self.tagscache:
2028 return self.tagscache
2026 return self.tagscache
2029
2027
2030 tagscache = super(mqrepo, self).tags()
2028 tagscache = super(mqrepo, self).tags()
2031
2029
2032 q = self.mq
2030 q = self.mq
2033 if not q.applied:
2031 if not q.applied:
2034 return tagscache
2032 return tagscache
2035
2033
2036 mqtags = [(patch.rev, patch.name) for patch in q.applied]
2034 mqtags = [(patch.rev, patch.name) for patch in q.applied]
2037 mqtags.append((mqtags[-1][0], 'qtip'))
2035 mqtags.append((mqtags[-1][0], 'qtip'))
2038 mqtags.append((mqtags[0][0], 'qbase'))
2036 mqtags.append((mqtags[0][0], 'qbase'))
2039 for patch in mqtags:
2037 for patch in mqtags:
2040 if patch[1] in tagscache:
2038 if patch[1] in tagscache:
2041 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2039 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2042 else:
2040 else:
2043 tagscache[patch[1]] = revlog.bin(patch[0])
2041 tagscache[patch[1]] = revlog.bin(patch[0])
2044
2042
2045 return tagscache
2043 return tagscache
2046
2044
2047 def _branchtags(self):
2045 def _branchtags(self):
2048 q = self.mq
2046 q = self.mq
2049 if not q.applied:
2047 if not q.applied:
2050 return super(mqrepo, self)._branchtags()
2048 return super(mqrepo, self)._branchtags()
2051
2049
2052 self.branchcache = {} # avoid recursion in changectx
2050 self.branchcache = {} # avoid recursion in changectx
2053 cl = self.changelog
2051 cl = self.changelog
2054 partial, last, lrev = self._readbranchcache()
2052 partial, last, lrev = self._readbranchcache()
2055
2053
2056 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2054 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2057 start = lrev + 1
2055 start = lrev + 1
2058 if start < qbase:
2056 if start < qbase:
2059 # update the cache (excluding the patches) and save it
2057 # update the cache (excluding the patches) and save it
2060 self._updatebranchcache(partial, lrev+1, qbase)
2058 self._updatebranchcache(partial, lrev+1, qbase)
2061 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2059 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2062 start = qbase
2060 start = qbase
2063 # if start = qbase, the cache is as updated as it should be.
2061 # if start = qbase, the cache is as updated as it should be.
2064 # if start > qbase, the cache includes (part of) the patches.
2062 # if start > qbase, the cache includes (part of) the patches.
2065 # we might as well use it, but we won't save it.
2063 # we might as well use it, but we won't save it.
2066
2064
2067 # update the cache up to the tip
2065 # update the cache up to the tip
2068 self._updatebranchcache(partial, start, cl.count())
2066 self._updatebranchcache(partial, start, cl.count())
2069
2067
2070 return partial
2068 return partial
2071
2069
2072 if repo.local():
2070 if repo.local():
2073 repo.__class__ = mqrepo
2071 repo.__class__ = mqrepo
2074 repo.mq = queue(ui, repo.join(""))
2072 repo.mq = queue(ui, repo.join(""))
2075
2073
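# Sketch of the extra tags the tags() override above exposes while patches are
# applied: one tag per applied patch plus 'qtip' and 'qbase'; real tags of the
# same name win and only produce a warning.
def toy_mq_tags(applied):
    # applied: list of (rev hex, patch name) pairs, oldest first
    tags = dict((name, rev) for rev, name in applied)
    tags['qtip'] = applied[-1][0]
    tags['qbase'] = applied[0][0]
    return tags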
2076 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2074 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2077
2075
2078 cmdtable = {
2076 cmdtable = {
2079 "qapplied": (applied, [] + seriesopts, 'hg qapplied [-s] [PATCH]'),
2077 "qapplied": (applied, [] + seriesopts, 'hg qapplied [-s] [PATCH]'),
2080 "qclone": (clone,
2078 "qclone": (clone,
2081 [('', 'pull', None, _('use pull protocol to copy metadata')),
2079 [('', 'pull', None, _('use pull protocol to copy metadata')),
2082 ('U', 'noupdate', None, _('do not update the new working directories')),
2080 ('U', 'noupdate', None, _('do not update the new working directories')),
2083 ('', 'uncompressed', None,
2081 ('', 'uncompressed', None,
2084 _('use uncompressed transfer (fast over LAN)')),
2082 _('use uncompressed transfer (fast over LAN)')),
2085 ('e', 'ssh', '', _('specify ssh command to use')),
2083 ('e', 'ssh', '', _('specify ssh command to use')),
2086 ('p', 'patches', '', _('location of source patch repo')),
2084 ('p', 'patches', '', _('location of source patch repo')),
2087 ('', 'remotecmd', '',
2085 ('', 'remotecmd', '',
2088 _('specify hg command to run on the remote side'))],
2086 _('specify hg command to run on the remote side'))],
2089 'hg qclone [OPTION]... SOURCE [DEST]'),
2087 'hg qclone [OPTION]... SOURCE [DEST]'),
2090 "qcommit|qci":
2088 "qcommit|qci":
2091 (commit,
2089 (commit,
2092 commands.table["^commit|ci"][1],
2090 commands.table["^commit|ci"][1],
2093 'hg qcommit [OPTION]... [FILE]...'),
2091 'hg qcommit [OPTION]... [FILE]...'),
2094 "^qdiff": (diff,
2092 "^qdiff": (diff,
2095 [('g', 'git', None, _('use git extended diff format')),
2093 [('g', 'git', None, _('use git extended diff format')),
2096 ('I', 'include', [], _('include names matching the given patterns')),
2094 ('I', 'include', [], _('include names matching the given patterns')),
2097 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2095 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2098 'hg qdiff [-I] [-X] [FILE]...'),
2096 'hg qdiff [-I] [-X] [FILE]...'),
2099 "qdelete|qremove|qrm":
2097 "qdelete|qremove|qrm":
2100 (delete,
2098 (delete,
2101 [('k', 'keep', None, _('keep patch file')),
2099 [('k', 'keep', None, _('keep patch file')),
2102 ('r', 'rev', [], _('stop managing a revision'))],
2100 ('r', 'rev', [], _('stop managing a revision'))],
2103 'hg qdelete [-k] [-r REV]... PATCH...'),
2101 'hg qdelete [-k] [-r REV]... PATCH...'),
2104 'qfold':
2102 'qfold':
2105 (fold,
2103 (fold,
2106 [('e', 'edit', None, _('edit patch header')),
2104 [('e', 'edit', None, _('edit patch header')),
2107 ('k', 'keep', None, _('keep folded patch files'))
2105 ('k', 'keep', None, _('keep folded patch files'))
2108 ] + commands.commitopts,
2106 ] + commands.commitopts,
2109 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
2107 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
2110 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
2108 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
2111 ('n', 'none', None, _('drop all guards'))],
2109 ('n', 'none', None, _('drop all guards'))],
2112 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
2110 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
2113 'qheader': (header, [],
2111 'qheader': (header, [],
2114 _('hg qheader [PATCH]')),
2112 _('hg qheader [PATCH]')),
2115 "^qimport":
2113 "^qimport":
2116 (qimport,
2114 (qimport,
2117 [('e', 'existing', None, 'import file in patch dir'),
2115 [('e', 'existing', None, 'import file in patch dir'),
2118 ('n', 'name', '', 'patch file name'),
2116 ('n', 'name', '', 'patch file name'),
2119 ('f', 'force', None, 'overwrite existing files'),
2117 ('f', 'force', None, 'overwrite existing files'),
2120 ('r', 'rev', [], 'place existing revisions under mq control'),
2118 ('r', 'rev', [], 'place existing revisions under mq control'),
2121 ('g', 'git', None, _('use git extended diff format'))],
2119 ('g', 'git', None, _('use git extended diff format'))],
2122 'hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...'),
2120 'hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...'),
2123 "^qinit":
2121 "^qinit":
2124 (init,
2122 (init,
2125 [('c', 'create-repo', None, 'create queue repository')],
2123 [('c', 'create-repo', None, 'create queue repository')],
2126 'hg qinit [-c]'),
2124 'hg qinit [-c]'),
2127 "qnew":
2125 "qnew":
2128 (new,
2126 (new,
2129 [('e', 'edit', None, _('edit commit message')),
2127 [('e', 'edit', None, _('edit commit message')),
2130 ('f', 'force', None, _('import uncommitted changes into patch'))
2128 ('f', 'force', None, _('import uncommitted changes into patch'))
2131 ] + commands.commitopts,
2129 ] + commands.commitopts,
2132 'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
2130 'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
2133 "qnext": (next, [] + seriesopts, 'hg qnext [-s]'),
2131 "qnext": (next, [] + seriesopts, 'hg qnext [-s]'),
2134 "qprev": (prev, [] + seriesopts, 'hg qprev [-s]'),
2132 "qprev": (prev, [] + seriesopts, 'hg qprev [-s]'),
2135 "^qpop":
2133 "^qpop":
2136 (pop,
2134 (pop,
2137 [('a', 'all', None, 'pop all patches'),
2135 [('a', 'all', None, 'pop all patches'),
2138 ('n', 'name', '', 'queue name to pop'),
2136 ('n', 'name', '', 'queue name to pop'),
2139 ('f', 'force', None, 'forget any local changes')],
2137 ('f', 'force', None, 'forget any local changes')],
2140 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
2138 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
2141 "^qpush":
2139 "^qpush":
2142 (push,
2140 (push,
2143 [('f', 'force', None, 'apply if the patch has rejects'),
2141 [('f', 'force', None, 'apply if the patch has rejects'),
2144 ('l', 'list', None, 'list patch name in commit text'),
2142 ('l', 'list', None, 'list patch name in commit text'),
2145 ('a', 'all', None, 'apply all patches'),
2143 ('a', 'all', None, 'apply all patches'),
2146 ('m', 'merge', None, 'merge from another queue'),
2144 ('m', 'merge', None, 'merge from another queue'),
2147 ('n', 'name', '', 'merge queue name')],
2145 ('n', 'name', '', 'merge queue name')],
2148 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
2146 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
2149 "^qrefresh":
2147 "^qrefresh":
2150 (refresh,
2148 (refresh,
2151 [('e', 'edit', None, _('edit commit message')),
2149 [('e', 'edit', None, _('edit commit message')),
2152 ('g', 'git', None, _('use git extended diff format')),
2150 ('g', 'git', None, _('use git extended diff format')),
2153 ('s', 'short', None, 'refresh only files already in the patch'),
2151 ('s', 'short', None, 'refresh only files already in the patch'),
2154 ('I', 'include', [], _('include names matching the given patterns')),
2152 ('I', 'include', [], _('include names matching the given patterns')),
2155 ('X', 'exclude', [], _('exclude names matching the given patterns'))
2153 ('X', 'exclude', [], _('exclude names matching the given patterns'))
2156 ] + commands.commitopts,
2154 ] + commands.commitopts,
2157 'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] FILES...'),
2155 'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] FILES...'),
2158 'qrename|qmv':
2156 'qrename|qmv':
2159 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
2157 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
2160 "qrestore":
2158 "qrestore":
2161 (restore,
2159 (restore,
2162 [('d', 'delete', None, 'delete save entry'),
2160 [('d', 'delete', None, 'delete save entry'),
2163 ('u', 'update', None, 'update queue working dir')],
2161 ('u', 'update', None, 'update queue working dir')],
2164 'hg qrestore [-d] [-u] REV'),
2162 'hg qrestore [-d] [-u] REV'),
2165 "qsave":
2163 "qsave":
2166 (save,
2164 (save,
2167 [('c', 'copy', None, 'copy patch directory'),
2165 [('c', 'copy', None, 'copy patch directory'),
2168 ('n', 'name', '', 'copy directory name'),
2166 ('n', 'name', '', 'copy directory name'),
2169 ('e', 'empty', None, 'clear queue status file'),
2167 ('e', 'empty', None, 'clear queue status file'),
2170 ('f', 'force', None, 'force copy')] + commands.commitopts,
2168 ('f', 'force', None, 'force copy')] + commands.commitopts,
2171 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
2169 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
2172 "qselect": (select,
2170 "qselect": (select,
2173 [('n', 'none', None, _('disable all guards')),
2171 [('n', 'none', None, _('disable all guards')),
2174 ('s', 'series', None, _('list all guards in series file')),
2172 ('s', 'series', None, _('list all guards in series file')),
2175 ('', 'pop', None,
2173 ('', 'pop', None,
2176 _('pop to before first guarded applied patch')),
2174 _('pop to before first guarded applied patch')),
2177 ('', 'reapply', None, _('pop, then reapply patches'))],
2175 ('', 'reapply', None, _('pop, then reapply patches'))],
2178 'hg qselect [OPTION...] [GUARD...]'),
2176 'hg qselect [OPTION...] [GUARD...]'),
2179 "qseries":
2177 "qseries":
2180 (series,
2178 (series,
2181 [('m', 'missing', None, 'print patches not in series')] + seriesopts,
2179 [('m', 'missing', None, 'print patches not in series')] + seriesopts,
2182 'hg qseries [-ms]'),
2180 'hg qseries [-ms]'),
2183 "^strip":
2181 "^strip":
2184 (strip,
2182 (strip,
2185 [('f', 'force', None, 'force multi-head removal'),
2183 [('f', 'force', None, 'force multi-head removal'),
2186 ('b', 'backup', None, 'bundle unrelated changesets'),
2184 ('b', 'backup', None, 'bundle unrelated changesets'),
2187 ('n', 'nobackup', None, 'no backups')],
2185 ('n', 'nobackup', None, 'no backups')],
2188 'hg strip [-f] [-b] [-n] REV'),
2186 'hg strip [-f] [-b] [-n] REV'),
2189 "qtop": (top, [] + seriesopts, 'hg qtop [-s]'),
2187 "qtop": (top, [] + seriesopts, 'hg qtop [-s]'),
2190 "qunapplied": (unapplied, [] + seriesopts, 'hg qunapplied [-s] [PATCH]'),
2188 "qunapplied": (unapplied, [] + seriesopts, 'hg qunapplied [-s] [PATCH]'),
2191 }
2189 }
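(Editorial sketch, not part of this changeset.) Each cmdtable entry above maps a command name -- '|' separates aliases, and a leading '^' lists the command in the short help -- to a (function, options, usage string) tuple. A hypothetical entry in the same shape:

    cmdtable = {
        # the 'qt' alias and the '^' short-help flag here are invented for illustration
        "^qtop|qt": (top, [] + seriesopts, 'hg qtop [-s]'),
    }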
@@ -1,280 +1,279 b''
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to email notifications to people when changesets are
8 # hook extension to email notifications to people when changesets are
9 # committed to a repo they subscribe to.
9 # committed to a repo they subscribe to.
10 #
10 #
11 # default mode is to print messages to stdout, for testing and
11 # default mode is to print messages to stdout, for testing and
12 # configuring.
12 # configuring.
13 #
13 #
14 # to use, configure the notify extension and enable it in hgrc like this:
14 # to use, configure the notify extension and enable it in hgrc like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.notify =
17 # hgext.notify =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # one email for each incoming changeset
20 # # one email for each incoming changeset
21 # incoming.notify = python:hgext.notify.hook
21 # incoming.notify = python:hgext.notify.hook
22 # # batch emails when many changesets incoming at one time
22 # # batch emails when many changesets incoming at one time
23 # changegroup.notify = python:hgext.notify.hook
23 # changegroup.notify = python:hgext.notify.hook
24 #
24 #
25 # [notify]
25 # [notify]
26 # # config items go in here
26 # # config items go in here
27 #
27 #
28 # config items:
28 # config items:
29 #
29 #
30 # REQUIRED:
30 # REQUIRED:
31 # config = /path/to/file # file containing subscriptions
31 # config = /path/to/file # file containing subscriptions
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # test = True # print messages to stdout for testing
34 # test = True # print messages to stdout for testing
35 # strip = 3 # number of slashes to strip for url paths
35 # strip = 3 # number of slashes to strip for url paths
36 # domain = example.com # domain to use if committer missing domain
36 # domain = example.com # domain to use if committer missing domain
37 # style = ... # style file to use when formatting email
37 # style = ... # style file to use when formatting email
38 # template = ... # template to use when formatting email
38 # template = ... # template to use when formatting email
39 # incoming = ... # template to use when run as incoming hook
39 # incoming = ... # template to use when run as incoming hook
40 # changegroup = ... # template when run as changegroup hook
40 # changegroup = ... # template when run as changegroup hook
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 # maxsubject = 67 # truncate subject line longer than this
42 # maxsubject = 67 # truncate subject line longer than this
43 # diffstat = True # add a diffstat before the diff content
43 # diffstat = True # add a diffstat before the diff content
44 # sources = serve # notify if source of incoming changes in this list
44 # sources = serve # notify if source of incoming changes in this list
45 # # (serve == ssh or http, push, pull, bundle)
45 # # (serve == ssh or http, push, pull, bundle)
46 # [email]
46 # [email]
47 # from = user@host.com # email address to send as if none given
47 # from = user@host.com # email address to send as if none given
48 # [web]
48 # [web]
49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
50 #
50 #
51 # notify config file has same format as regular hgrc. it has two
51 # notify config file has same format as regular hgrc. it has two
52 # sections so you can express subscriptions in whatever way is handier
52 # sections so you can express subscriptions in whatever way is handier
53 # for you.
53 # for you.
54 #
54 #
55 # [usersubs]
55 # [usersubs]
56 # # key is subscriber email, value is ","-separated list of glob patterns
56 # # key is subscriber email, value is ","-separated list of glob patterns
57 # user@host = pattern
57 # user@host = pattern
58 #
58 #
59 # [reposubs]
59 # [reposubs]
60 # # key is glob pattern, value is ","-separated list of subscriber emails
60 # # key is glob pattern, value is ","-separated list of subscriber emails
61 # pattern = user@host
61 # pattern = user@host
62 #
62 #
63 # glob patterns are matched against path to repo root.
63 # glob patterns are matched against path to repo root.
64 #
64 #
65 # if you like, you can put the notify config file in a repo that users
65 # if you like, you can put the notify config file in a repo that users
66 # can push changes to; they can then manage their own subscriptions.
66 # can push changes to; they can then manage their own subscriptions.
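(Illustrative sketch, not part of this changeset; the paths and addresses are invented.) The [usersubs]/[reposubs] globs described above are matched against the repository root with fnmatch, which is what subscribers() further below does:

    import fnmatch
    repo_root = '/srv/hg/projects/widget'                  # hypothetical repository root
    usersubs = {'dev@example.com': '/srv/hg/projects/*'}   # hypothetical subscription
    for user, pats in usersubs.items():
        for pat in pats.split(','):
            if fnmatch.fnmatch(repo_root, pat.strip()):
                print '%s will be notified about %s' % (user, repo_root)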
67
67
68 from mercurial.demandload import *
69 from mercurial.i18n import gettext as _
68 from mercurial.i18n import gettext as _
70 from mercurial.node import *
69 from mercurial.node import *
71 demandload(globals(), 'mercurial:patch,cmdutil,templater,util,mail')
70 from mercurial import patch, cmdutil, templater, util, mail
72 demandload(globals(), 'email.Parser fnmatch socket time')
71 import email.Parser, fnmatch, socket, time
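(For reference; the snippet below merely restates the hunk above.) The same mechanical rewrite recurs in the files below: a demandload() call naming modules becomes an ordinary import statement.

    # removed:
    #   from mercurial.demandload import *
    #   demandload(globals(), 'mercurial:patch,cmdutil,templater,util,mail')
    #   demandload(globals(), 'email.Parser fnmatch socket time')
    # added:
    from mercurial import patch, cmdutil, templater, util, mail
    import email.Parser, fnmatch, socket, time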
73
72
74 # template for single changeset can include email headers.
73 # template for single changeset can include email headers.
75 single_template = '''
74 single_template = '''
76 Subject: changeset in {webroot}: {desc|firstline|strip}
75 Subject: changeset in {webroot}: {desc|firstline|strip}
77 From: {author}
76 From: {author}
78
77
79 changeset {node|short} in {root}
78 changeset {node|short} in {root}
80 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
81 description:
80 description:
82 \t{desc|tabindent|strip}
81 \t{desc|tabindent|strip}
83 '''.lstrip()
82 '''.lstrip()
84
83
85 # template for multiple changesets should not contain email headers,
84 # template for multiple changesets should not contain email headers,
86 # because only first set of headers will be used and result will look
85 # because only first set of headers will be used and result will look
87 # strange.
86 # strange.
88 multiple_template = '''
87 multiple_template = '''
89 changeset {node|short} in {root}
88 changeset {node|short} in {root}
90 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
91 summary: {desc|firstline}
90 summary: {desc|firstline}
92 '''
91 '''
93
92
94 deftemplates = {
93 deftemplates = {
95 'changegroup': multiple_template,
94 'changegroup': multiple_template,
96 }
95 }
97
96
98 class notifier(object):
97 class notifier(object):
99 '''email notification class.'''
98 '''email notification class.'''
100
99
101 def __init__(self, ui, repo, hooktype):
100 def __init__(self, ui, repo, hooktype):
102 self.ui = ui
101 self.ui = ui
103 cfg = self.ui.config('notify', 'config')
102 cfg = self.ui.config('notify', 'config')
104 if cfg:
103 if cfg:
105 self.ui.readsections(cfg, 'usersubs', 'reposubs')
104 self.ui.readsections(cfg, 'usersubs', 'reposubs')
106 self.repo = repo
105 self.repo = repo
107 self.stripcount = int(self.ui.config('notify', 'strip', 0))
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
108 self.root = self.strip(self.repo.root)
107 self.root = self.strip(self.repo.root)
109 self.domain = self.ui.config('notify', 'domain')
108 self.domain = self.ui.config('notify', 'domain')
110 self.subs = self.subscribers()
109 self.subs = self.subscribers()
111
110
112 mapfile = self.ui.config('notify', 'style')
111 mapfile = self.ui.config('notify', 'style')
113 template = (self.ui.config('notify', hooktype) or
112 template = (self.ui.config('notify', hooktype) or
114 self.ui.config('notify', 'template'))
113 self.ui.config('notify', 'template'))
115 self.t = cmdutil.changeset_templater(self.ui, self.repo,
114 self.t = cmdutil.changeset_templater(self.ui, self.repo,
116 False, mapfile, False)
115 False, mapfile, False)
117 if not mapfile and not template:
116 if not mapfile and not template:
118 template = deftemplates.get(hooktype) or single_template
117 template = deftemplates.get(hooktype) or single_template
119 if template:
118 if template:
120 template = templater.parsestring(template, quoted=False)
119 template = templater.parsestring(template, quoted=False)
121 self.t.use_template(template)
120 self.t.use_template(template)
122
121
123 def strip(self, path):
122 def strip(self, path):
124 '''strip leading slashes from local path, turn into web-safe path.'''
123 '''strip leading slashes from local path, turn into web-safe path.'''
125
124
126 path = util.pconvert(path)
125 path = util.pconvert(path)
127 count = self.stripcount
126 count = self.stripcount
128 while count > 0:
127 while count > 0:
129 c = path.find('/')
128 c = path.find('/')
130 if c == -1:
129 if c == -1:
131 break
130 break
132 path = path[c+1:]
131 path = path[c+1:]
133 count -= 1
132 count -= 1
134 return path
133 return path
135
134
136 def fixmail(self, addr):
135 def fixmail(self, addr):
137 '''try to clean up email addresses.'''
136 '''try to clean up email addresses.'''
138
137
139 addr = templater.email(addr.strip())
138 addr = templater.email(addr.strip())
140 a = addr.find('@localhost')
139 a = addr.find('@localhost')
141 if a != -1:
140 if a != -1:
142 addr = addr[:a]
141 addr = addr[:a]
143 if '@' not in addr:
142 if '@' not in addr:
144 return addr + '@' + self.domain
143 return addr + '@' + self.domain
145 return addr
144 return addr
146
145
147 def subscribers(self):
146 def subscribers(self):
148 '''return list of email addresses of subscribers to this repo.'''
147 '''return list of email addresses of subscribers to this repo.'''
149
148
150 subs = {}
149 subs = {}
151 for user, pats in self.ui.configitems('usersubs'):
150 for user, pats in self.ui.configitems('usersubs'):
152 for pat in pats.split(','):
151 for pat in pats.split(','):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
152 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 subs[self.fixmail(user)] = 1
153 subs[self.fixmail(user)] = 1
155 for pat, users in self.ui.configitems('reposubs'):
154 for pat, users in self.ui.configitems('reposubs'):
156 if fnmatch.fnmatch(self.repo.root, pat):
155 if fnmatch.fnmatch(self.repo.root, pat):
157 for user in users.split(','):
156 for user in users.split(','):
158 subs[self.fixmail(user)] = 1
157 subs[self.fixmail(user)] = 1
159 subs = subs.keys()
158 subs = subs.keys()
160 subs.sort()
159 subs.sort()
161 return subs
160 return subs
162
161
163 def url(self, path=None):
162 def url(self, path=None):
164 return self.ui.config('web', 'baseurl') + (path or self.root)
163 return self.ui.config('web', 'baseurl') + (path or self.root)
165
164
166 def node(self, node):
165 def node(self, node):
167 '''format one changeset.'''
166 '''format one changeset.'''
168
167
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
168 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 baseurl=self.ui.config('web', 'baseurl'),
169 baseurl=self.ui.config('web', 'baseurl'),
171 root=self.repo.root,
170 root=self.repo.root,
172 webroot=self.root)
171 webroot=self.root)
173
172
174 def skipsource(self, source):
173 def skipsource(self, source):
175 '''true if incoming changes from this source should be skipped.'''
174 '''true if incoming changes from this source should be skipped.'''
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
175 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 return source not in ok_sources
176 return source not in ok_sources
178
177
179 def send(self, node, count, data):
178 def send(self, node, count, data):
180 '''send message.'''
179 '''send message.'''
181
180
182 p = email.Parser.Parser()
181 p = email.Parser.Parser()
183 msg = p.parsestr(data)
182 msg = p.parsestr(data)
184
183
185 def fix_subject():
184 def fix_subject():
186 '''try to make subject line exist and be useful.'''
185 '''try to make subject line exist and be useful.'''
187
186
188 subject = msg['Subject']
187 subject = msg['Subject']
189 if not subject:
188 if not subject:
190 if count > 1:
189 if count > 1:
191 subject = _('%s: %d new changesets') % (self.root, count)
190 subject = _('%s: %d new changesets') % (self.root, count)
192 else:
191 else:
193 changes = self.repo.changelog.read(node)
192 changes = self.repo.changelog.read(node)
194 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
193 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 subject = '%s: %s' % (self.root, s)
194 subject = '%s: %s' % (self.root, s)
196 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
195 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 if maxsubject and len(subject) > maxsubject:
196 if maxsubject and len(subject) > maxsubject:
198 subject = subject[:maxsubject-3] + '...'
197 subject = subject[:maxsubject-3] + '...'
199 del msg['Subject']
198 del msg['Subject']
200 msg['Subject'] = subject
199 msg['Subject'] = subject
201
200
202 def fix_sender():
201 def fix_sender():
203 '''try to make message have proper sender.'''
202 '''try to make message have proper sender.'''
204
203
205 sender = msg['From']
204 sender = msg['From']
206 if not sender:
205 if not sender:
207 sender = self.ui.config('email', 'from') or self.ui.username()
206 sender = self.ui.config('email', 'from') or self.ui.username()
208 if '@' not in sender or '@localhost' in sender:
207 if '@' not in sender or '@localhost' in sender:
209 sender = self.fixmail(sender)
208 sender = self.fixmail(sender)
210 del msg['From']
209 del msg['From']
211 msg['From'] = sender
210 msg['From'] = sender
212
211
213 fix_subject()
212 fix_subject()
214 fix_sender()
213 fix_sender()
215
214
216 msg['X-Hg-Notification'] = 'changeset ' + short(node)
215 msg['X-Hg-Notification'] = 'changeset ' + short(node)
217 if not msg['Message-Id']:
216 if not msg['Message-Id']:
218 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
217 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
219 (short(node), int(time.time()),
218 (short(node), int(time.time()),
220 hash(self.repo.root), socket.getfqdn()))
219 hash(self.repo.root), socket.getfqdn()))
221 msg['To'] = ', '.join(self.subs)
220 msg['To'] = ', '.join(self.subs)
222
221
223 msgtext = msg.as_string(0)
222 msgtext = msg.as_string(0)
224 if self.ui.configbool('notify', 'test', True):
223 if self.ui.configbool('notify', 'test', True):
225 self.ui.write(msgtext)
224 self.ui.write(msgtext)
226 if not msgtext.endswith('\n'):
225 if not msgtext.endswith('\n'):
227 self.ui.write('\n')
226 self.ui.write('\n')
228 else:
227 else:
229 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
228 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
230 (len(self.subs), count))
229 (len(self.subs), count))
231 mail.sendmail(self.ui, templater.email(msg['From']),
230 mail.sendmail(self.ui, templater.email(msg['From']),
232 self.subs, msgtext)
231 self.subs, msgtext)
233
232
234 def diff(self, node, ref):
233 def diff(self, node, ref):
235 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
234 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
236 if maxdiff == 0:
235 if maxdiff == 0:
237 return
236 return
238 prev = self.repo.changelog.parents(node)[0]
237 prev = self.repo.changelog.parents(node)[0]
239 self.ui.pushbuffer()
238 self.ui.pushbuffer()
240 patch.diff(self.repo, prev, ref)
239 patch.diff(self.repo, prev, ref)
241 difflines = self.ui.popbuffer().splitlines(1)
240 difflines = self.ui.popbuffer().splitlines(1)
242 if self.ui.configbool('notify', 'diffstat', True):
241 if self.ui.configbool('notify', 'diffstat', True):
243 s = patch.diffstat(difflines)
242 s = patch.diffstat(difflines)
244 self.ui.write('\ndiffstat:\n\n' + s)
243 self.ui.write('\ndiffstat:\n\n' + s)
245 if maxdiff > 0 and len(difflines) > maxdiff:
244 if maxdiff > 0 and len(difflines) > maxdiff:
246 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
245 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
247 (len(difflines), maxdiff))
246 (len(difflines), maxdiff))
248 difflines = difflines[:maxdiff]
247 difflines = difflines[:maxdiff]
249 elif difflines:
248 elif difflines:
250 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
249 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
251 self.ui.write(*difflines)
250 self.ui.write(*difflines)
252
251
253 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
252 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
254 '''send email notifications to interested subscribers.
253 '''send email notifications to interested subscribers.
255
254
256 if used as changegroup hook, send one email for all changesets in
255 if used as changegroup hook, send one email for all changesets in
257 changegroup. else send one email per changeset.'''
256 changegroup. else send one email per changeset.'''
258 n = notifier(ui, repo, hooktype)
257 n = notifier(ui, repo, hooktype)
259 if not n.subs:
258 if not n.subs:
260 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
259 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
261 return
260 return
262 if n.skipsource(source):
261 if n.skipsource(source):
263 ui.debug(_('notify: changes have source "%s" - skipping\n') %
262 ui.debug(_('notify: changes have source "%s" - skipping\n') %
264 source)
263 source)
265 return
264 return
266 node = bin(node)
265 node = bin(node)
267 ui.pushbuffer()
266 ui.pushbuffer()
268 if hooktype == 'changegroup':
267 if hooktype == 'changegroup':
269 start = repo.changelog.rev(node)
268 start = repo.changelog.rev(node)
270 end = repo.changelog.count()
269 end = repo.changelog.count()
271 count = end - start
270 count = end - start
272 for rev in xrange(start, end):
271 for rev in xrange(start, end):
273 n.node(repo.changelog.node(rev))
272 n.node(repo.changelog.node(rev))
274 n.diff(node, repo.changelog.tip())
273 n.diff(node, repo.changelog.tip())
275 else:
274 else:
276 count = 1
275 count = 1
277 n.node(node)
276 n.node(node)
278 n.diff(node, node)
277 n.diff(node, node)
279 data = ui.popbuffer()
278 data = ui.popbuffer()
280 n.send(node, count, data)
279 n.send(node, count, data)
@@ -1,320 +1,319 b''
1 # Command for sending a collection of Mercurial changesets as a series
1 # Command for sending a collection of Mercurial changesets as a series
2 # of patch emails.
2 # of patch emails.
3 #
3 #
4 # The series is started off with a "[PATCH 0 of N]" introduction,
4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 # which describes the series as a whole.
5 # which describes the series as a whole.
6 #
6 #
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 # the first line of the changeset description as the subject text.
8 # the first line of the changeset description as the subject text.
9 # The message contains two or three body parts:
9 # The message contains two or three body parts:
10 #
10 #
11 # The remainder of the changeset description.
11 # The remainder of the changeset description.
12 #
12 #
13 # [Optional] If the diffstat program is installed, the result of
13 # [Optional] If the diffstat program is installed, the result of
14 # running diffstat on the patch.
14 # running diffstat on the patch.
15 #
15 #
16 # The patch itself, as generated by "hg export".
16 # The patch itself, as generated by "hg export".
17 #
17 #
18 # Each message refers to all of its predecessors using the In-Reply-To
18 # Each message refers to all of its predecessors using the In-Reply-To
19 # and References headers, so they will show up as a sequence in
19 # and References headers, so they will show up as a sequence in
20 # threaded mail and news readers, and in mail archives.
20 # threaded mail and news readers, and in mail archives.
21 #
21 #
22 # For each changeset, you will be prompted with a diffstat summary and
22 # For each changeset, you will be prompted with a diffstat summary and
23 # the changeset summary, so you can be sure you are sending the right
23 # the changeset summary, so you can be sure you are sending the right
24 # changes.
24 # changes.
25 #
25 #
26 # To enable this extension:
26 # To enable this extension:
27 #
27 #
28 # [extensions]
28 # [extensions]
29 # hgext.patchbomb =
29 # hgext.patchbomb =
30 #
30 #
31 # To configure other defaults, add a section like this to your hgrc
31 # To configure other defaults, add a section like this to your hgrc
32 # file:
32 # file:
33 #
33 #
34 # [email]
34 # [email]
35 # from = My Name <my@email>
35 # from = My Name <my@email>
36 # to = recipient1, recipient2, ...
36 # to = recipient1, recipient2, ...
37 # cc = cc1, cc2, ...
37 # cc = cc1, cc2, ...
38 # bcc = bcc1, bcc2, ...
38 # bcc = bcc1, bcc2, ...
39 #
39 #
40 # Then you can use the "hg email" command to mail a series of changesets
40 # Then you can use the "hg email" command to mail a series of changesets
41 # as a patchbomb.
41 # as a patchbomb.
42 #
42 #
43 # To avoid sending patches prematurely, it is a good idea to first run
43 # To avoid sending patches prematurely, it is a good idea to first run
44 # the "email" command with the "-n" option (test only). You will be
44 # the "email" command with the "-n" option (test only). You will be
45 # prompted for an email recipient address, a subject and an introductory
45 # prompted for an email recipient address, a subject and an introductory
46 # message describing the patches of your patchbomb. Then when all is
46 # message describing the patches of your patchbomb. Then when all is
47 # done, your pager will be fired up once for each patchbomb message, so
47 # done, your pager will be fired up once for each patchbomb message, so
48 # you can verify everything is alright.
48 # you can verify everything is alright.
49 #
49 #
50 # The "-m" (mbox) option is also very useful. Instead of previewing
50 # The "-m" (mbox) option is also very useful. Instead of previewing
51 # each patchbomb message in a pager or sending the messages directly,
51 # each patchbomb message in a pager or sending the messages directly,
52 # it will create a UNIX mailbox file with the patch emails. This
52 # it will create a UNIX mailbox file with the patch emails. This
53 # mailbox file can be previewed with any mail user agent which supports
53 # mailbox file can be previewed with any mail user agent which supports
54 # UNIX mbox files, e.g. with mutt:
54 # UNIX mbox files, e.g. with mutt:
55 #
55 #
56 # % mutt -R -f mbox
56 # % mutt -R -f mbox
57 #
57 #
58 # When you are previewing the patchbomb messages, you can use `formail'
58 # When you are previewing the patchbomb messages, you can use `formail'
59 # (a utility that is commonly installed as part of the procmail package)
59 # (a utility that is commonly installed as part of the procmail package)
60 # to send each message out:
60 # to send each message out:
61 #
61 #
62 # % formail -s sendmail -bm -t < mbox
62 # % formail -s sendmail -bm -t < mbox
63 #
63 #
64 # That should be all. Now your patchbomb is on its way out.
64 # That should be all. Now your patchbomb is on its way out.
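(Minimal sketch, not part of this changeset; all header values are invented.) The threading described above works by giving every later message an In-Reply-To header that points back at the start of the series, which is what the send loop at the end of patchbomb() below sets up:

    import email.MIMEText

    intro = email.MIMEText.MIMEText('series description')
    intro['Subject'] = '[PATCH 0 of 2] frob the widget'
    intro['Message-Id'] = '<patchbomb.1167000000@example.com>'   # made-up id

    followup = email.MIMEText.MIMEText('patch body')
    followup['Subject'] = '[PATCH 1 of 2] frob the widget'
    followup['In-Reply-To'] = intro['Message-Id']                # threads under the intro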
65
65
66 from mercurial.demandload import *
66 import os, errno, popen2, socket, sys, tempfile, time
67 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
67 import email.MIMEMultipart, email.MIMEText, email.Utils
68 mercurial:cmdutil,commands,hg,mail,ui,patch
68 from mercurial import cmdutil, commands, hg, mail, ui, patch
69 os errno popen2 socket sys tempfile time''')
70 from mercurial.i18n import gettext as _
69 from mercurial.i18n import gettext as _
71 from mercurial.node import *
70 from mercurial.node import *
72
71
73 try:
72 try:
74 # readline gives raw_input editing capabilities, but is not
73 # readline gives raw_input editing capabilities, but is not
75 # present on windows
74 # present on windows
76 import readline
75 import readline
77 except ImportError: pass
76 except ImportError: pass
78
77
79 def patchbomb(ui, repo, *revs, **opts):
78 def patchbomb(ui, repo, *revs, **opts):
80 '''send changesets as a series of patch emails
79 '''send changesets as a series of patch emails
81
80
82 The series starts with a "[PATCH 0 of N]" introduction, which
81 The series starts with a "[PATCH 0 of N]" introduction, which
83 describes the series as a whole.
82 describes the series as a whole.
84
83
85 Each patch email has a Subject line of "[PATCH M of N] ...", using
84 Each patch email has a Subject line of "[PATCH M of N] ...", using
86 the first line of the changeset description as the subject text.
85 the first line of the changeset description as the subject text.
87 The message contains two or three body parts. First, the rest of
86 The message contains two or three body parts. First, the rest of
88 the changeset description. Next, (optionally) if the diffstat
87 the changeset description. Next, (optionally) if the diffstat
89 program is installed, the result of running diffstat on the patch.
88 program is installed, the result of running diffstat on the patch.
90 Finally, the patch itself, as generated by "hg export".'''
89 Finally, the patch itself, as generated by "hg export".'''
91 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
90 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
92 if default: prompt += ' [%s]' % default
91 if default: prompt += ' [%s]' % default
93 prompt += rest
92 prompt += rest
94 while True:
93 while True:
95 r = raw_input(prompt)
94 r = raw_input(prompt)
96 if r: return r
95 if r: return r
97 if default is not None: return default
96 if default is not None: return default
98 if empty_ok: return r
97 if empty_ok: return r
99 ui.warn(_('Please enter a valid value.\n'))
98 ui.warn(_('Please enter a valid value.\n'))
100
99
101 def confirm(s):
100 def confirm(s):
102 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
101 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
103 raise ValueError
102 raise ValueError
104
103
105 def cdiffstat(summary, patchlines):
104 def cdiffstat(summary, patchlines):
106 s = patch.diffstat(patchlines)
105 s = patch.diffstat(patchlines)
107 if s:
106 if s:
108 if summary:
107 if summary:
109 ui.write(summary, '\n')
108 ui.write(summary, '\n')
110 ui.write(s, '\n')
109 ui.write(s, '\n')
111 confirm(_('Does the diffstat above look okay'))
110 confirm(_('Does the diffstat above look okay'))
112 return s
111 return s
113
112
114 def makepatch(patch, idx, total):
113 def makepatch(patch, idx, total):
115 desc = []
114 desc = []
116 node = None
115 node = None
117 body = ''
116 body = ''
118 for line in patch:
117 for line in patch:
119 if line.startswith('#'):
118 if line.startswith('#'):
120 if line.startswith('# Node ID'): node = line.split()[-1]
119 if line.startswith('# Node ID'): node = line.split()[-1]
121 continue
120 continue
122 if (line.startswith('diff -r')
121 if (line.startswith('diff -r')
123 or line.startswith('diff --git')):
122 or line.startswith('diff --git')):
124 break
123 break
125 desc.append(line)
124 desc.append(line)
126 if not node: raise ValueError
125 if not node: raise ValueError
127
126
128 #body = ('\n'.join(desc[1:]).strip() or
127 #body = ('\n'.join(desc[1:]).strip() or
129 # 'Patch subject is complete summary.')
128 # 'Patch subject is complete summary.')
130 #body += '\n\n\n'
129 #body += '\n\n\n'
131
130
132 if opts['plain']:
131 if opts['plain']:
133 while patch and patch[0].startswith('# '): patch.pop(0)
132 while patch and patch[0].startswith('# '): patch.pop(0)
134 if patch: patch.pop(0)
133 if patch: patch.pop(0)
135 while patch and not patch[0].strip(): patch.pop(0)
134 while patch and not patch[0].strip(): patch.pop(0)
136 if opts['diffstat']:
135 if opts['diffstat']:
137 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
136 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
138 if opts['attach']:
137 if opts['attach']:
139 msg = email.MIMEMultipart.MIMEMultipart()
138 msg = email.MIMEMultipart.MIMEMultipart()
140 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
139 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
141 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
140 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
142 binnode = bin(node)
141 binnode = bin(node)
143 # if node is mq patch, it will have patch file name as tag
142 # if node is mq patch, it will have patch file name as tag
144 patchname = [t for t in repo.nodetags(binnode)
143 patchname = [t for t in repo.nodetags(binnode)
145 if t.endswith('.patch') or t.endswith('.diff')]
144 if t.endswith('.patch') or t.endswith('.diff')]
146 if patchname:
145 if patchname:
147 patchname = patchname[0]
146 patchname = patchname[0]
148 elif total > 1:
147 elif total > 1:
149 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
148 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
150 binnode, idx, total)
149 binnode, idx, total)
151 else:
150 else:
152 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
151 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
153 p['Content-Disposition'] = 'inline; filename=' + patchname
152 p['Content-Disposition'] = 'inline; filename=' + patchname
154 msg.attach(p)
153 msg.attach(p)
155 else:
154 else:
156 body += '\n'.join(patch)
155 body += '\n'.join(patch)
157 msg = email.MIMEText.MIMEText(body)
156 msg = email.MIMEText.MIMEText(body)
158 if total == 1:
157 if total == 1:
159 subj = '[PATCH] ' + desc[0].strip()
158 subj = '[PATCH] ' + desc[0].strip()
160 else:
159 else:
161 tlen = len(str(total))
160 tlen = len(str(total))
162 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, desc[0].strip())
161 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, desc[0].strip())
163 if subj.endswith('.'): subj = subj[:-1]
162 if subj.endswith('.'): subj = subj[:-1]
164 msg['Subject'] = subj
163 msg['Subject'] = subj
165 msg['X-Mercurial-Node'] = node
164 msg['X-Mercurial-Node'] = node
166 return msg
165 return msg
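(Worked example with invented values.) The '%0*d' in the subject format above zero-pads the patch number to the width of the series size, so the subjects of a long series align and sort correctly:

    total = 12
    tlen = len(str(total))                                      # 2
    print '[PATCH %0*d of %d] %s' % (tlen, 3, total, 'frob the widget')
    # prints: [PATCH 03 of 12] frob the widget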
167
166
168 start_time = int(time.time())
167 start_time = int(time.time())
169
168
170 def genmsgid(id):
169 def genmsgid(id):
171 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
170 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
172
171
173 patches = []
172 patches = []
174
173
175 class exportee:
174 class exportee:
176 def __init__(self, container):
175 def __init__(self, container):
177 self.lines = []
176 self.lines = []
178 self.container = container
177 self.container = container
179 self.name = 'email'
178 self.name = 'email'
180
179
181 def write(self, data):
180 def write(self, data):
182 self.lines.append(data)
181 self.lines.append(data)
183
182
184 def close(self):
183 def close(self):
185 self.container.append(''.join(self.lines).split('\n'))
184 self.container.append(''.join(self.lines).split('\n'))
186 self.lines = []
185 self.lines = []
187
186
188 commands.export(ui, repo, *revs, **{'output': exportee(patches),
187 commands.export(ui, repo, *revs, **{'output': exportee(patches),
189 'switch_parent': False,
188 'switch_parent': False,
190 'text': None,
189 'text': None,
191 'git': opts.get('git')})
190 'git': opts.get('git')})
192
191
193 jumbo = []
192 jumbo = []
194 msgs = []
193 msgs = []
195
194
196 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
195 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
197
196
198 for p, i in zip(patches, xrange(len(patches))):
197 for p, i in zip(patches, xrange(len(patches))):
199 jumbo.extend(p)
198 jumbo.extend(p)
200 msgs.append(makepatch(p, i + 1, len(patches)))
199 msgs.append(makepatch(p, i + 1, len(patches)))
201
200
202 sender = (opts['from'] or ui.config('email', 'from') or
201 sender = (opts['from'] or ui.config('email', 'from') or
203 ui.config('patchbomb', 'from') or
202 ui.config('patchbomb', 'from') or
204 prompt('From', ui.username()))
203 prompt('From', ui.username()))
205
204
206 def getaddrs(opt, prpt, default = None):
205 def getaddrs(opt, prpt, default = None):
207 addrs = opts[opt] or (ui.config('email', opt) or
206 addrs = opts[opt] or (ui.config('email', opt) or
208 ui.config('patchbomb', opt) or
207 ui.config('patchbomb', opt) or
209 prompt(prpt, default = default)).split(',')
208 prompt(prpt, default = default)).split(',')
210 return [a.strip() for a in addrs if a.strip()]
209 return [a.strip() for a in addrs if a.strip()]
211 to = getaddrs('to', 'To')
210 to = getaddrs('to', 'To')
212 cc = getaddrs('cc', 'Cc', '')
211 cc = getaddrs('cc', 'Cc', '')
213
212
214 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
213 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
215 ui.config('patchbomb', 'bcc') or '').split(',')
214 ui.config('patchbomb', 'bcc') or '').split(',')
216 bcc = [a.strip() for a in bcc if a.strip()]
215 bcc = [a.strip() for a in bcc if a.strip()]
217
216
218 if len(patches) > 1:
217 if len(patches) > 1:
219 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
218 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
220
219
221 tlen = len(str(len(patches)))
220 tlen = len(str(len(patches)))
222
221
223 subj = '[PATCH %0*d of %d] %s' % (
222 subj = '[PATCH %0*d of %d] %s' % (
224 tlen, 0,
223 tlen, 0,
225 len(patches),
224 len(patches),
226 opts['subject'] or
225 opts['subject'] or
227 prompt('Subject:', rest = ' [PATCH %0*d of %d] ' % (tlen, 0,
226 prompt('Subject:', rest = ' [PATCH %0*d of %d] ' % (tlen, 0,
228 len(patches))))
227 len(patches))))
229
228
230 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
229 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
231
230
232 body = []
231 body = []
233
232
234 while True:
233 while True:
235 try: l = raw_input()
234 try: l = raw_input()
236 except EOFError: break
235 except EOFError: break
237 if l == '.': break
236 if l == '.': break
238 body.append(l)
237 body.append(l)
239
238
240 if opts['diffstat']:
239 if opts['diffstat']:
241 d = cdiffstat(_('Final summary:\n'), jumbo)
240 d = cdiffstat(_('Final summary:\n'), jumbo)
242 if d: body.append('\n' + d)
241 if d: body.append('\n' + d)
243
242
244 body = '\n'.join(body) + '\n'
243 body = '\n'.join(body) + '\n'
245
244
246 msg = email.MIMEText.MIMEText(body)
245 msg = email.MIMEText.MIMEText(body)
247 msg['Subject'] = subj
246 msg['Subject'] = subj
248
247
249 msgs.insert(0, msg)
248 msgs.insert(0, msg)
250
249
251 ui.write('\n')
250 ui.write('\n')
252
251
253 if not opts['test'] and not opts['mbox']:
252 if not opts['test'] and not opts['mbox']:
254 mailer = mail.connect(ui)
253 mailer = mail.connect(ui)
255 parent = None
254 parent = None
256
255
257 # Calculate UTC offset
256 # Calculate UTC offset
258 if time.daylight: offset = time.altzone
257 if time.daylight: offset = time.altzone
259 else: offset = time.timezone
258 else: offset = time.timezone
260 if offset <= 0: sign, offset = '+', -offset
259 if offset <= 0: sign, offset = '+', -offset
261 else: sign = '-'
260 else: sign = '-'
262 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
261 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
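(Worked example, hypothetical timezone.) On a host two hours east of UTC, time.timezone (or time.altzone) is -7200 seconds, which the branch above turns into the '+0200' string appended to the Date header:

    offset = -7200                                   # seconds west of UTC, i.e. UTC+02:00
    if offset <= 0: sign, offset = '+', -offset
    else: sign = '-'
    print '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)   # prints: +0200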
263
262
264 sender_addr = email.Utils.parseaddr(sender)[1]
263 sender_addr = email.Utils.parseaddr(sender)[1]
265 for m in msgs:
264 for m in msgs:
266 try:
265 try:
267 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
266 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
268 except TypeError:
267 except TypeError:
269 m['Message-Id'] = genmsgid('patchbomb')
268 m['Message-Id'] = genmsgid('patchbomb')
270 if parent:
269 if parent:
271 m['In-Reply-To'] = parent
270 m['In-Reply-To'] = parent
272 else:
271 else:
273 parent = m['Message-Id']
272 parent = m['Message-Id']
274 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
273 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
275
274
276 start_time += 1
275 start_time += 1
277 m['From'] = sender
276 m['From'] = sender
278 m['To'] = ', '.join(to)
277 m['To'] = ', '.join(to)
279 if cc: m['Cc'] = ', '.join(cc)
278 if cc: m['Cc'] = ', '.join(cc)
280 if bcc: m['Bcc'] = ', '.join(bcc)
279 if bcc: m['Bcc'] = ', '.join(bcc)
281 if opts['test']:
280 if opts['test']:
282 ui.status('Displaying ', m['Subject'], ' ...\n')
281 ui.status('Displaying ', m['Subject'], ' ...\n')
283 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
282 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
284 try:
283 try:
285 fp.write(m.as_string(0))
284 fp.write(m.as_string(0))
286 fp.write('\n')
285 fp.write('\n')
287 except IOError, inst:
286 except IOError, inst:
288 if inst.errno != errno.EPIPE:
287 if inst.errno != errno.EPIPE:
289 raise
288 raise
290 fp.close()
289 fp.close()
291 elif opts['mbox']:
290 elif opts['mbox']:
292 ui.status('Writing ', m['Subject'], ' ...\n')
291 ui.status('Writing ', m['Subject'], ' ...\n')
293 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
292 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
294 date = time.asctime(time.localtime(start_time))
293 date = time.asctime(time.localtime(start_time))
295 fp.write('From %s %s\n' % (sender_addr, date))
294 fp.write('From %s %s\n' % (sender_addr, date))
296 fp.write(m.as_string(0))
295 fp.write(m.as_string(0))
297 fp.write('\n\n')
296 fp.write('\n\n')
298 fp.close()
297 fp.close()
299 else:
298 else:
300 ui.status('Sending ', m['Subject'], ' ...\n')
299 ui.status('Sending ', m['Subject'], ' ...\n')
301 # Exim does not remove the Bcc field
300 # Exim does not remove the Bcc field
302 del m['Bcc']
301 del m['Bcc']
303 mailer.sendmail(sender, to + bcc + cc, m.as_string(0))
302 mailer.sendmail(sender, to + bcc + cc, m.as_string(0))
304
303
305 cmdtable = {
304 cmdtable = {
306 'email':
305 'email':
307 (patchbomb,
306 (patchbomb,
308 [('a', 'attach', None, 'send patches as inline attachments'),
307 [('a', 'attach', None, 'send patches as inline attachments'),
309 ('', 'bcc', [], 'email addresses of blind copy recipients'),
308 ('', 'bcc', [], 'email addresses of blind copy recipients'),
310 ('c', 'cc', [], 'email addresses of copy recipients'),
309 ('c', 'cc', [], 'email addresses of copy recipients'),
311 ('d', 'diffstat', None, 'add diffstat output to messages'),
310 ('d', 'diffstat', None, 'add diffstat output to messages'),
312 ('g', 'git', None, _('use git extended diff format')),
311 ('g', 'git', None, _('use git extended diff format')),
313 ('f', 'from', '', 'email address of sender'),
312 ('f', 'from', '', 'email address of sender'),
314 ('', 'plain', None, 'omit hg patch header'),
313 ('', 'plain', None, 'omit hg patch header'),
315 ('n', 'test', None, 'print messages that would be sent'),
314 ('n', 'test', None, 'print messages that would be sent'),
316 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
315 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
317 ('s', 'subject', '', 'subject of introductory message'),
316 ('s', 'subject', '', 'subject of introductory message'),
318 ('t', 'to', [], 'email addresses of recipients')],
317 ('t', 'to', [], 'email addresses of recipients')],
319 "hg email [OPTION]... [REV]...")
318 "hg email [OPTION]... [REV]...")
320 }
319 }
@@ -1,592 +1,591 b''
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.demandload import *
9 from mercurial.i18n import gettext as _
8 from mercurial.i18n import gettext as _
10 demandload(globals(), 'os tempfile')
9 import os, tempfile
11 demandload(globals(), 'mercurial:bundlerepo,cmdutil,commands,hg,merge,patch')
10 from mercurial import bundlerepo, cmdutil, commands, hg, merge, patch, revlog
12 demandload(globals(), 'mercurial:revlog,util')
11 from mercurial import util
13
12
14 '''patch transplanting tool
13 '''patch transplanting tool
15
14
16 This extension allows you to transplant patches from another branch.
15 This extension allows you to transplant patches from another branch.
17
16
18 Transplanted patches are recorded in .hg/transplant/transplants, as a map
17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
19 from a changeset hash to its hash in the source repository.
18 from a changeset hash to its hash in the source repository.
20 '''
19 '''
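(Illustrative sketch with fabricated node ids.) Each line of .hg/transplant/transplants holds a 'local:source' pair of 40-character hex node ids, which read() below splits and converts with revlog.bin():

    sample = 'a' * 40 + ':' + 'b' * 40        # fabricated node ids, for illustration only
    local_hex, source_hex = sample.split(':')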
21
20
22 class transplantentry:
21 class transplantentry:
23 def __init__(self, lnode, rnode):
22 def __init__(self, lnode, rnode):
24 self.lnode = lnode
23 self.lnode = lnode
25 self.rnode = rnode
24 self.rnode = rnode
26
25
27 class transplants:
26 class transplants:
28 def __init__(self, path=None, transplantfile=None, opener=None):
27 def __init__(self, path=None, transplantfile=None, opener=None):
29 self.path = path
28 self.path = path
30 self.transplantfile = transplantfile
29 self.transplantfile = transplantfile
31 self.opener = opener
30 self.opener = opener
32
31
33 if not opener:
32 if not opener:
34 self.opener = util.opener(self.path)
33 self.opener = util.opener(self.path)
35 self.transplants = []
34 self.transplants = []
36 self.dirty = False
35 self.dirty = False
37 self.read()
36 self.read()
38
37
39 def read(self):
38 def read(self):
40 abspath = os.path.join(self.path, self.transplantfile)
39 abspath = os.path.join(self.path, self.transplantfile)
41 if self.transplantfile and os.path.exists(abspath):
40 if self.transplantfile and os.path.exists(abspath):
42 for line in self.opener(self.transplantfile).read().splitlines():
41 for line in self.opener(self.transplantfile).read().splitlines():
43 lnode, rnode = map(revlog.bin, line.split(':'))
42 lnode, rnode = map(revlog.bin, line.split(':'))
44 self.transplants.append(transplantentry(lnode, rnode))
43 self.transplants.append(transplantentry(lnode, rnode))
45
44
46 def write(self):
45 def write(self):
47 if self.dirty and self.transplantfile:
46 if self.dirty and self.transplantfile:
48 if not os.path.isdir(self.path):
47 if not os.path.isdir(self.path):
49 os.mkdir(self.path)
48 os.mkdir(self.path)
50 fp = self.opener(self.transplantfile, 'w')
49 fp = self.opener(self.transplantfile, 'w')
51 for c in self.transplants:
50 for c in self.transplants:
52 l, r = map(revlog.hex, (c.lnode, c.rnode))
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
53 fp.write(l + ':' + r + '\n')
52 fp.write(l + ':' + r + '\n')
54 fp.close()
53 fp.close()
55 self.dirty = False
54 self.dirty = False
56
55
57 def get(self, rnode):
56 def get(self, rnode):
58 return [t for t in self.transplants if t.rnode == rnode]
57 return [t for t in self.transplants if t.rnode == rnode]
59
58
60 def set(self, lnode, rnode):
59 def set(self, lnode, rnode):
61 self.transplants.append(transplantentry(lnode, rnode))
60 self.transplants.append(transplantentry(lnode, rnode))
62 self.dirty = True
61 self.dirty = True
63
62
64 def remove(self, transplant):
63 def remove(self, transplant):
65 del self.transplants[self.transplants.index(transplant)]
64 del self.transplants[self.transplants.index(transplant)]
66 self.dirty = True
65 self.dirty = True
67
66
68 class transplanter:
67 class transplanter:
69 def __init__(self, ui, repo):
68 def __init__(self, ui, repo):
70 self.ui = ui
69 self.ui = ui
71 self.path = repo.join('transplant')
70 self.path = repo.join('transplant')
72 self.opener = util.opener(self.path)
71 self.opener = util.opener(self.path)
73 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
74
73
75 def applied(self, repo, node, parent):
74 def applied(self, repo, node, parent):
76 '''returns True if a node is already an ancestor of parent
75 '''returns True if a node is already an ancestor of parent
77 or has already been transplanted'''
76 or has already been transplanted'''
78 if hasnode(repo, node):
77 if hasnode(repo, node):
79 if node in repo.changelog.reachable(parent, stop=node):
78 if node in repo.changelog.reachable(parent, stop=node):
80 return True
79 return True
81 for t in self.transplants.get(node):
80 for t in self.transplants.get(node):
82 # it might have been stripped
81 # it might have been stripped
83 if not hasnode(repo, t.lnode):
82 if not hasnode(repo, t.lnode):
84 self.transplants.remove(t)
83 self.transplants.remove(t)
85 return False
84 return False
86 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
87 return True
86 return True
88 return False
87 return False
89
88
90 def apply(self, repo, source, revmap, merges, opts={}):
89 def apply(self, repo, source, revmap, merges, opts={}):
91 '''apply the revisions in revmap one by one in revision order'''
90 '''apply the revisions in revmap one by one in revision order'''
92 revs = revmap.keys()
91 revs = revmap.keys()
93 revs.sort()
92 revs.sort()
94
93
95 p1, p2 = repo.dirstate.parents()
94 p1, p2 = repo.dirstate.parents()
96 pulls = []
95 pulls = []
97 diffopts = patch.diffopts(self.ui, opts)
96 diffopts = patch.diffopts(self.ui, opts)
98 diffopts.git = True
97 diffopts.git = True
99
98
100 lock = repo.lock()
99 lock = repo.lock()
101 wlock = repo.wlock()
100 wlock = repo.wlock()
102 try:
101 try:
103 for rev in revs:
102 for rev in revs:
104 node = revmap[rev]
103 node = revmap[rev]
105 revstr = '%s:%s' % (rev, revlog.short(node))
104 revstr = '%s:%s' % (rev, revlog.short(node))
106
105
107 if self.applied(repo, node, p1):
106 if self.applied(repo, node, p1):
108 self.ui.warn(_('skipping already applied revision %s\n') %
107 self.ui.warn(_('skipping already applied revision %s\n') %
109 revstr)
108 revstr)
110 continue
109 continue
111
110
112 parents = source.changelog.parents(node)
111 parents = source.changelog.parents(node)
113 if not opts.get('filter'):
112 if not opts.get('filter'):
114 # If the changeset parent is the same as the wdir's parent,
113 # If the changeset parent is the same as the wdir's parent,
115 # just pull it.
114 # just pull it.
116 if parents[0] == p1:
115 if parents[0] == p1:
117 pulls.append(node)
116 pulls.append(node)
118 p1 = node
117 p1 = node
119 continue
118 continue
120 if pulls:
119 if pulls:
121 if source != repo:
120 if source != repo:
122 repo.pull(source, heads=pulls, lock=lock)
121 repo.pull(source, heads=pulls, lock=lock)
123 merge.update(repo, pulls[-1], wlock=wlock)
122 merge.update(repo, pulls[-1], wlock=wlock)
124 p1, p2 = repo.dirstate.parents()
123 p1, p2 = repo.dirstate.parents()
125 pulls = []
124 pulls = []
126
125
127 domerge = False
126 domerge = False
128 if node in merges:
127 if node in merges:
129 # pulling all the merge revs at once would mean we couldn't
128 # pulling all the merge revs at once would mean we couldn't
130 # transplant after the latest even if transplants before them
129 # transplant after the latest even if transplants before them
131 # fail.
130 # fail.
132 domerge = True
131 domerge = True
133 if not hasnode(repo, node):
132 if not hasnode(repo, node):
134 repo.pull(source, heads=[node], lock=lock)
133 repo.pull(source, heads=[node], lock=lock)
135
134
136 if parents[1] != revlog.nullid:
135 if parents[1] != revlog.nullid:
137 self.ui.note(_('skipping merge changeset %s:%s\n')
136 self.ui.note(_('skipping merge changeset %s:%s\n')
138 % (rev, revlog.short(node)))
137 % (rev, revlog.short(node)))
139 patchfile = None
138 patchfile = None
140 else:
139 else:
141 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
140 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
142 fp = os.fdopen(fd, 'w')
141 fp = os.fdopen(fd, 'w')
143 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
142 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
144 fp.close()
143 fp.close()
145
144
146 del revmap[rev]
145 del revmap[rev]
147 if patchfile or domerge:
146 if patchfile or domerge:
148 try:
147 try:
149 n = self.applyone(repo, node, source.changelog.read(node),
148 n = self.applyone(repo, node, source.changelog.read(node),
150 patchfile, merge=domerge,
149 patchfile, merge=domerge,
151 log=opts.get('log'),
150 log=opts.get('log'),
152 filter=opts.get('filter'),
151 filter=opts.get('filter'),
153 lock=lock, wlock=wlock)
152 lock=lock, wlock=wlock)
154 if domerge:
153 if domerge:
155 self.ui.status(_('%s merged at %s\n') % (revstr,
154 self.ui.status(_('%s merged at %s\n') % (revstr,
156 revlog.short(n)))
155 revlog.short(n)))
157 else:
156 else:
158 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
157 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
159 revlog.short(n)))
158 revlog.short(n)))
160 finally:
159 finally:
161 if patchfile:
160 if patchfile:
162 os.unlink(patchfile)
161 os.unlink(patchfile)
163 if pulls:
162 if pulls:
164 repo.pull(source, heads=pulls, lock=lock)
163 repo.pull(source, heads=pulls, lock=lock)
165 merge.update(repo, pulls[-1], wlock=wlock)
164 merge.update(repo, pulls[-1], wlock=wlock)
166 finally:
165 finally:
167 self.saveseries(revmap, merges)
166 self.saveseries(revmap, merges)
168 self.transplants.write()
167 self.transplants.write()
169
168
170 def filter(self, filter, changelog, patchfile):
169 def filter(self, filter, changelog, patchfile):
171 '''arbitrarily rewrite changeset before applying it'''
170 '''arbitrarily rewrite changeset before applying it'''
172
171
173 self.ui.status('filtering %s\n' % patchfile)
172 self.ui.status('filtering %s\n' % patchfile)
174 user, date, msg = (changelog[1], changelog[2], changelog[4])
173 user, date, msg = (changelog[1], changelog[2], changelog[4])
175
174
176 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
175 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
177 fp = os.fdopen(fd, 'w')
176 fp = os.fdopen(fd, 'w')
178 fp.write("# HG changeset patch\n")
177 fp.write("# HG changeset patch\n")
179 fp.write("# User %s\n" % user)
178 fp.write("# User %s\n" % user)
180 fp.write("# Date %d %d\n" % date)
179 fp.write("# Date %d %d\n" % date)
181 fp.write(changelog[4])
180 fp.write(changelog[4])
182 fp.close()
181 fp.close()
183
182
184 try:
183 try:
185 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
184 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
186 util.shellquote(patchfile)),
185 util.shellquote(patchfile)),
187 environ={'HGUSER': changelog[1]},
186 environ={'HGUSER': changelog[1]},
188 onerr=util.Abort, errprefix=_('filter failed'))
187 onerr=util.Abort, errprefix=_('filter failed'))
189 user, date, msg = self.parselog(file(headerfile))[1:4]
188 user, date, msg = self.parselog(file(headerfile))[1:4]
190 finally:
189 finally:
191 os.unlink(headerfile)
190 os.unlink(headerfile)
192
191
193 return (user, date, msg)
192 return (user, date, msg)
194
193
195 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
194 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
196 filter=None, lock=None, wlock=None):
195 filter=None, lock=None, wlock=None):
197 '''apply the patch in patchfile to the repository as a transplant'''
196 '''apply the patch in patchfile to the repository as a transplant'''
198 (manifest, user, (time, timezone), files, message) = cl[:5]
197 (manifest, user, (time, timezone), files, message) = cl[:5]
199 date = "%d %d" % (time, timezone)
198 date = "%d %d" % (time, timezone)
200 extra = {'transplant_source': node}
199 extra = {'transplant_source': node}
201 if filter:
200 if filter:
202 (user, date, message) = self.filter(filter, cl, patchfile)
201 (user, date, message) = self.filter(filter, cl, patchfile)
203
202
204 if log:
203 if log:
205 message += '\n(transplanted from %s)' % revlog.hex(node)
204 message += '\n(transplanted from %s)' % revlog.hex(node)
206
205
207 self.ui.status(_('applying %s\n') % revlog.short(node))
206 self.ui.status(_('applying %s\n') % revlog.short(node))
208 self.ui.note('%s %s\n%s\n' % (user, date, message))
207 self.ui.note('%s %s\n%s\n' % (user, date, message))
209
208
210 if not patchfile and not merge:
209 if not patchfile and not merge:
211 raise util.Abort(_('can only omit patchfile if merging'))
210 raise util.Abort(_('can only omit patchfile if merging'))
212 if patchfile:
211 if patchfile:
213 try:
212 try:
214 files = {}
213 files = {}
215 try:
214 try:
216 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
215 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
217 files=files)
216 files=files)
218 if not files:
217 if not files:
219 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
218 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
220 return
219 return
221 finally:
220 finally:
222 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
221 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
223 except Exception, inst:
222 except Exception, inst:
224 if filter:
223 if filter:
225 os.unlink(patchfile)
224 os.unlink(patchfile)
226 seriespath = os.path.join(self.path, 'series')
225 seriespath = os.path.join(self.path, 'series')
227 if os.path.exists(seriespath):
226 if os.path.exists(seriespath):
228 os.unlink(seriespath)
227 os.unlink(seriespath)
229 p1 = repo.dirstate.parents()[0]
228 p1 = repo.dirstate.parents()[0]
230 p2 = node
229 p2 = node
231 self.log(user, date, message, p1, p2, merge=merge)
230 self.log(user, date, message, p1, p2, merge=merge)
232 self.ui.write(str(inst) + '\n')
231 self.ui.write(str(inst) + '\n')
233 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
232 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
234 else:
233 else:
235 files = None
234 files = None
236 if merge:
235 if merge:
237 p1, p2 = repo.dirstate.parents()
236 p1, p2 = repo.dirstate.parents()
238 repo.dirstate.setparents(p1, node)
237 repo.dirstate.setparents(p1, node)
239
238
240 n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
239 n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
241 extra=extra)
240 extra=extra)
242 if not merge:
241 if not merge:
243 self.transplants.set(n, node)
242 self.transplants.set(n, node)
244
243
245 return n
244 return n
246
245
247 def resume(self, repo, source, opts=None):
246 def resume(self, repo, source, opts=None):
248 '''recover last transaction and apply remaining changesets'''
247 '''recover last transaction and apply remaining changesets'''
249 if os.path.exists(os.path.join(self.path, 'journal')):
248 if os.path.exists(os.path.join(self.path, 'journal')):
250 n, node = self.recover(repo)
249 n, node = self.recover(repo)
251 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
250 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
252 revlog.short(n)))
251 revlog.short(n)))
253 seriespath = os.path.join(self.path, 'series')
252 seriespath = os.path.join(self.path, 'series')
254 if not os.path.exists(seriespath):
253 if not os.path.exists(seriespath):
255 self.transplants.write()
254 self.transplants.write()
256 return
255 return
257 nodes, merges = self.readseries()
256 nodes, merges = self.readseries()
258 revmap = {}
257 revmap = {}
259 for n in nodes:
258 for n in nodes:
260 revmap[source.changelog.rev(n)] = n
259 revmap[source.changelog.rev(n)] = n
261 os.unlink(seriespath)
260 os.unlink(seriespath)
262
261
263 self.apply(repo, source, revmap, merges, opts)
262 self.apply(repo, source, revmap, merges, opts)
264
263
265 def recover(self, repo):
264 def recover(self, repo):
266 '''commit working directory using journal metadata'''
265 '''commit working directory using journal metadata'''
267 node, user, date, message, parents = self.readlog()
266 node, user, date, message, parents = self.readlog()
268 merge = len(parents) == 2
267 merge = len(parents) == 2
269
268
270 if not user or not date or not message or not parents[0]:
269 if not user or not date or not message or not parents[0]:
271 raise util.Abort(_('transplant log file is corrupt'))
270 raise util.Abort(_('transplant log file is corrupt'))
272
271
273 extra = {'transplant_source': node}
272 extra = {'transplant_source': node}
274 wlock = repo.wlock()
273 wlock = repo.wlock()
275 p1, p2 = repo.dirstate.parents()
274 p1, p2 = repo.dirstate.parents()
276 if p1 != parents[0]:
275 if p1 != parents[0]:
277 raise util.Abort(_('working dir not at transplant parent %s') %
276 raise util.Abort(_('working dir not at transplant parent %s') %
278 revlog.hex(parents[0]))
277 revlog.hex(parents[0]))
279 if merge:
278 if merge:
280 repo.dirstate.setparents(p1, parents[1])
279 repo.dirstate.setparents(p1, parents[1])
281 n = repo.commit(None, message, user, date, wlock=wlock, extra=extra)
280 n = repo.commit(None, message, user, date, wlock=wlock, extra=extra)
282 if not n:
281 if not n:
283 raise util.Abort(_('commit failed'))
282 raise util.Abort(_('commit failed'))
284 if not merge:
283 if not merge:
285 self.transplants.set(n, node)
284 self.transplants.set(n, node)
286 self.unlog()
285 self.unlog()
287
286
288 return n, node
287 return n, node
289
288
290 def readseries(self):
289 def readseries(self):
291 nodes = []
290 nodes = []
292 merges = []
291 merges = []
293 cur = nodes
292 cur = nodes
294 for line in self.opener('series').read().splitlines():
293 for line in self.opener('series').read().splitlines():
295 if line.startswith('# Merges'):
294 if line.startswith('# Merges'):
296 cur = merges
295 cur = merges
297 continue
296 continue
298 cur.append(revlog.bin(line))
297 cur.append(revlog.bin(line))
299
298
300 return (nodes, merges)
299 return (nodes, merges)
301
300
302 def saveseries(self, revmap, merges):
301 def saveseries(self, revmap, merges):
303 if not revmap:
302 if not revmap:
304 return
303 return
305
304
306 if not os.path.isdir(self.path):
305 if not os.path.isdir(self.path):
307 os.mkdir(self.path)
306 os.mkdir(self.path)
308 series = self.opener('series', 'w')
307 series = self.opener('series', 'w')
309 revs = revmap.keys()
308 revs = revmap.keys()
310 revs.sort()
309 revs.sort()
311 for rev in revs:
310 for rev in revs:
312 series.write(revlog.hex(revmap[rev]) + '\n')
311 series.write(revlog.hex(revmap[rev]) + '\n')
313 if merges:
312 if merges:
314 series.write('# Merges\n')
313 series.write('# Merges\n')
315 for m in merges:
314 for m in merges:
316 series.write(revlog.hex(m) + '\n')
315 series.write(revlog.hex(m) + '\n')
317 series.close()
316 series.close()
318
317
319 def parselog(self, fp):
318 def parselog(self, fp):
320 parents = []
319 parents = []
321 message = []
320 message = []
322 node = revlog.nullid
321 node = revlog.nullid
323 inmsg = False
322 inmsg = False
324 for line in fp.read().splitlines():
323 for line in fp.read().splitlines():
325 if inmsg:
324 if inmsg:
326 message.append(line)
325 message.append(line)
327 elif line.startswith('# User '):
326 elif line.startswith('# User '):
328 user = line[7:]
327 user = line[7:]
329 elif line.startswith('# Date '):
328 elif line.startswith('# Date '):
330 date = line[7:]
329 date = line[7:]
331 elif line.startswith('# Node ID '):
330 elif line.startswith('# Node ID '):
332 node = revlog.bin(line[10:])
331 node = revlog.bin(line[10:])
333 elif line.startswith('# Parent '):
332 elif line.startswith('# Parent '):
334 parents.append(revlog.bin(line[9:]))
333 parents.append(revlog.bin(line[9:]))
335 elif not line.startswith('#'):
334 elif not line.startswith('#'):
336 inmsg = True
335 inmsg = True
337 message.append(line)
336 message.append(line)
338 return (node, user, date, '\n'.join(message), parents)
337 return (node, user, date, '\n'.join(message), parents)
339
338
340 def log(self, user, date, message, p1, p2, merge=False):
339 def log(self, user, date, message, p1, p2, merge=False):
341 '''journal changelog metadata for later recover'''
340 '''journal changelog metadata for later recover'''
342
341
343 if not os.path.isdir(self.path):
342 if not os.path.isdir(self.path):
344 os.mkdir(self.path)
343 os.mkdir(self.path)
345 fp = self.opener('journal', 'w')
344 fp = self.opener('journal', 'w')
346 fp.write('# User %s\n' % user)
345 fp.write('# User %s\n' % user)
347 fp.write('# Date %s\n' % date)
346 fp.write('# Date %s\n' % date)
348 fp.write('# Node ID %s\n' % revlog.hex(p2))
347 fp.write('# Node ID %s\n' % revlog.hex(p2))
349 fp.write('# Parent ' + revlog.hex(p1) + '\n')
348 fp.write('# Parent ' + revlog.hex(p1) + '\n')
350 if merge:
349 if merge:
351 fp.write('# Parent ' + revlog.hex(p2) + '\n')
350 fp.write('# Parent ' + revlog.hex(p2) + '\n')
352 fp.write(message.rstrip() + '\n')
351 fp.write(message.rstrip() + '\n')
353 fp.close()
352 fp.close()
354
353
355 def readlog(self):
354 def readlog(self):
356 return self.parselog(self.opener('journal'))
355 return self.parselog(self.opener('journal'))
357
356
358 def unlog(self):
357 def unlog(self):
359 '''remove changelog journal'''
358 '''remove changelog journal'''
360 absdst = os.path.join(self.path, 'journal')
359 absdst = os.path.join(self.path, 'journal')
361 if os.path.exists(absdst):
360 if os.path.exists(absdst):
362 os.unlink(absdst)
361 os.unlink(absdst)
363
362
364 def transplantfilter(self, repo, source, root):
363 def transplantfilter(self, repo, source, root):
365 def matchfn(node):
364 def matchfn(node):
366 if self.applied(repo, node, root):
365 if self.applied(repo, node, root):
367 return False
366 return False
368 if source.changelog.parents(node)[1] != revlog.nullid:
367 if source.changelog.parents(node)[1] != revlog.nullid:
369 return False
368 return False
370 extra = source.changelog.read(node)[5]
369 extra = source.changelog.read(node)[5]
371 cnode = extra.get('transplant_source')
370 cnode = extra.get('transplant_source')
372 if cnode and self.applied(repo, cnode, root):
371 if cnode and self.applied(repo, cnode, root):
373 return False
372 return False
374 return True
373 return True
375
374
376 return matchfn
375 return matchfn
377
376
378 def hasnode(repo, node):
377 def hasnode(repo, node):
379 try:
378 try:
380 return repo.changelog.rev(node) != None
379 return repo.changelog.rev(node) != None
381 except revlog.RevlogError:
380 except revlog.RevlogError:
382 return False
381 return False
383
382
384 def browserevs(ui, repo, nodes, opts):
383 def browserevs(ui, repo, nodes, opts):
385 '''interactively transplant changesets'''
384 '''interactively transplant changesets'''
386 def browsehelp(ui):
385 def browsehelp(ui):
387 ui.write('y: transplant this changeset\n'
386 ui.write('y: transplant this changeset\n'
388 'n: skip this changeset\n'
387 'n: skip this changeset\n'
389 'm: merge at this changeset\n'
388 'm: merge at this changeset\n'
390 'p: show patch\n'
389 'p: show patch\n'
391 'c: commit selected changesets\n'
390 'c: commit selected changesets\n'
392 'q: cancel transplant\n'
391 'q: cancel transplant\n'
393 '?: show this help\n')
392 '?: show this help\n')
394
393
395 displayer = cmdutil.show_changeset(ui, repo, opts)
394 displayer = cmdutil.show_changeset(ui, repo, opts)
396 transplants = []
395 transplants = []
397 merges = []
396 merges = []
398 for node in nodes:
397 for node in nodes:
399 displayer.show(changenode=node)
398 displayer.show(changenode=node)
400 action = None
399 action = None
401 while not action:
400 while not action:
402 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
401 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
403 if action == '?':
402 if action == '?':
404 browsehelp(ui)
403 browsehelp(ui)
405 action = None
404 action = None
406 elif action == 'p':
405 elif action == 'p':
407 parent = repo.changelog.parents(node)[0]
406 parent = repo.changelog.parents(node)[0]
408 patch.diff(repo, parent, node)
407 patch.diff(repo, parent, node)
409 action = None
408 action = None
410 elif action not in ('y', 'n', 'm', 'c', 'q'):
409 elif action not in ('y', 'n', 'm', 'c', 'q'):
411 ui.write('no such option\n')
410 ui.write('no such option\n')
412 action = None
411 action = None
413 if action == 'y':
412 if action == 'y':
414 transplants.append(node)
413 transplants.append(node)
415 elif action == 'm':
414 elif action == 'm':
416 merges.append(node)
415 merges.append(node)
417 elif action == 'c':
416 elif action == 'c':
418 break
417 break
419 elif action == 'q':
418 elif action == 'q':
420 transplants = ()
419 transplants = ()
421 merges = ()
420 merges = ()
422 break
421 break
423 return (transplants, merges)
422 return (transplants, merges)
424
423
425 def transplant(ui, repo, *revs, **opts):
424 def transplant(ui, repo, *revs, **opts):
426 '''transplant changesets from another branch
425 '''transplant changesets from another branch
427
426
428 Selected changesets will be applied on top of the current working
427 Selected changesets will be applied on top of the current working
429 directory with the log of the original changeset. If --log is
428 directory with the log of the original changeset. If --log is
430 specified, log messages will have a comment appended of the form:
429 specified, log messages will have a comment appended of the form:
431
430
432 (transplanted from CHANGESETHASH)
431 (transplanted from CHANGESETHASH)
433
432
434 You can rewrite the changelog message with the --filter option.
433 You can rewrite the changelog message with the --filter option.
435 Its argument will be invoked with the current changelog message
434 Its argument will be invoked with the current changelog message
436 as $1 and the patch as $2.
435 as $1 and the patch as $2.
437
436
438 If --source is specified, selects changesets from the named
437 If --source is specified, selects changesets from the named
439 repository. If --branch is specified, selects changesets from the
438 repository. If --branch is specified, selects changesets from the
440 branch holding the named revision, up to that revision. If --all
439 branch holding the named revision, up to that revision. If --all
441 is specified, all changesets on the branch will be transplanted,
440 is specified, all changesets on the branch will be transplanted,
442 otherwise you will be prompted to select the changesets you want.
441 otherwise you will be prompted to select the changesets you want.
443
442
444 hg transplant --branch REVISION --all will rebase the selected branch
443 hg transplant --branch REVISION --all will rebase the selected branch
445 (up to the named revision) onto your current working directory.
444 (up to the named revision) onto your current working directory.
446
445
447 You can optionally mark selected transplanted changesets as
446 You can optionally mark selected transplanted changesets as
448 merge changesets. You will not be prompted to transplant any
447 merge changesets. You will not be prompted to transplant any
449 ancestors of a merged transplant, and you can merge descendants
448 ancestors of a merged transplant, and you can merge descendants
450 of them normally instead of transplanting them.
449 of them normally instead of transplanting them.
451
450
452 If no merges or revisions are provided, hg transplant will start
451 If no merges or revisions are provided, hg transplant will start
453 an interactive changeset browser.
452 an interactive changeset browser.
454
453
455 If a changeset application fails, you can fix the merge by hand and
454 If a changeset application fails, you can fix the merge by hand and
456 then resume where you left off by calling hg transplant --continue.
455 then resume where you left off by calling hg transplant --continue.
457 '''
456 '''
458 def getoneitem(opts, item, errmsg):
457 def getoneitem(opts, item, errmsg):
459 val = opts.get(item)
458 val = opts.get(item)
460 if val:
459 if val:
461 if len(val) > 1:
460 if len(val) > 1:
462 raise util.Abort(errmsg)
461 raise util.Abort(errmsg)
463 else:
462 else:
464 return val[0]
463 return val[0]
465
464
466 def getremotechanges(repo, url):
465 def getremotechanges(repo, url):
467 sourcerepo = ui.expandpath(url)
466 sourcerepo = ui.expandpath(url)
468 source = hg.repository(ui, sourcerepo)
467 source = hg.repository(ui, sourcerepo)
469 incoming = repo.findincoming(source, force=True)
468 incoming = repo.findincoming(source, force=True)
470 if not incoming:
469 if not incoming:
471 return (source, None, None)
470 return (source, None, None)
472
471
473 bundle = None
472 bundle = None
474 if not source.local():
473 if not source.local():
475 cg = source.changegroup(incoming, 'incoming')
474 cg = source.changegroup(incoming, 'incoming')
476 bundle = commands.write_bundle(cg, compress=False)
475 bundle = commands.write_bundle(cg, compress=False)
477 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
476 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
478
477
479 return (source, incoming, bundle)
478 return (source, incoming, bundle)
480
479
481 def incwalk(repo, incoming, branches, match=util.always):
480 def incwalk(repo, incoming, branches, match=util.always):
482 if not branches:
481 if not branches:
483 branches=None
482 branches=None
484 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
483 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
485 if match(node):
484 if match(node):
486 yield node
485 yield node
487
486
488 def transplantwalk(repo, root, branches, match=util.always):
487 def transplantwalk(repo, root, branches, match=util.always):
489 if not branches:
488 if not branches:
490 branches = repo.heads()
489 branches = repo.heads()
491 ancestors = []
490 ancestors = []
492 for branch in branches:
491 for branch in branches:
493 ancestors.append(repo.changelog.ancestor(root, branch))
492 ancestors.append(repo.changelog.ancestor(root, branch))
494 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
493 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
495 if match(node):
494 if match(node):
496 yield node
495 yield node
497
496
498 def checkopts(opts, revs):
497 def checkopts(opts, revs):
499 if opts.get('continue'):
498 if opts.get('continue'):
500 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
499 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
501 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
500 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
502 return
501 return
503 if not (opts.get('source') or revs or
502 if not (opts.get('source') or revs or
504 opts.get('merge') or opts.get('branch')):
503 opts.get('merge') or opts.get('branch')):
505 raise util.Abort(_('no source URL, branch tag or revision list provided'))
504 raise util.Abort(_('no source URL, branch tag or revision list provided'))
506 if opts.get('all'):
505 if opts.get('all'):
507 if not opts.get('branch'):
506 if not opts.get('branch'):
508 raise util.Abort(_('--all requires a branch revision'))
507 raise util.Abort(_('--all requires a branch revision'))
509 if revs:
508 if revs:
510 raise util.Abort(_('--all is incompatible with a revision list'))
509 raise util.Abort(_('--all is incompatible with a revision list'))
511
510
512 checkopts(opts, revs)
511 checkopts(opts, revs)
513
512
514 if not opts.get('log'):
513 if not opts.get('log'):
515 opts['log'] = ui.config('transplant', 'log')
514 opts['log'] = ui.config('transplant', 'log')
516 if not opts.get('filter'):
515 if not opts.get('filter'):
517 opts['filter'] = ui.config('transplant', 'filter')
516 opts['filter'] = ui.config('transplant', 'filter')
518
517
519 tp = transplanter(ui, repo)
518 tp = transplanter(ui, repo)
520
519
521 p1, p2 = repo.dirstate.parents()
520 p1, p2 = repo.dirstate.parents()
522 if p1 == revlog.nullid:
521 if p1 == revlog.nullid:
523 raise util.Abort(_('no revision checked out'))
522 raise util.Abort(_('no revision checked out'))
524 if not opts.get('continue'):
523 if not opts.get('continue'):
525 if p2 != revlog.nullid:
524 if p2 != revlog.nullid:
526 raise util.Abort(_('outstanding uncommitted merges'))
525 raise util.Abort(_('outstanding uncommitted merges'))
527 m, a, r, d = repo.status()[:4]
526 m, a, r, d = repo.status()[:4]
528 if m or a or r or d:
527 if m or a or r or d:
529 raise util.Abort(_('outstanding local changes'))
528 raise util.Abort(_('outstanding local changes'))
530
529
531 bundle = None
530 bundle = None
532 source = opts.get('source')
531 source = opts.get('source')
533 if source:
532 if source:
534 (source, incoming, bundle) = getremotechanges(repo, source)
533 (source, incoming, bundle) = getremotechanges(repo, source)
535 else:
534 else:
536 source = repo
535 source = repo
537
536
538 try:
537 try:
539 if opts.get('continue'):
538 if opts.get('continue'):
540 tp.resume(repo, source, opts)
539 tp.resume(repo, source, opts)
541 return
540 return
542
541
543 tf=tp.transplantfilter(repo, source, p1)
542 tf=tp.transplantfilter(repo, source, p1)
544 if opts.get('prune'):
543 if opts.get('prune'):
545 prune = [source.lookup(r)
544 prune = [source.lookup(r)
546 for r in cmdutil.revrange(source, opts.get('prune'))]
545 for r in cmdutil.revrange(source, opts.get('prune'))]
547 matchfn = lambda x: tf(x) and x not in prune
546 matchfn = lambda x: tf(x) and x not in prune
548 else:
547 else:
549 matchfn = tf
548 matchfn = tf
550 branches = map(source.lookup, opts.get('branch', ()))
549 branches = map(source.lookup, opts.get('branch', ()))
551 merges = map(source.lookup, opts.get('merge', ()))
550 merges = map(source.lookup, opts.get('merge', ()))
552 revmap = {}
551 revmap = {}
553 if revs:
552 if revs:
554 for r in cmdutil.revrange(source, revs):
553 for r in cmdutil.revrange(source, revs):
555 revmap[int(r)] = source.lookup(r)
554 revmap[int(r)] = source.lookup(r)
556 elif opts.get('all') or not merges:
555 elif opts.get('all') or not merges:
557 if source != repo:
556 if source != repo:
558 alltransplants = incwalk(source, incoming, branches, match=matchfn)
557 alltransplants = incwalk(source, incoming, branches, match=matchfn)
559 else:
558 else:
560 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
559 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
561 if opts.get('all'):
560 if opts.get('all'):
562 revs = alltransplants
561 revs = alltransplants
563 else:
562 else:
564 revs, newmerges = browserevs(ui, source, alltransplants, opts)
563 revs, newmerges = browserevs(ui, source, alltransplants, opts)
565 merges.extend(newmerges)
564 merges.extend(newmerges)
566 for r in revs:
565 for r in revs:
567 revmap[source.changelog.rev(r)] = r
566 revmap[source.changelog.rev(r)] = r
568 for r in merges:
567 for r in merges:
569 revmap[source.changelog.rev(r)] = r
568 revmap[source.changelog.rev(r)] = r
570
569
571 revs = revmap.keys()
570 revs = revmap.keys()
572 revs.sort()
571 revs.sort()
573 pulls = []
572 pulls = []
574
573
575 tp.apply(repo, source, revmap, merges, opts)
574 tp.apply(repo, source, revmap, merges, opts)
576 finally:
575 finally:
577 if bundle:
576 if bundle:
578 os.unlink(bundle)
577 os.unlink(bundle)
579
578
580 cmdtable = {
579 cmdtable = {
581 "transplant":
580 "transplant":
582 (transplant,
581 (transplant,
583 [('s', 'source', '', _('pull patches from REPOSITORY')),
582 [('s', 'source', '', _('pull patches from REPOSITORY')),
584 ('b', 'branch', [], _('pull patches from branch BRANCH')),
583 ('b', 'branch', [], _('pull patches from branch BRANCH')),
585 ('a', 'all', None, _('pull all changesets up to BRANCH')),
584 ('a', 'all', None, _('pull all changesets up to BRANCH')),
586 ('p', 'prune', [], _('skip over REV')),
585 ('p', 'prune', [], _('skip over REV')),
587 ('m', 'merge', [], _('merge at REV')),
586 ('m', 'merge', [], _('merge at REV')),
588 ('', 'log', None, _('append transplant info to log message')),
587 ('', 'log', None, _('append transplant info to log message')),
589 ('c', 'continue', None, _('continue last transplant session after repair')),
588 ('c', 'continue', None, _('continue last transplant session after repair')),
590 ('', 'filter', '', _('filter changesets through FILTER'))],
589 ('', 'filter', '', _('filter changesets through FILTER'))],
591 _('hg transplant [-s REPOSITORY] [-b BRANCH] [-p REV] [-m REV] [-n] REV...'))
590 _('hg transplant [-s REPOSITORY] [-b BRANCH] [-p REV] [-m REV] [-n] REV...'))
592 }
591 }
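
As a concrete illustration of the --filter hook documented in the transplant docstring above: the filter() method runs the program with the temporary header file as $1 and the patch as $2, sets HGUSER to the original author, and re-reads the header file afterwards, so a filter can simply edit its arguments in place. Below is a minimal sketch of such a script; the file name and marker text are purely illustrative and not part of this changeset. The same program could also be configured once via the transplant.filter setting read near the top of transplant().

#!/usr/bin/env python
# example-filter.py - hypothetical 'hg transplant --filter' script (sketch)
#
# invoked as: example-filter.py HEADERFILE PATCHFILE
# HEADERFILE holds the '# HG changeset patch', '# User' and '# Date'
# lines followed by the commit message; PATCHFILE holds the diff.
# Both files may be rewritten in place before transplant re-reads them.

import sys

def main():
    headerfile = sys.argv[1]
    # patchfile = sys.argv[2]  # left untouched in this sketch

    fp = open(headerfile)
    lines = fp.readlines()
    fp.close()

    # the non-'#' lines at the end are the commit message, so appending
    # here appends to the message that parselog() will read back
    lines.append('\n(message rewritten by example-filter)\n')

    fp = open(headerfile, 'w')
    fp.writelines(lines)
    fp.close()

if __name__ == '__main__':
    main()
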
@@ -1,162 +1,161 b''
1 # appendfile.py - special classes to make repo updates atomic
1 # appendfile.py - special classes to make repo updates atomic
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import *
8 import cStringIO, changelog, errno, manifest, os, tempfile, util
9 demandload(globals(), "cStringIO changelog errno manifest os tempfile util")
10
9
11 # writes to metadata files are ordered. reads: changelog, manifest,
10 # writes to metadata files are ordered. reads: changelog, manifest,
12 # normal files. writes: normal files, manifest, changelog.
11 # normal files. writes: normal files, manifest, changelog.
13
12
14 # manifest contains pointers to offsets in normal files. changelog
13 # manifest contains pointers to offsets in normal files. changelog
15 # contains pointers to offsets in manifest. if reader reads old
14 # contains pointers to offsets in manifest. if reader reads old
16 # changelog while manifest or normal files are written, it has no
15 # changelog while manifest or normal files are written, it has no
17 # pointers into new parts of those files that may not be consistent
16 # pointers into new parts of those files that may not be consistent
18 # yet, so will not read them.
17 # yet, so will not read them.
19
18
20 # localrepo.addchangegroup thinks it writes changelog first, then
19 # localrepo.addchangegroup thinks it writes changelog first, then
21 # manifest, then normal files (this is the order they are available, and
20 # manifest, then normal files (this is the order they are available, and
22 # needed for computing linkrev fields), but uses appendfile to hide
21 # needed for computing linkrev fields), but uses appendfile to hide
23 # updates from readers. data not written to manifest or changelog
22 # updates from readers. data not written to manifest or changelog
24 # until all normal files updated. write manifest first, then
23 # until all normal files updated. write manifest first, then
25 # changelog.
24 # changelog.
26
25
27 # with this write ordering, readers cannot see inconsistent view of
26 # with this write ordering, readers cannot see inconsistent view of
28 # repo during update.
27 # repo during update.
29
28
30 class appendfile(object):
29 class appendfile(object):
31 '''implement enough of file protocol to append to revlog file.
30 '''implement enough of file protocol to append to revlog file.
32 appended data is written to temp file. reads and seeks span real
31 appended data is written to temp file. reads and seeks span real
33 file and temp file. readers cannot see appended data until
32 file and temp file. readers cannot see appended data until
34 writedata called.'''
33 writedata called.'''
35
34
36 def __init__(self, fp, tmpname):
35 def __init__(self, fp, tmpname):
37 if tmpname:
36 if tmpname:
38 self.tmpname = tmpname
37 self.tmpname = tmpname
39 self.tmpfp = util.posixfile(self.tmpname, 'ab+')
38 self.tmpfp = util.posixfile(self.tmpname, 'ab+')
40 else:
39 else:
41 fd, self.tmpname = tempfile.mkstemp(prefix="hg-appendfile-")
40 fd, self.tmpname = tempfile.mkstemp(prefix="hg-appendfile-")
42 os.close(fd)
41 os.close(fd)
43 self.tmpfp = util.posixfile(self.tmpname, 'ab+')
42 self.tmpfp = util.posixfile(self.tmpname, 'ab+')
44 self.realfp = fp
43 self.realfp = fp
45 self.offset = fp.tell()
44 self.offset = fp.tell()
46 # real file is not written by anyone else. cache its size so
45 # real file is not written by anyone else. cache its size so
47 # seek and read can be fast.
46 # seek and read can be fast.
48 self.realsize = util.fstat(fp).st_size
47 self.realsize = util.fstat(fp).st_size
49 self.name = fp.name
48 self.name = fp.name
50
49
51 def end(self):
50 def end(self):
52 self.tmpfp.flush() # make sure the stat is correct
51 self.tmpfp.flush() # make sure the stat is correct
53 return self.realsize + util.fstat(self.tmpfp).st_size
52 return self.realsize + util.fstat(self.tmpfp).st_size
54
53
55 def tell(self):
54 def tell(self):
56 return self.offset
55 return self.offset
57
56
58 def flush(self):
57 def flush(self):
59 self.tmpfp.flush()
58 self.tmpfp.flush()
60
59
61 def close(self):
60 def close(self):
62 self.realfp.close()
61 self.realfp.close()
63 self.tmpfp.close()
62 self.tmpfp.close()
64
63
65 def seek(self, offset, whence=0):
64 def seek(self, offset, whence=0):
66 '''virtual file offset spans real file and temp file.'''
65 '''virtual file offset spans real file and temp file.'''
67 if whence == 0:
66 if whence == 0:
68 self.offset = offset
67 self.offset = offset
69 elif whence == 1:
68 elif whence == 1:
70 self.offset += offset
69 self.offset += offset
71 elif whence == 2:
70 elif whence == 2:
72 self.offset = self.end() + offset
71 self.offset = self.end() + offset
73
72
74 if self.offset < self.realsize:
73 if self.offset < self.realsize:
75 self.realfp.seek(self.offset)
74 self.realfp.seek(self.offset)
76 else:
75 else:
77 self.tmpfp.seek(self.offset - self.realsize)
76 self.tmpfp.seek(self.offset - self.realsize)
78
77
79 def read(self, count=-1):
78 def read(self, count=-1):
80 '''only trick here is reads that span real file and temp file.'''
79 '''only trick here is reads that span real file and temp file.'''
81 fp = cStringIO.StringIO()
80 fp = cStringIO.StringIO()
82 old_offset = self.offset
81 old_offset = self.offset
83 if self.offset < self.realsize:
82 if self.offset < self.realsize:
84 s = self.realfp.read(count)
83 s = self.realfp.read(count)
85 fp.write(s)
84 fp.write(s)
86 self.offset += len(s)
85 self.offset += len(s)
87 if count > 0:
86 if count > 0:
88 count -= len(s)
87 count -= len(s)
89 if count != 0:
88 if count != 0:
90 if old_offset != self.offset:
89 if old_offset != self.offset:
91 self.tmpfp.seek(self.offset - self.realsize)
90 self.tmpfp.seek(self.offset - self.realsize)
92 s = self.tmpfp.read(count)
91 s = self.tmpfp.read(count)
93 fp.write(s)
92 fp.write(s)
94 self.offset += len(s)
93 self.offset += len(s)
95 return fp.getvalue()
94 return fp.getvalue()
96
95
97 def write(self, s):
96 def write(self, s):
98 '''append to temp file.'''
97 '''append to temp file.'''
99 self.tmpfp.seek(0, 2)
98 self.tmpfp.seek(0, 2)
100 self.tmpfp.write(s)
99 self.tmpfp.write(s)
101 # all writes are appends, so offset must go to end of file.
100 # all writes are appends, so offset must go to end of file.
102 self.offset = self.realsize + self.tmpfp.tell()
101 self.offset = self.realsize + self.tmpfp.tell()
103
102
104 class appendopener(object):
103 class appendopener(object):
105 '''special opener for files that only read or append.'''
104 '''special opener for files that only read or append.'''
106
105
107 def __init__(self, opener):
106 def __init__(self, opener):
108 self.realopener = opener
107 self.realopener = opener
109 # key: file name, value: appendfile name
108 # key: file name, value: appendfile name
110 self.tmpnames = {}
109 self.tmpnames = {}
111
110
112 def __call__(self, name, mode='r'):
111 def __call__(self, name, mode='r'):
113 '''open file.'''
112 '''open file.'''
114
113
115 assert mode in 'ra+'
114 assert mode in 'ra+'
116 try:
115 try:
117 realfp = self.realopener(name, 'r')
116 realfp = self.realopener(name, 'r')
118 except IOError, err:
117 except IOError, err:
119 if err.errno != errno.ENOENT: raise
118 if err.errno != errno.ENOENT: raise
120 realfp = self.realopener(name, 'w+')
119 realfp = self.realopener(name, 'w+')
121 tmpname = self.tmpnames.get(name)
120 tmpname = self.tmpnames.get(name)
122 fp = appendfile(realfp, tmpname)
121 fp = appendfile(realfp, tmpname)
123 if tmpname is None:
122 if tmpname is None:
124 self.tmpnames[name] = fp.tmpname
123 self.tmpnames[name] = fp.tmpname
125 return fp
124 return fp
126
125
127 def writedata(self):
126 def writedata(self):
128 '''copy data from temp files to real files.'''
127 '''copy data from temp files to real files.'''
129 # write .d file before .i file.
128 # write .d file before .i file.
130 tmpnames = self.tmpnames.items()
129 tmpnames = self.tmpnames.items()
131 tmpnames.sort()
130 tmpnames.sort()
132 for name, tmpname in tmpnames:
131 for name, tmpname in tmpnames:
133 ifp = open(tmpname, 'rb')
132 ifp = open(tmpname, 'rb')
134 ofp = self.realopener(name, 'a')
133 ofp = self.realopener(name, 'a')
135 for chunk in util.filechunkiter(ifp):
134 for chunk in util.filechunkiter(ifp):
136 ofp.write(chunk)
135 ofp.write(chunk)
137 ifp.close()
136 ifp.close()
138 os.unlink(tmpname)
137 os.unlink(tmpname)
139 del self.tmpnames[name]
138 del self.tmpnames[name]
140 ofp.close()
139 ofp.close()
141
140
142 def cleanup(self):
141 def cleanup(self):
143 '''delete temp files (this discards unwritten data!)'''
142 '''delete temp files (this discards unwritten data!)'''
144 for tmpname in self.tmpnames.values():
143 for tmpname in self.tmpnames.values():
145 os.unlink(tmpname)
144 os.unlink(tmpname)
146
145
147 # files for changelog and manifest are in different appendopeners, so
146 # files for changelog and manifest are in different appendopeners, so
148 # not mixed up together.
147 # not mixed up together.
149
148
150 class appendchangelog(changelog.changelog, appendopener):
149 class appendchangelog(changelog.changelog, appendopener):
151 def __init__(self, opener, version):
150 def __init__(self, opener, version):
152 appendopener.__init__(self, opener)
151 appendopener.__init__(self, opener)
153 changelog.changelog.__init__(self, self, version)
152 changelog.changelog.__init__(self, self, version)
154 def checkinlinesize(self, fp, tr):
153 def checkinlinesize(self, fp, tr):
155 return
154 return
156
155
157 class appendmanifest(manifest.manifest, appendopener):
156 class appendmanifest(manifest.manifest, appendopener):
158 def __init__(self, opener, version):
157 def __init__(self, opener, version):
159 appendopener.__init__(self, opener)
158 appendopener.__init__(self, opener)
160 manifest.manifest.__init__(self, self, version)
159 manifest.manifest.__init__(self, self, version)
161 def checkinlinesize(self, fp, tr):
160 def checkinlinesize(self, fp, tr):
162 return
161 return
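
The ordering rules spelled out in the comments at the top of appendfile.py boil down to one usage pattern: appended data is staged in a temporary file, readers of the real file keep seeing the old contents, and only writedata() makes the new data visible. A rough sketch of that pattern follows, assuming an ordinary repository opener is passed in; the helper name is illustrative, not part of this changeset.

import appendfile

def buffered_append(opener, name, data):
    '''stage an append to "name"; readers see it only after writedata().'''
    aop = appendfile.appendopener(opener)

    fp = aop(name, 'a+')   # appendfile object: reads span real + temp file
    fp.write(data)         # the write lands only in the temp file
    fp.close()

    # at this point a reader opening the real file still sees old data

    aop.writedata()        # copy the staged temp data onto the real file
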
@@ -1,174 +1,173 b''
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of
5 # This software may be used and distributed according to the terms of
6 # the GNU General Public License, incorporated herein by reference.
6 # the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import *
9 from i18n import gettext as _
8 from i18n import gettext as _
10 from node import *
9 from node import *
11 demandload(globals(), 'cStringIO os stat tarfile time util zipfile')
10 import cStringIO, os, stat, tarfile, time, util, zipfile
12
11
13 def tidyprefix(dest, prefix, suffixes):
12 def tidyprefix(dest, prefix, suffixes):
14 '''choose prefix to use for names in archive. make sure prefix is
13 '''choose prefix to use for names in archive. make sure prefix is
15 safe for consumers.'''
14 safe for consumers.'''
16
15
17 if prefix:
16 if prefix:
18 prefix = prefix.replace('\\', '/')
17 prefix = prefix.replace('\\', '/')
19 else:
18 else:
20 if not isinstance(dest, str):
19 if not isinstance(dest, str):
21 raise ValueError('dest must be string if no prefix')
20 raise ValueError('dest must be string if no prefix')
22 prefix = os.path.basename(dest)
21 prefix = os.path.basename(dest)
23 lower = prefix.lower()
22 lower = prefix.lower()
24 for sfx in suffixes:
23 for sfx in suffixes:
25 if lower.endswith(sfx):
24 if lower.endswith(sfx):
26 prefix = prefix[:-len(sfx)]
25 prefix = prefix[:-len(sfx)]
27 break
26 break
28 lpfx = os.path.normpath(util.localpath(prefix))
27 lpfx = os.path.normpath(util.localpath(prefix))
29 prefix = util.pconvert(lpfx)
28 prefix = util.pconvert(lpfx)
30 if not prefix.endswith('/'):
29 if not prefix.endswith('/'):
31 prefix += '/'
30 prefix += '/'
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
31 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 raise util.Abort(_('archive prefix contains illegal components'))
32 raise util.Abort(_('archive prefix contains illegal components'))
34 return prefix
33 return prefix
35
34
36 class tarit:
35 class tarit:
37 '''write archive to tar file or stream. can write uncompressed,
36 '''write archive to tar file or stream. can write uncompressed,
38 or compress with gzip or bzip2.'''
37 or compress with gzip or bzip2.'''
39
38
40 def __init__(self, dest, prefix, mtime, kind=''):
39 def __init__(self, dest, prefix, mtime, kind=''):
41 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
40 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
42 '.tgz', '.tbz2'])
41 '.tgz', '.tbz2'])
43 self.mtime = mtime
42 self.mtime = mtime
44 if isinstance(dest, str):
43 if isinstance(dest, str):
45 self.z = tarfile.open(dest, mode='w:'+kind)
44 self.z = tarfile.open(dest, mode='w:'+kind)
46 else:
45 else:
47 self.z = tarfile.open(mode='w|'+kind, fileobj=dest)
46 self.z = tarfile.open(mode='w|'+kind, fileobj=dest)
48
47
49 def addfile(self, name, mode, data):
48 def addfile(self, name, mode, data):
50 i = tarfile.TarInfo(self.prefix + name)
49 i = tarfile.TarInfo(self.prefix + name)
51 i.mtime = self.mtime
50 i.mtime = self.mtime
52 i.size = len(data)
51 i.size = len(data)
53 i.mode = mode
52 i.mode = mode
54 self.z.addfile(i, cStringIO.StringIO(data))
53 self.z.addfile(i, cStringIO.StringIO(data))
55
54
56 def done(self):
55 def done(self):
57 self.z.close()
56 self.z.close()
58
57
59 class tellable:
58 class tellable:
60 '''provide tell method for zipfile.ZipFile when writing to http
59 '''provide tell method for zipfile.ZipFile when writing to http
61 response file object.'''
60 response file object.'''
62
61
63 def __init__(self, fp):
62 def __init__(self, fp):
64 self.fp = fp
63 self.fp = fp
65 self.offset = 0
64 self.offset = 0
66
65
67 def __getattr__(self, key):
66 def __getattr__(self, key):
68 return getattr(self.fp, key)
67 return getattr(self.fp, key)
69
68
70 def write(self, s):
69 def write(self, s):
71 self.fp.write(s)
70 self.fp.write(s)
72 self.offset += len(s)
71 self.offset += len(s)
73
72
74 def tell(self):
73 def tell(self):
75 return self.offset
74 return self.offset
76
75
77 class zipit:
76 class zipit:
78 '''write archive to zip file or stream. can write uncompressed,
77 '''write archive to zip file or stream. can write uncompressed,
79 or compressed with deflate.'''
78 or compressed with deflate.'''
80
79
81 def __init__(self, dest, prefix, mtime, compress=True):
80 def __init__(self, dest, prefix, mtime, compress=True):
82 self.prefix = tidyprefix(dest, prefix, ('.zip',))
81 self.prefix = tidyprefix(dest, prefix, ('.zip',))
83 if not isinstance(dest, str):
82 if not isinstance(dest, str):
84 try:
83 try:
85 dest.tell()
84 dest.tell()
86 except (AttributeError, IOError):
85 except (AttributeError, IOError):
87 dest = tellable(dest)
86 dest = tellable(dest)
88 self.z = zipfile.ZipFile(dest, 'w',
87 self.z = zipfile.ZipFile(dest, 'w',
89 compress and zipfile.ZIP_DEFLATED or
88 compress and zipfile.ZIP_DEFLATED or
90 zipfile.ZIP_STORED)
89 zipfile.ZIP_STORED)
91 self.date_time = time.gmtime(mtime)[:6]
90 self.date_time = time.gmtime(mtime)[:6]
92
91
93 def addfile(self, name, mode, data):
92 def addfile(self, name, mode, data):
94 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
93 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
95 i.compress_type = self.z.compression
94 i.compress_type = self.z.compression
96 i.flag_bits = 0x08
95 i.flag_bits = 0x08
97 # unzip will not honor unix file modes unless file creator is
96 # unzip will not honor unix file modes unless file creator is
98 # set to unix (id 3).
97 # set to unix (id 3).
99 i.create_system = 3
98 i.create_system = 3
100 i.external_attr = (mode | stat.S_IFREG) << 16L
99 i.external_attr = (mode | stat.S_IFREG) << 16L
101 self.z.writestr(i, data)
100 self.z.writestr(i, data)
102
101
103 def done(self):
102 def done(self):
104 self.z.close()
103 self.z.close()
105
104
106 class fileit:
105 class fileit:
107 '''write archive as files in directory.'''
106 '''write archive as files in directory.'''
108
107
109 def __init__(self, name, prefix, mtime):
108 def __init__(self, name, prefix, mtime):
110 if prefix:
109 if prefix:
111 raise util.Abort(_('cannot give prefix when archiving to files'))
110 raise util.Abort(_('cannot give prefix when archiving to files'))
112 self.basedir = name
111 self.basedir = name
113 self.dirs = {}
112 self.dirs = {}
114 self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
113 self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
115 getattr(os, 'O_BINARY', 0) |
114 getattr(os, 'O_BINARY', 0) |
116 getattr(os, 'O_NOFOLLOW', 0))
115 getattr(os, 'O_NOFOLLOW', 0))
117
116
118 def addfile(self, name, mode, data):
117 def addfile(self, name, mode, data):
119 destfile = os.path.join(self.basedir, name)
118 destfile = os.path.join(self.basedir, name)
120 destdir = os.path.dirname(destfile)
119 destdir = os.path.dirname(destfile)
121 if destdir not in self.dirs:
120 if destdir not in self.dirs:
122 if not os.path.isdir(destdir):
121 if not os.path.isdir(destdir):
123 os.makedirs(destdir)
122 os.makedirs(destdir)
124 self.dirs[destdir] = 1
123 self.dirs[destdir] = 1
125 os.fdopen(os.open(destfile, self.oflags, mode), 'wb').write(data)
124 os.fdopen(os.open(destfile, self.oflags, mode), 'wb').write(data)
126
125
127 def done(self):
126 def done(self):
128 pass
127 pass
129
128
130 archivers = {
129 archivers = {
131 'files': fileit,
130 'files': fileit,
132 'tar': tarit,
131 'tar': tarit,
133 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
132 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
134 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
133 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
135 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
134 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
136 'zip': zipit,
135 'zip': zipit,
137 }
136 }
138
137
139 def archive(repo, dest, node, kind, decode=True, matchfn=None,
138 def archive(repo, dest, node, kind, decode=True, matchfn=None,
140 prefix=None, mtime=None):
139 prefix=None, mtime=None):
141 '''create archive of repo as it was at node.
140 '''create archive of repo as it was at node.
142
141
143 dest can be name of directory, name of archive file, or file
142 dest can be name of directory, name of archive file, or file
144 object to write archive to.
143 object to write archive to.
145
144
146 kind is type of archive to create.
145 kind is type of archive to create.
147
146
148 decode tells whether to put files through decode filters from
147 decode tells whether to put files through decode filters from
149 hgrc.
148 hgrc.
150
149
151 matchfn is function to filter names of files to write to archive.
150 matchfn is function to filter names of files to write to archive.
152
151
153 prefix is name of path to put before every archive member.'''
152 prefix is name of path to put before every archive member.'''
154
153
155 def write(name, mode, data):
154 def write(name, mode, data):
156 if matchfn and not matchfn(name): return
155 if matchfn and not matchfn(name): return
157 if decode:
156 if decode:
158 fp = cStringIO.StringIO()
157 fp = cStringIO.StringIO()
159 repo.wwrite(name, data, fp)
158 repo.wwrite(name, data, fp)
160 data = fp.getvalue()
159 data = fp.getvalue()
161 archiver.addfile(name, mode, data)
160 archiver.addfile(name, mode, data)
162
161
163 change = repo.changelog.read(node)
162 change = repo.changelog.read(node)
164 mn = change[0]
163 mn = change[0]
165 archiver = archivers[kind](dest, prefix, mtime or change[2][0])
164 archiver = archivers[kind](dest, prefix, mtime or change[2][0])
166 m = repo.manifest.read(mn)
165 m = repo.manifest.read(mn)
167 items = m.items()
166 items = m.items()
168 items.sort()
167 items.sort()
169 write('.hg_archival.txt', 0644,
168 write('.hg_archival.txt', 0644,
170 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
169 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
171 for filename, filenode in items:
170 for filename, filenode in items:
172 write(filename, m.execf(filename) and 0755 or 0644,
171 write(filename, m.execf(filename) and 0755 or 0644,
173 repo.file(filename).read(filenode))
172 repo.file(filename).read(filenode))
174 archiver.done()
173 archiver.done()
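
The archive() entry point above is the backend of 'hg archive', but it can also be called directly. A small sketch of such a call follows; the helper name is illustrative, and the repo and node are assumed to come from the caller. kind must be one of the keys of the archivers table.

import archival

def export_snapshot(repo, node, dest, kind='tgz', prefix=None):
    '''write an archive of "repo" as of "node" to "dest" (sketch).

    kind is one of 'files', 'tar', 'tbz2', 'tgz', 'uzip' or 'zip',
    matching the archivers table above.'''
    archival.archive(repo, dest, node, kind, prefix=prefix)
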
@@ -1,256 +1,255 b''
1 """
1 """
2 bundlerepo.py - repository class for viewing uncompressed bundles
2 bundlerepo.py - repository class for viewing uncompressed bundles
3
3
4 This provides a read-only repository interface to bundles as if
4 This provides a read-only repository interface to bundles as if
5 they were part of the actual repository.
5 they were part of the actual repository.
6
6
7 Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
7 Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 import changegroup, util, os, struct, bz2, tempfile
16 demandload(globals(), "changegroup util os struct bz2 tempfile")
17
16
18 import localrepo, changelog, manifest, filelog, revlog
17 import localrepo, changelog, manifest, filelog, revlog
19
18
20 class bundlerevlog(revlog.revlog):
19 class bundlerevlog(revlog.revlog):
21 def __init__(self, opener, indexfile, datafile, bundlefile,
20 def __init__(self, opener, indexfile, datafile, bundlefile,
22 linkmapper=None):
21 linkmapper=None):
23 # How it works:
22 # How it works:
24 # to retrieve a revision, we need to know the offset of
23 # to retrieve a revision, we need to know the offset of
25 # the revision in the bundlefile (an opened file).
24 # the revision in the bundlefile (an opened file).
26 #
25 #
27 # We store this offset in the index (start), to differentiate a
26 # We store this offset in the index (start), to differentiate a
28 # rev in the bundle from a rev in the revlog, we check
27 # rev in the bundle from a rev in the revlog, we check
29 # len(index[r]). If the tuple is bigger than 7, it is a bundle
28 # len(index[r]). If the tuple is bigger than 7, it is a bundle
30 # (it is bigger since we also store the node the delta is based on)
29 # (it is bigger since we also store the node the delta is based on)
31 #
30 #
32 revlog.revlog.__init__(self, opener, indexfile, datafile)
31 revlog.revlog.__init__(self, opener, indexfile, datafile)
33 self.bundlefile = bundlefile
32 self.bundlefile = bundlefile
34 self.basemap = {}
33 self.basemap = {}
35 def chunkpositer():
34 def chunkpositer():
36 for chunk in changegroup.chunkiter(bundlefile):
35 for chunk in changegroup.chunkiter(bundlefile):
37 pos = bundlefile.tell()
36 pos = bundlefile.tell()
38 yield chunk, pos - len(chunk)
37 yield chunk, pos - len(chunk)
39 n = self.count()
38 n = self.count()
40 prev = None
39 prev = None
41 for chunk, start in chunkpositer():
40 for chunk, start in chunkpositer():
42 size = len(chunk)
41 size = len(chunk)
43 if size < 80:
42 if size < 80:
44 raise util.Abort("invalid changegroup")
43 raise util.Abort("invalid changegroup")
45 start += 80
44 start += 80
46 size -= 80
45 size -= 80
47 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
46 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
48 if node in self.nodemap:
47 if node in self.nodemap:
49 prev = node
48 prev = node
50 continue
49 continue
51 for p in (p1, p2):
50 for p in (p1, p2):
52 if not p in self.nodemap:
51 if not p in self.nodemap:
53 raise revlog.RevlogError(_("unknown parent %s") % short(p1))
52 raise revlog.RevlogError(_("unknown parent %s") % short(p1))
54 if linkmapper is None:
53 if linkmapper is None:
55 link = n
54 link = n
56 else:
55 else:
57 link = linkmapper(cs)
56 link = linkmapper(cs)
58
57
59 if not prev:
58 if not prev:
60 prev = p1
59 prev = p1
61 # start, size, base is not used, link, p1, p2, delta ref
60 # start, size, base is not used, link, p1, p2, delta ref
62 if self.version == revlog.REVLOGV0:
61 if self.version == revlog.REVLOGV0:
63 e = (start, size, None, link, p1, p2, node)
62 e = (start, size, None, link, p1, p2, node)
64 else:
63 else:
65 e = (self.offset_type(start, 0), size, -1, None, link,
64 e = (self.offset_type(start, 0), size, -1, None, link,
66 self.rev(p1), self.rev(p2), node)
65 self.rev(p1), self.rev(p2), node)
67 self.basemap[n] = prev
66 self.basemap[n] = prev
68 self.index.append(e)
67 self.index.append(e)
69 self.nodemap[node] = n
68 self.nodemap[node] = n
70 prev = node
69 prev = node
71 n += 1
70 n += 1
72
71
73 def bundle(self, rev):
72 def bundle(self, rev):
74 """is rev from the bundle"""
73 """is rev from the bundle"""
75 if rev < 0:
74 if rev < 0:
76 return False
75 return False
77 return rev in self.basemap
76 return rev in self.basemap
78 def bundlebase(self, rev): return self.basemap[rev]
77 def bundlebase(self, rev): return self.basemap[rev]
79 def chunk(self, rev, df=None, cachelen=4096):
78 def chunk(self, rev, df=None, cachelen=4096):
80 # Warning: in case of bundle, the diff is against bundlebase,
79 # Warning: in case of bundle, the diff is against bundlebase,
81 # not against rev - 1
80 # not against rev - 1
82 # XXX: could use some caching
81 # XXX: could use some caching
83 if not self.bundle(rev):
82 if not self.bundle(rev):
84 return revlog.revlog.chunk(self, rev, df, cachelen)
83 return revlog.revlog.chunk(self, rev, df, cachelen)
85 self.bundlefile.seek(self.start(rev))
84 self.bundlefile.seek(self.start(rev))
86 return self.bundlefile.read(self.length(rev))
85 return self.bundlefile.read(self.length(rev))
87
86
88 def revdiff(self, rev1, rev2):
87 def revdiff(self, rev1, rev2):
89 """return or calculate a delta between two revisions"""
88 """return or calculate a delta between two revisions"""
90 if self.bundle(rev1) and self.bundle(rev2):
89 if self.bundle(rev1) and self.bundle(rev2):
91 # hot path for bundle
90 # hot path for bundle
92 revb = self.rev(self.bundlebase(rev2))
91 revb = self.rev(self.bundlebase(rev2))
93 if revb == rev1:
92 if revb == rev1:
94 return self.chunk(rev2)
93 return self.chunk(rev2)
95 elif not self.bundle(rev1) and not self.bundle(rev2):
94 elif not self.bundle(rev1) and not self.bundle(rev2):
96 return revlog.revlog.chunk(self, rev1, rev2)
95 return revlog.revlog.chunk(self, rev1, rev2)
97
96
98 return self.diff(self.revision(self.node(rev1)),
97 return self.diff(self.revision(self.node(rev1)),
99 self.revision(self.node(rev2)))
98 self.revision(self.node(rev2)))
100
99
101 def revision(self, node):
100 def revision(self, node):
102 """return an uncompressed revision of a given node"""
101 """return an uncompressed revision of a given node"""
103 if node == nullid: return ""
102 if node == nullid: return ""
104
103
105 text = None
104 text = None
106 chain = []
105 chain = []
107 iter_node = node
106 iter_node = node
108 rev = self.rev(iter_node)
107 rev = self.rev(iter_node)
109 # reconstruct the revision if it is from a changegroup
108 # reconstruct the revision if it is from a changegroup
110 while self.bundle(rev):
109 while self.bundle(rev):
111 if self.cache and self.cache[0] == iter_node:
110 if self.cache and self.cache[0] == iter_node:
112 text = self.cache[2]
111 text = self.cache[2]
113 break
112 break
114 chain.append(rev)
113 chain.append(rev)
115 iter_node = self.bundlebase(rev)
114 iter_node = self.bundlebase(rev)
116 rev = self.rev(iter_node)
115 rev = self.rev(iter_node)
117 if text is None:
116 if text is None:
118 text = revlog.revlog.revision(self, iter_node)
117 text = revlog.revlog.revision(self, iter_node)
119
118
120 while chain:
119 while chain:
121 delta = self.chunk(chain.pop())
120 delta = self.chunk(chain.pop())
122 text = self.patches(text, [delta])
121 text = self.patches(text, [delta])
123
122
124 p1, p2 = self.parents(node)
123 p1, p2 = self.parents(node)
125 if node != revlog.hash(text, p1, p2):
124 if node != revlog.hash(text, p1, p2):
126 raise revlog.RevlogError(_("integrity check failed on %s:%d")
125 raise revlog.RevlogError(_("integrity check failed on %s:%d")
127 % (self.datafile, self.rev(node)))
126 % (self.datafile, self.rev(node)))
128
127
129 self.cache = (node, self.rev(node), text)
128 self.cache = (node, self.rev(node), text)
130 return text
129 return text
131
130
132 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
131 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
133 raise NotImplementedError
132 raise NotImplementedError
134 def addgroup(self, revs, linkmapper, transaction, unique=0):
133 def addgroup(self, revs, linkmapper, transaction, unique=0):
135 raise NotImplementedError
134 raise NotImplementedError
136 def strip(self, rev, minlink):
135 def strip(self, rev, minlink):
137 raise NotImplementedError
136 raise NotImplementedError
138 def checksize(self):
137 def checksize(self):
139 raise NotImplementedError
138 raise NotImplementedError
140
139
141 class bundlechangelog(bundlerevlog, changelog.changelog):
140 class bundlechangelog(bundlerevlog, changelog.changelog):
142 def __init__(self, opener, bundlefile):
141 def __init__(self, opener, bundlefile):
143 changelog.changelog.__init__(self, opener)
142 changelog.changelog.__init__(self, opener)
144 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
143 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
145 bundlefile)
144 bundlefile)
146
145
147 class bundlemanifest(bundlerevlog, manifest.manifest):
146 class bundlemanifest(bundlerevlog, manifest.manifest):
148 def __init__(self, opener, bundlefile, linkmapper):
147 def __init__(self, opener, bundlefile, linkmapper):
149 manifest.manifest.__init__(self, opener)
148 manifest.manifest.__init__(self, opener)
150 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
149 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
151 bundlefile, linkmapper)
150 bundlefile, linkmapper)
152
151
153 class bundlefilelog(bundlerevlog, filelog.filelog):
152 class bundlefilelog(bundlerevlog, filelog.filelog):
154 def __init__(self, opener, path, bundlefile, linkmapper):
153 def __init__(self, opener, path, bundlefile, linkmapper):
155 filelog.filelog.__init__(self, opener, path)
154 filelog.filelog.__init__(self, opener, path)
156 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
155 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
157 bundlefile, linkmapper)
156 bundlefile, linkmapper)
158
157
159 class bundlerepository(localrepo.localrepository):
158 class bundlerepository(localrepo.localrepository):
160 def __init__(self, ui, path, bundlename):
159 def __init__(self, ui, path, bundlename):
161 localrepo.localrepository.__init__(self, ui, path)
160 localrepo.localrepository.__init__(self, ui, path)
162
161
163 self._url = 'bundle:' + bundlename
162 self._url = 'bundle:' + bundlename
164 if path: self._url += '+' + path
163 if path: self._url += '+' + path
165
164
166 self.tempfile = None
165 self.tempfile = None
167 self.bundlefile = open(bundlename, "rb")
166 self.bundlefile = open(bundlename, "rb")
168 header = self.bundlefile.read(6)
167 header = self.bundlefile.read(6)
169 if not header.startswith("HG"):
168 if not header.startswith("HG"):
170 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
169 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
171 elif not header.startswith("HG10"):
170 elif not header.startswith("HG10"):
172 raise util.Abort(_("%s: unknown bundle version") % bundlename)
171 raise util.Abort(_("%s: unknown bundle version") % bundlename)
173 elif header == "HG10BZ":
172 elif header == "HG10BZ":
174 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
173 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
175 suffix=".hg10un", dir=self.path)
174 suffix=".hg10un", dir=self.path)
176 self.tempfile = temp
175 self.tempfile = temp
177 fptemp = os.fdopen(fdtemp, 'wb')
176 fptemp = os.fdopen(fdtemp, 'wb')
178 def generator(f):
177 def generator(f):
179 zd = bz2.BZ2Decompressor()
178 zd = bz2.BZ2Decompressor()
180 zd.decompress("BZ")
179 zd.decompress("BZ")
181 for chunk in f:
180 for chunk in f:
182 yield zd.decompress(chunk)
181 yield zd.decompress(chunk)
183 gen = generator(util.filechunkiter(self.bundlefile, 4096))
182 gen = generator(util.filechunkiter(self.bundlefile, 4096))
184
183
185 try:
184 try:
186 fptemp.write("HG10UN")
185 fptemp.write("HG10UN")
187 for chunk in gen:
186 for chunk in gen:
188 fptemp.write(chunk)
187 fptemp.write(chunk)
189 finally:
188 finally:
190 fptemp.close()
189 fptemp.close()
191 self.bundlefile.close()
190 self.bundlefile.close()
192
191
193 self.bundlefile = open(self.tempfile, "rb")
192 self.bundlefile = open(self.tempfile, "rb")
194 # seek right after the header
193 # seek right after the header
195 self.bundlefile.seek(6)
194 self.bundlefile.seek(6)
196 elif header == "HG10UN":
195 elif header == "HG10UN":
197 # nothing to do
196 # nothing to do
198 pass
197 pass
199 else:
198 else:
200 raise util.Abort(_("%s: unknown bundle compression type")
199 raise util.Abort(_("%s: unknown bundle compression type")
201 % bundlename)
200 % bundlename)
202 self.changelog = bundlechangelog(self.sopener, self.bundlefile)
201 self.changelog = bundlechangelog(self.sopener, self.bundlefile)
203 self.manifest = bundlemanifest(self.sopener, self.bundlefile,
202 self.manifest = bundlemanifest(self.sopener, self.bundlefile,
204 self.changelog.rev)
203 self.changelog.rev)
205 # dict with the mapping 'filename' -> position in the bundle
204 # dict with the mapping 'filename' -> position in the bundle
206 self.bundlefilespos = {}
205 self.bundlefilespos = {}
207 while 1:
206 while 1:
208 f = changegroup.getchunk(self.bundlefile)
207 f = changegroup.getchunk(self.bundlefile)
209 if not f:
208 if not f:
210 break
209 break
211 self.bundlefilespos[f] = self.bundlefile.tell()
210 self.bundlefilespos[f] = self.bundlefile.tell()
212 for c in changegroup.chunkiter(self.bundlefile):
211 for c in changegroup.chunkiter(self.bundlefile):
213 pass
212 pass
214
213
215 def url(self):
214 def url(self):
216 return self._url
215 return self._url
217
216
218 def dev(self):
217 def dev(self):
219 return -1
218 return -1
220
219
221 def file(self, f):
220 def file(self, f):
222 if f[0] == '/':
221 if f[0] == '/':
223 f = f[1:]
222 f = f[1:]
224 if f in self.bundlefilespos:
223 if f in self.bundlefilespos:
225 self.bundlefile.seek(self.bundlefilespos[f])
224 self.bundlefile.seek(self.bundlefilespos[f])
226 return bundlefilelog(self.sopener, f, self.bundlefile,
225 return bundlefilelog(self.sopener, f, self.bundlefile,
227 self.changelog.rev)
226 self.changelog.rev)
228 else:
227 else:
229 return filelog.filelog(self.sopener, f)
228 return filelog.filelog(self.sopener, f)
230
229
231 def close(self):
230 def close(self):
232 """Close assigned bundle file immediately."""
231 """Close assigned bundle file immediately."""
233 self.bundlefile.close()
232 self.bundlefile.close()
234
233
235 def __del__(self):
234 def __del__(self):
236 bundlefile = getattr(self, 'bundlefile', None)
235 bundlefile = getattr(self, 'bundlefile', None)
237 if bundlefile and not bundlefile.closed:
236 if bundlefile and not bundlefile.closed:
238 bundlefile.close()
237 bundlefile.close()
239 tempfile = getattr(self, 'tempfile', None)
238 tempfile = getattr(self, 'tempfile', None)
240 if tempfile is not None:
239 if tempfile is not None:
241 os.unlink(tempfile)
240 os.unlink(tempfile)
242
241
243 def instance(ui, path, create):
242 def instance(ui, path, create):
244 if create:
243 if create:
245 raise util.Abort(_('cannot create new bundle repository'))
244 raise util.Abort(_('cannot create new bundle repository'))
246 path = util.drop_scheme('file', path)
245 path = util.drop_scheme('file', path)
247 if path.startswith('bundle:'):
246 if path.startswith('bundle:'):
248 path = util.drop_scheme('bundle', path)
247 path = util.drop_scheme('bundle', path)
249 s = path.split("+", 1)
248 s = path.split("+", 1)
250 if len(s) == 1:
249 if len(s) == 1:
251 repopath, bundlename = "", s[0]
250 repopath, bundlename = "", s[0]
252 else:
251 else:
253 repopath, bundlename = s
252 repopath, bundlename = s
254 else:
253 else:
255 repopath, bundlename = '', path
254 repopath, bundlename = '', path
256 return bundlerepository(ui, repopath, bundlename)
255 return bundlerepository(ui, repopath, bundlename)
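As an aside on the path handling in instance() above: a 'bundle:' URL may carry an optional repository path before a '+' separator, and falls back to an empty repository path when only a bundle file is given. A minimal stand-alone sketch of that decomposition, using plain string operations in place of util.drop_scheme (an assumption, not the real helper):

# Sketch only: mirrors the 'bundle:' path parsing in instance() above.
# util.drop_scheme is replaced by simple prefix stripping here.
def parse_bundle_path(path):
    if path.startswith('file:'):
        path = path[len('file:'):]
    if path.startswith('bundle:'):
        path = path[len('bundle:'):]
        s = path.split("+", 1)
        if len(s) == 1:
            return "", s[0]          # e.g. bundle:changes.hg
        return s[0], s[1]            # e.g. bundle:/existing/repo+changes.hg
    return '', path                  # a plain path to a bundle file

# parse_bundle_path('bundle:/existing/repo+changes.hg') -> ('/existing/repo', 'changes.hg')
# parse_bundle_path('changes.hg') -> ('', 'changes.hg')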
@@ -1,120 +1,120 b''
1 """
1 """
2 changegroup.py - Mercurial changegroup manipulation functions
2 changegroup.py - Mercurial changegroup manipulation functions
3
3
4 Copyright 2006 Matt Mackall <mpm@selenic.com>
4 Copyright 2006 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9 from i18n import gettext as _
10 from i18n import gettext as _
10 from demandload import *
11 import struct, os, bz2, zlib, util, tempfile
11 demandload(globals(), "struct os bz2 zlib util tempfile")
12
12
13 def getchunk(source):
13 def getchunk(source):
14 """get a chunk from a changegroup"""
14 """get a chunk from a changegroup"""
15 d = source.read(4)
15 d = source.read(4)
16 if not d:
16 if not d:
17 return ""
17 return ""
18 l = struct.unpack(">l", d)[0]
18 l = struct.unpack(">l", d)[0]
19 if l <= 4:
19 if l <= 4:
20 return ""
20 return ""
21 d = source.read(l - 4)
21 d = source.read(l - 4)
22 if len(d) < l - 4:
22 if len(d) < l - 4:
23 raise util.Abort(_("premature EOF reading chunk"
23 raise util.Abort(_("premature EOF reading chunk"
24 " (got %d bytes, expected %d)")
24 " (got %d bytes, expected %d)")
25 % (len(d), l - 4))
25 % (len(d), l - 4))
26 return d
26 return d
27
27
28 def chunkiter(source):
28 def chunkiter(source):
29 """iterate through the chunks in source"""
29 """iterate through the chunks in source"""
30 while 1:
30 while 1:
31 c = getchunk(source)
31 c = getchunk(source)
32 if not c:
32 if not c:
33 break
33 break
34 yield c
34 yield c
35
35
36 def genchunk(data):
36 def genchunk(data):
37 """build a changegroup chunk"""
37 """build a changegroup chunk"""
38 header = struct.pack(">l", len(data)+ 4)
38 header = struct.pack(">l", len(data)+ 4)
39 return "%s%s" % (header, data)
39 return "%s%s" % (header, data)
40
40
41 def closechunk():
41 def closechunk():
42 return struct.pack(">l", 0)
42 return struct.pack(">l", 0)
43
43
44 class nocompress(object):
44 class nocompress(object):
45 def compress(self, x):
45 def compress(self, x):
46 return x
46 return x
47 def flush(self):
47 def flush(self):
48 return ""
48 return ""
49
49
50 bundletypes = {
50 bundletypes = {
51 "": ("", nocompress),
51 "": ("", nocompress),
52 "HG10UN": ("HG10UN", nocompress),
52 "HG10UN": ("HG10UN", nocompress),
53 "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
53 "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
54 "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
54 "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
55 }
55 }
56
56
57 def writebundle(cg, filename, bundletype):
57 def writebundle(cg, filename, bundletype):
58 """Write a bundle file and return its filename.
58 """Write a bundle file and return its filename.
59
59
60 Existing files will not be overwritten.
60 Existing files will not be overwritten.
61 If no filename is specified, a temporary file is created.
61 If no filename is specified, a temporary file is created.
62 bz2 compression can be turned off.
62 bz2 compression can be turned off.
63 The bundle file will be deleted in case of errors.
63 The bundle file will be deleted in case of errors.
64 """
64 """
65
65
66 fh = None
66 fh = None
67 cleanup = None
67 cleanup = None
68 try:
68 try:
69 if filename:
69 if filename:
70 if os.path.exists(filename):
70 if os.path.exists(filename):
71 raise util.Abort(_("file '%s' already exists") % filename)
71 raise util.Abort(_("file '%s' already exists") % filename)
72 fh = open(filename, "wb")
72 fh = open(filename, "wb")
73 else:
73 else:
74 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
74 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
75 fh = os.fdopen(fd, "wb")
75 fh = os.fdopen(fd, "wb")
76 cleanup = filename
76 cleanup = filename
77
77
78 header, compressor = bundletypes[bundletype]
78 header, compressor = bundletypes[bundletype]
79 fh.write(header)
79 fh.write(header)
80 z = compressor()
80 z = compressor()
81
81
82 # we must parse the changegroup data here, otherwise we would block:
82 # we must parse the changegroup data here, otherwise we would block:
83 # with sshrepo we don't know where the stream ends
83 # with sshrepo we don't know where the stream ends
84
84
85 # an empty chunkiter is the end of the changegroup
85 # an empty chunkiter is the end of the changegroup
86 empty = False
86 empty = False
87 while not empty:
87 while not empty:
88 empty = True
88 empty = True
89 for chunk in chunkiter(cg):
89 for chunk in chunkiter(cg):
90 empty = False
90 empty = False
91 fh.write(z.compress(genchunk(chunk)))
91 fh.write(z.compress(genchunk(chunk)))
92 fh.write(z.compress(closechunk()))
92 fh.write(z.compress(closechunk()))
93 fh.write(z.flush())
93 fh.write(z.flush())
94 cleanup = None
94 cleanup = None
95 return filename
95 return filename
96 finally:
96 finally:
97 if fh is not None:
97 if fh is not None:
98 fh.close()
98 fh.close()
99 if cleanup is not None:
99 if cleanup is not None:
100 os.unlink(cleanup)
100 os.unlink(cleanup)
101
101
102 def readbundle(fh, fname):
102 def readbundle(fh, fname):
103 header = fh.read(6)
103 header = fh.read(6)
104 if not header.startswith("HG"):
104 if not header.startswith("HG"):
105 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
105 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
106 elif not header.startswith("HG10"):
106 elif not header.startswith("HG10"):
107 raise util.Abort(_("%s: unknown bundle version") % fname)
107 raise util.Abort(_("%s: unknown bundle version") % fname)
108
108
109 if header == "HG10BZ":
109 if header == "HG10BZ":
110 def generator(f):
110 def generator(f):
111 zd = bz2.BZ2Decompressor()
111 zd = bz2.BZ2Decompressor()
112 zd.decompress("BZ")
112 zd.decompress("BZ")
113 for chunk in util.filechunkiter(f, 4096):
113 for chunk in util.filechunkiter(f, 4096):
114 yield zd.decompress(chunk)
114 yield zd.decompress(chunk)
115 return util.chunkbuffer(generator(fh))
115 return util.chunkbuffer(generator(fh))
116 elif header == "HG10UN":
116 elif header == "HG10UN":
117 return fh
117 return fh
118
118
119 raise util.Abort(_("%s: unknown bundle compression type")
119 raise util.Abort(_("%s: unknown bundle compression type")
120 % fname)
120 % fname)
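To make the chunk framing used by getchunk(), genchunk() and chunkiter() above concrete: every chunk is prefixed with a 4-byte big-endian length that counts the 4 header bytes as well, and a length of 0 (or anything <= 4) terminates the group. A self-contained sketch against an in-memory stream, not part of Mercurial's API:

# Sketch: frame two payloads the way genchunk() does, then read them back
# the way getchunk()/chunkiter() do. Assumes Python 2, like the code above.
import struct
from cStringIO import StringIO

def frame(data):
    return struct.pack(">l", len(data) + 4) + data

stream = StringIO(frame("hello") + frame("world") + struct.pack(">l", 0))

def read_chunks(source):
    while True:
        d = source.read(4)
        if not d:
            break
        l = struct.unpack(">l", d)[0]
        if l <= 4:                   # the zero-length chunk ends the group
            break
        yield source.read(l - 4)

# list(read_chunks(stream)) -> ['hello', 'world']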
@@ -1,103 +1,102 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import demandload
10 import os, time, util
11 demandload(globals(), "os time util")
12
11
13 def _string_escape(text):
12 def _string_escape(text):
14 """
13 """
15 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
16 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
17 >>> s
16 >>> s
18 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
19 >>> res = _string_escape(s)
18 >>> res = _string_escape(s)
20 >>> s == _string_unescape(res)
19 >>> s == _string_unescape(res)
21 True
20 True
22 """
21 """
23 # subset of the string_escape codec
22 # subset of the string_escape codec
24 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
25 return text.replace('\0', '\\0')
24 return text.replace('\0', '\\0')
26
25
27 def _string_unescape(text):
26 def _string_unescape(text):
28 return text.decode('string_escape')
27 return text.decode('string_escape')
29
28
30 class changelog(revlog):
29 class changelog(revlog):
31 def __init__(self, opener, defversion=REVLOGV0):
30 def __init__(self, opener, defversion=REVLOGV0):
32 revlog.__init__(self, opener, "00changelog.i", "00changelog.d",
31 revlog.__init__(self, opener, "00changelog.i", "00changelog.d",
33 defversion)
32 defversion)
34
33
35 def decode_extra(self, text):
34 def decode_extra(self, text):
36 extra = {}
35 extra = {}
37 for l in text.split('\0'):
36 for l in text.split('\0'):
38 if not l:
37 if not l:
39 continue
38 continue
40 k, v = _string_unescape(l).split(':', 1)
39 k, v = _string_unescape(l).split(':', 1)
41 extra[k] = v
40 extra[k] = v
42 return extra
41 return extra
43
42
44 def encode_extra(self, d):
43 def encode_extra(self, d):
45 items = [_string_escape(":".join(t)) for t in d.iteritems()]
44 items = [_string_escape(":".join(t)) for t in d.iteritems()]
46 return "\0".join(items)
45 return "\0".join(items)
47
46
48 def extract(self, text):
47 def extract(self, text):
49 """
48 """
50 format used:
49 format used:
51 nodeid\n : manifest node in ascii
50 nodeid\n : manifest node in ascii
52 user\n : user, no \n or \r allowed
51 user\n : user, no \n or \r allowed
53 time tz extra\n : date (time is int or float, timezone is int)
52 time tz extra\n : date (time is int or float, timezone is int)
54 : extra is metadata, encoded and separated by '\0'
53 : extra is metadata, encoded and separated by '\0'
55 : older versions ignore it
54 : older versions ignore it
56 files\n\n : files modified by the cset, no \n or \r allowed
55 files\n\n : files modified by the cset, no \n or \r allowed
57 (.*) : comment (free text, ideally utf-8)
56 (.*) : comment (free text, ideally utf-8)
58
57
59 changelog v0 doesn't use extra
58 changelog v0 doesn't use extra
60 """
59 """
61 if not text:
60 if not text:
62 return (nullid, "", (0, 0), [], "", {})
61 return (nullid, "", (0, 0), [], "", {})
63 last = text.index("\n\n")
62 last = text.index("\n\n")
64 desc = util.tolocal(text[last + 2:])
63 desc = util.tolocal(text[last + 2:])
65 l = text[:last].split('\n')
64 l = text[:last].split('\n')
66 manifest = bin(l[0])
65 manifest = bin(l[0])
67 user = util.tolocal(l[1])
66 user = util.tolocal(l[1])
68
67
69 extra_data = l[2].split(' ', 2)
68 extra_data = l[2].split(' ', 2)
70 if len(extra_data) != 3:
69 if len(extra_data) != 3:
71 time = float(extra_data.pop(0))
70 time = float(extra_data.pop(0))
72 try:
71 try:
73 # various tools did silly things with the time zone field.
72 # various tools did silly things with the time zone field.
74 timezone = int(extra_data[0])
73 timezone = int(extra_data[0])
75 except:
74 except:
76 timezone = 0
75 timezone = 0
77 extra = {}
76 extra = {}
78 else:
77 else:
79 time, timezone, extra = extra_data
78 time, timezone, extra = extra_data
80 time, timezone = float(time), int(timezone)
79 time, timezone = float(time), int(timezone)
81 extra = self.decode_extra(extra)
80 extra = self.decode_extra(extra)
82 files = l[3:]
81 files = l[3:]
83 return (manifest, user, (time, timezone), files, desc, extra)
82 return (manifest, user, (time, timezone), files, desc, extra)
84
83
85 def read(self, node):
84 def read(self, node):
86 return self.extract(self.revision(node))
85 return self.extract(self.revision(node))
87
86
88 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
87 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
89 user=None, date=None, extra={}):
88 user=None, date=None, extra={}):
90
89
91 user, desc = util.fromlocal(user), util.fromlocal(desc)
90 user, desc = util.fromlocal(user), util.fromlocal(desc)
92
91
93 if date:
92 if date:
94 parseddate = "%d %d" % util.parsedate(date)
93 parseddate = "%d %d" % util.parsedate(date)
95 else:
94 else:
96 parseddate = "%d %d" % util.makedate()
95 parseddate = "%d %d" % util.makedate()
97 if extra:
96 if extra:
98 extra = self.encode_extra(extra)
97 extra = self.encode_extra(extra)
99 parseddate = "%s %s" % (parseddate, extra)
98 parseddate = "%s %s" % (parseddate, extra)
100 list.sort()
99 list.sort()
101 l = [hex(manifest), user, parseddate] + list + ["", desc]
100 l = [hex(manifest), user, parseddate] + list + ["", desc]
102 text = "\n".join(l)
101 text = "\n".join(l)
103 return self.addrevision(text, transaction, self.count(), p1, p2)
102 return self.addrevision(text, transaction, self.count(), p1, p2)
@@ -1,756 +1,754 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
9 from node import *
8 from node import *
10 from i18n import gettext as _
9 from i18n import gettext as _
11 demandload(globals(), 'os sys')
10 import os, sys, mdiff, util, templater, patch
12 demandload(globals(), 'mdiff util templater patch')
13
11
14 revrangesep = ':'
12 revrangesep = ':'
15
13
16 def revpair(repo, revs):
14 def revpair(repo, revs):
17 '''return pair of nodes, given list of revisions. second item can
15 '''return pair of nodes, given list of revisions. second item can
18 be None, meaning use working dir.'''
16 be None, meaning use working dir.'''
19
17
20 def revfix(repo, val, defval):
18 def revfix(repo, val, defval):
21 if not val and val != 0 and defval is not None:
19 if not val and val != 0 and defval is not None:
22 val = defval
20 val = defval
23 return repo.lookup(val)
21 return repo.lookup(val)
24
22
25 if not revs:
23 if not revs:
26 return repo.dirstate.parents()[0], None
24 return repo.dirstate.parents()[0], None
27 end = None
25 end = None
28 if len(revs) == 1:
26 if len(revs) == 1:
29 if revrangesep in revs[0]:
27 if revrangesep in revs[0]:
30 start, end = revs[0].split(revrangesep, 1)
28 start, end = revs[0].split(revrangesep, 1)
31 start = revfix(repo, start, 0)
29 start = revfix(repo, start, 0)
32 end = revfix(repo, end, repo.changelog.count() - 1)
30 end = revfix(repo, end, repo.changelog.count() - 1)
33 else:
31 else:
34 start = revfix(repo, revs[0], None)
32 start = revfix(repo, revs[0], None)
35 elif len(revs) == 2:
33 elif len(revs) == 2:
36 if revrangesep in revs[0] or revrangesep in revs[1]:
34 if revrangesep in revs[0] or revrangesep in revs[1]:
37 raise util.Abort(_('too many revisions specified'))
35 raise util.Abort(_('too many revisions specified'))
38 start = revfix(repo, revs[0], None)
36 start = revfix(repo, revs[0], None)
39 end = revfix(repo, revs[1], None)
37 end = revfix(repo, revs[1], None)
40 else:
38 else:
41 raise util.Abort(_('too many revisions specified'))
39 raise util.Abort(_('too many revisions specified'))
42 return start, end
40 return start, end
43
41
44 def revrange(repo, revs):
42 def revrange(repo, revs):
45 """Yield revision as strings from a list of revision specifications."""
43 """Yield revision as strings from a list of revision specifications."""
46
44
47 def revfix(repo, val, defval):
45 def revfix(repo, val, defval):
48 if not val and val != 0 and defval is not None:
46 if not val and val != 0 and defval is not None:
49 return defval
47 return defval
50 return repo.changelog.rev(repo.lookup(val))
48 return repo.changelog.rev(repo.lookup(val))
51
49
52 seen, l = {}, []
50 seen, l = {}, []
53 for spec in revs:
51 for spec in revs:
54 if revrangesep in spec:
52 if revrangesep in spec:
55 start, end = spec.split(revrangesep, 1)
53 start, end = spec.split(revrangesep, 1)
56 start = revfix(repo, start, 0)
54 start = revfix(repo, start, 0)
57 end = revfix(repo, end, repo.changelog.count() - 1)
55 end = revfix(repo, end, repo.changelog.count() - 1)
58 step = start > end and -1 or 1
56 step = start > end and -1 or 1
59 for rev in xrange(start, end+step, step):
57 for rev in xrange(start, end+step, step):
60 if rev in seen:
58 if rev in seen:
61 continue
59 continue
62 seen[rev] = 1
60 seen[rev] = 1
63 l.append(rev)
61 l.append(rev)
64 else:
62 else:
65 rev = revfix(repo, spec, None)
63 rev = revfix(repo, spec, None)
66 if rev in seen:
64 if rev in seen:
67 continue
65 continue
68 seen[rev] = 1
66 seen[rev] = 1
69 l.append(rev)
67 l.append(rev)
70
68
71 return l
69 return l
72
70
73 def make_filename(repo, pat, node,
71 def make_filename(repo, pat, node,
74 total=None, seqno=None, revwidth=None, pathname=None):
72 total=None, seqno=None, revwidth=None, pathname=None):
75 node_expander = {
73 node_expander = {
76 'H': lambda: hex(node),
74 'H': lambda: hex(node),
77 'R': lambda: str(repo.changelog.rev(node)),
75 'R': lambda: str(repo.changelog.rev(node)),
78 'h': lambda: short(node),
76 'h': lambda: short(node),
79 }
77 }
80 expander = {
78 expander = {
81 '%': lambda: '%',
79 '%': lambda: '%',
82 'b': lambda: os.path.basename(repo.root),
80 'b': lambda: os.path.basename(repo.root),
83 }
81 }
84
82
85 try:
83 try:
86 if node:
84 if node:
87 expander.update(node_expander)
85 expander.update(node_expander)
88 if node and revwidth is not None:
86 if node and revwidth is not None:
89 expander['r'] = (lambda:
87 expander['r'] = (lambda:
90 str(repo.changelog.rev(node)).zfill(revwidth))
88 str(repo.changelog.rev(node)).zfill(revwidth))
91 if total is not None:
89 if total is not None:
92 expander['N'] = lambda: str(total)
90 expander['N'] = lambda: str(total)
93 if seqno is not None:
91 if seqno is not None:
94 expander['n'] = lambda: str(seqno)
92 expander['n'] = lambda: str(seqno)
95 if total is not None and seqno is not None:
93 if total is not None and seqno is not None:
96 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
94 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
97 if pathname is not None:
95 if pathname is not None:
98 expander['s'] = lambda: os.path.basename(pathname)
96 expander['s'] = lambda: os.path.basename(pathname)
99 expander['d'] = lambda: os.path.dirname(pathname) or '.'
97 expander['d'] = lambda: os.path.dirname(pathname) or '.'
100 expander['p'] = lambda: pathname
98 expander['p'] = lambda: pathname
101
99
102 newname = []
100 newname = []
103 patlen = len(pat)
101 patlen = len(pat)
104 i = 0
102 i = 0
105 while i < patlen:
103 while i < patlen:
106 c = pat[i]
104 c = pat[i]
107 if c == '%':
105 if c == '%':
108 i += 1
106 i += 1
109 c = pat[i]
107 c = pat[i]
110 c = expander[c]()
108 c = expander[c]()
111 newname.append(c)
109 newname.append(c)
112 i += 1
110 i += 1
113 return ''.join(newname)
111 return ''.join(newname)
114 except KeyError, inst:
112 except KeyError, inst:
115 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
113 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
116 inst.args[0])
114 inst.args[0])
117
115
118 def make_file(repo, pat, node=None,
116 def make_file(repo, pat, node=None,
119 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
117 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
120 if not pat or pat == '-':
118 if not pat or pat == '-':
121 return 'w' in mode and sys.stdout or sys.stdin
119 return 'w' in mode and sys.stdout or sys.stdin
122 if hasattr(pat, 'write') and 'w' in mode:
120 if hasattr(pat, 'write') and 'w' in mode:
123 return pat
121 return pat
124 if hasattr(pat, 'read') and 'r' in mode:
122 if hasattr(pat, 'read') and 'r' in mode:
125 return pat
123 return pat
126 return open(make_filename(repo, pat, node, total, seqno, revwidth,
124 return open(make_filename(repo, pat, node, total, seqno, revwidth,
127 pathname),
125 pathname),
128 mode)
126 mode)
129
127
130 def matchpats(repo, pats=[], opts={}, head=''):
128 def matchpats(repo, pats=[], opts={}, head=''):
131 cwd = repo.getcwd()
129 cwd = repo.getcwd()
132 if not pats and cwd:
130 if not pats and cwd:
133 opts['include'] = [os.path.join(cwd, i)
131 opts['include'] = [os.path.join(cwd, i)
134 for i in opts.get('include', [])]
132 for i in opts.get('include', [])]
135 opts['exclude'] = [os.path.join(cwd, x)
133 opts['exclude'] = [os.path.join(cwd, x)
136 for x in opts.get('exclude', [])]
134 for x in opts.get('exclude', [])]
137 cwd = ''
135 cwd = ''
138 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
136 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
139 opts.get('exclude'), head)
137 opts.get('exclude'), head)
140
138
141 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
139 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
142 files, matchfn, anypats = matchpats(repo, pats, opts, head)
140 files, matchfn, anypats = matchpats(repo, pats, opts, head)
143 exact = dict.fromkeys(files)
141 exact = dict.fromkeys(files)
144 for src, fn in repo.walk(node=node, files=files, match=matchfn,
142 for src, fn in repo.walk(node=node, files=files, match=matchfn,
145 badmatch=badmatch):
143 badmatch=badmatch):
146 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
144 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
147
145
148 def findrenames(repo, added=None, removed=None, threshold=0.5):
146 def findrenames(repo, added=None, removed=None, threshold=0.5):
149 if added is None or removed is None:
147 if added is None or removed is None:
150 added, removed = repo.status()[1:3]
148 added, removed = repo.status()[1:3]
151 changes = repo.changelog.read(repo.dirstate.parents()[0])
149 changes = repo.changelog.read(repo.dirstate.parents()[0])
152 mf = repo.manifest.read(changes[0])
150 mf = repo.manifest.read(changes[0])
153 for a in added:
151 for a in added:
154 aa = repo.wread(a)
152 aa = repo.wread(a)
155 bestscore, bestname = None, None
153 bestscore, bestname = None, None
156 for r in removed:
154 for r in removed:
157 rr = repo.file(r).read(mf[r])
155 rr = repo.file(r).read(mf[r])
158 delta = mdiff.textdiff(aa, rr)
156 delta = mdiff.textdiff(aa, rr)
159 if len(delta) < len(aa):
157 if len(delta) < len(aa):
160 myscore = 1.0 - (float(len(delta)) / len(aa))
158 myscore = 1.0 - (float(len(delta)) / len(aa))
161 if bestscore is None or myscore > bestscore:
159 if bestscore is None or myscore > bestscore:
162 bestscore, bestname = myscore, r
160 bestscore, bestname = myscore, r
163 if bestname and bestscore >= threshold:
161 if bestname and bestscore >= threshold:
164 yield bestname, a, bestscore
162 yield bestname, a, bestscore
165
163
166 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
164 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
167 similarity=None):
165 similarity=None):
168 if dry_run is None:
166 if dry_run is None:
169 dry_run = opts.get('dry_run')
167 dry_run = opts.get('dry_run')
170 if similarity is None:
168 if similarity is None:
171 similarity = float(opts.get('similarity') or 0)
169 similarity = float(opts.get('similarity') or 0)
172 add, remove = [], []
170 add, remove = [], []
173 mapping = {}
171 mapping = {}
174 for src, abs, rel, exact in walk(repo, pats, opts):
172 for src, abs, rel, exact in walk(repo, pats, opts):
175 if src == 'f' and repo.dirstate.state(abs) == '?':
173 if src == 'f' and repo.dirstate.state(abs) == '?':
176 add.append(abs)
174 add.append(abs)
177 mapping[abs] = rel, exact
175 mapping[abs] = rel, exact
178 if repo.ui.verbose or not exact:
176 if repo.ui.verbose or not exact:
179 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
177 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
180 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
178 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
181 remove.append(abs)
179 remove.append(abs)
182 mapping[abs] = rel, exact
180 mapping[abs] = rel, exact
183 if repo.ui.verbose or not exact:
181 if repo.ui.verbose or not exact:
184 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
182 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
185 if not dry_run:
183 if not dry_run:
186 repo.add(add, wlock=wlock)
184 repo.add(add, wlock=wlock)
187 repo.remove(remove, wlock=wlock)
185 repo.remove(remove, wlock=wlock)
188 if similarity > 0:
186 if similarity > 0:
189 for old, new, score in findrenames(repo, add, remove, similarity):
187 for old, new, score in findrenames(repo, add, remove, similarity):
190 oldrel, oldexact = mapping[old]
188 oldrel, oldexact = mapping[old]
191 newrel, newexact = mapping[new]
189 newrel, newexact = mapping[new]
192 if repo.ui.verbose or not oldexact or not newexact:
190 if repo.ui.verbose or not oldexact or not newexact:
193 repo.ui.status(_('recording removal of %s as rename to %s '
191 repo.ui.status(_('recording removal of %s as rename to %s '
194 '(%d%% similar)\n') %
192 '(%d%% similar)\n') %
195 (oldrel, newrel, score * 100))
193 (oldrel, newrel, score * 100))
196 if not dry_run:
194 if not dry_run:
197 repo.copy(old, new, wlock=wlock)
195 repo.copy(old, new, wlock=wlock)
198
196
199 class changeset_printer(object):
197 class changeset_printer(object):
200 '''show changeset information when templating not requested.'''
198 '''show changeset information when templating not requested.'''
201
199
202 def __init__(self, ui, repo, patch, buffered):
200 def __init__(self, ui, repo, patch, buffered):
203 self.ui = ui
201 self.ui = ui
204 self.repo = repo
202 self.repo = repo
205 self.buffered = buffered
203 self.buffered = buffered
206 self.patch = patch
204 self.patch = patch
207 self.header = {}
205 self.header = {}
208 self.hunk = {}
206 self.hunk = {}
209 self.lastheader = None
207 self.lastheader = None
210
208
211 def flush(self, rev):
209 def flush(self, rev):
212 if rev in self.header:
210 if rev in self.header:
213 h = self.header[rev]
211 h = self.header[rev]
214 if h != self.lastheader:
212 if h != self.lastheader:
215 self.lastheader = h
213 self.lastheader = h
216 self.ui.write(h)
214 self.ui.write(h)
217 del self.header[rev]
215 del self.header[rev]
218 if rev in self.hunk:
216 if rev in self.hunk:
219 self.ui.write(self.hunk[rev])
217 self.ui.write(self.hunk[rev])
220 del self.hunk[rev]
218 del self.hunk[rev]
221 return 1
219 return 1
222 return 0
220 return 0
223
221
224 def show(self, rev=0, changenode=None, copies=None, **props):
222 def show(self, rev=0, changenode=None, copies=None, **props):
225 if self.buffered:
223 if self.buffered:
226 self.ui.pushbuffer()
224 self.ui.pushbuffer()
227 self._show(rev, changenode, copies, props)
225 self._show(rev, changenode, copies, props)
228 self.hunk[rev] = self.ui.popbuffer()
226 self.hunk[rev] = self.ui.popbuffer()
229 else:
227 else:
230 self._show(rev, changenode, copies, props)
228 self._show(rev, changenode, copies, props)
231
229
232 def _show(self, rev, changenode, copies, props):
230 def _show(self, rev, changenode, copies, props):
233 '''show a single changeset or file revision'''
231 '''show a single changeset or file revision'''
234 log = self.repo.changelog
232 log = self.repo.changelog
235 if changenode is None:
233 if changenode is None:
236 changenode = log.node(rev)
234 changenode = log.node(rev)
237 elif not rev:
235 elif not rev:
238 rev = log.rev(changenode)
236 rev = log.rev(changenode)
239
237
240 if self.ui.quiet:
238 if self.ui.quiet:
241 self.ui.write("%d:%s\n" % (rev, short(changenode)))
239 self.ui.write("%d:%s\n" % (rev, short(changenode)))
242 return
240 return
243
241
244 changes = log.read(changenode)
242 changes = log.read(changenode)
245 date = util.datestr(changes[2])
243 date = util.datestr(changes[2])
246 extra = changes[5]
244 extra = changes[5]
247 branch = extra.get("branch")
245 branch = extra.get("branch")
248
246
249 hexfunc = self.ui.debugflag and hex or short
247 hexfunc = self.ui.debugflag and hex or short
250
248
251 parents = log.parentrevs(rev)
249 parents = log.parentrevs(rev)
252 if not self.ui.debugflag:
250 if not self.ui.debugflag:
253 if parents[1] == nullrev:
251 if parents[1] == nullrev:
254 if parents[0] >= rev - 1:
252 if parents[0] >= rev - 1:
255 parents = []
253 parents = []
256 else:
254 else:
257 parents = [parents[0]]
255 parents = [parents[0]]
258 parents = [(p, hexfunc(log.node(p))) for p in parents]
256 parents = [(p, hexfunc(log.node(p))) for p in parents]
259
257
260 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
258 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
261
259
262 if branch:
260 if branch:
263 branch = util.tolocal(branch)
261 branch = util.tolocal(branch)
264 self.ui.write(_("branch: %s\n") % branch)
262 self.ui.write(_("branch: %s\n") % branch)
265 for tag in self.repo.nodetags(changenode):
263 for tag in self.repo.nodetags(changenode):
266 self.ui.write(_("tag: %s\n") % tag)
264 self.ui.write(_("tag: %s\n") % tag)
267 for parent in parents:
265 for parent in parents:
268 self.ui.write(_("parent: %d:%s\n") % parent)
266 self.ui.write(_("parent: %d:%s\n") % parent)
269
267
270 if self.ui.debugflag:
268 if self.ui.debugflag:
271 self.ui.write(_("manifest: %d:%s\n") %
269 self.ui.write(_("manifest: %d:%s\n") %
272 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
270 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
273 self.ui.write(_("user: %s\n") % changes[1])
271 self.ui.write(_("user: %s\n") % changes[1])
274 self.ui.write(_("date: %s\n") % date)
272 self.ui.write(_("date: %s\n") % date)
275
273
276 if self.ui.debugflag:
274 if self.ui.debugflag:
277 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
275 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
278 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
276 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
279 files):
277 files):
280 if value:
278 if value:
281 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
279 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
282 elif changes[3] and self.ui.verbose:
280 elif changes[3] and self.ui.verbose:
283 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
281 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
284 if copies and self.ui.verbose:
282 if copies and self.ui.verbose:
285 copies = ['%s (%s)' % c for c in copies]
283 copies = ['%s (%s)' % c for c in copies]
286 self.ui.write(_("copies: %s\n") % ' '.join(copies))
284 self.ui.write(_("copies: %s\n") % ' '.join(copies))
287
285
288 if extra and self.ui.debugflag:
286 if extra and self.ui.debugflag:
289 extraitems = extra.items()
287 extraitems = extra.items()
290 extraitems.sort()
288 extraitems.sort()
291 for key, value in extraitems:
289 for key, value in extraitems:
292 self.ui.write(_("extra: %s=%s\n")
290 self.ui.write(_("extra: %s=%s\n")
293 % (key, value.encode('string_escape')))
291 % (key, value.encode('string_escape')))
294
292
295 description = changes[4].strip()
293 description = changes[4].strip()
296 if description:
294 if description:
297 if self.ui.verbose:
295 if self.ui.verbose:
298 self.ui.write(_("description:\n"))
296 self.ui.write(_("description:\n"))
299 self.ui.write(description)
297 self.ui.write(description)
300 self.ui.write("\n\n")
298 self.ui.write("\n\n")
301 else:
299 else:
302 self.ui.write(_("summary: %s\n") %
300 self.ui.write(_("summary: %s\n") %
303 description.splitlines()[0])
301 description.splitlines()[0])
304 self.ui.write("\n")
302 self.ui.write("\n")
305
303
306 self.showpatch(changenode)
304 self.showpatch(changenode)
307
305
308 def showpatch(self, node):
306 def showpatch(self, node):
309 if self.patch:
307 if self.patch:
310 prev = self.repo.changelog.parents(node)[0]
308 prev = self.repo.changelog.parents(node)[0]
311 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
309 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
312 self.ui.write("\n")
310 self.ui.write("\n")
313
311
314 class changeset_templater(changeset_printer):
312 class changeset_templater(changeset_printer):
315 '''format changeset information.'''
313 '''format changeset information.'''
316
314
317 def __init__(self, ui, repo, patch, mapfile, buffered):
315 def __init__(self, ui, repo, patch, mapfile, buffered):
318 changeset_printer.__init__(self, ui, repo, patch, buffered)
316 changeset_printer.__init__(self, ui, repo, patch, buffered)
319 self.t = templater.templater(mapfile, templater.common_filters,
317 self.t = templater.templater(mapfile, templater.common_filters,
320 cache={'parent': '{rev}:{node|short} ',
318 cache={'parent': '{rev}:{node|short} ',
321 'manifest': '{rev}:{node|short}',
319 'manifest': '{rev}:{node|short}',
322 'filecopy': '{name} ({source})'})
320 'filecopy': '{name} ({source})'})
323
321
324 def use_template(self, t):
322 def use_template(self, t):
325 '''set template string to use'''
323 '''set template string to use'''
326 self.t.cache['changeset'] = t
324 self.t.cache['changeset'] = t
327
325
328 def _show(self, rev, changenode, copies, props):
326 def _show(self, rev, changenode, copies, props):
329 '''show a single changeset or file revision'''
327 '''show a single changeset or file revision'''
330 log = self.repo.changelog
328 log = self.repo.changelog
331 if changenode is None:
329 if changenode is None:
332 changenode = log.node(rev)
330 changenode = log.node(rev)
333 elif not rev:
331 elif not rev:
334 rev = log.rev(changenode)
332 rev = log.rev(changenode)
335
333
336 changes = log.read(changenode)
334 changes = log.read(changenode)
337
335
338 def showlist(name, values, plural=None, **args):
336 def showlist(name, values, plural=None, **args):
339 '''expand set of values.
337 '''expand set of values.
340 name is name of key in template map.
338 name is name of key in template map.
341 values is list of strings or dicts.
339 values is list of strings or dicts.
342 plural is plural of name, if not simply name + 's'.
340 plural is plural of name, if not simply name + 's'.
343
341
344 expansion works like this, given name 'foo'.
342 expansion works like this, given name 'foo'.
345
343
346 if values is empty, expand 'no_foos'.
344 if values is empty, expand 'no_foos'.
347
345
348 if 'foo' not in template map, return values as a string,
346 if 'foo' not in template map, return values as a string,
349 joined by space.
347 joined by space.
350
348
351 expand 'start_foos'.
349 expand 'start_foos'.
352
350
353 for each value, expand 'foo'. if 'last_foo' in template
351 for each value, expand 'foo'. if 'last_foo' in template
354 map, expand it instead of 'foo' for last key.
352 map, expand it instead of 'foo' for last key.
355
353
356 expand 'end_foos'.
354 expand 'end_foos'.
357 '''
355 '''
358 if plural: names = plural
356 if plural: names = plural
359 else: names = name + 's'
357 else: names = name + 's'
360 if not values:
358 if not values:
361 noname = 'no_' + names
359 noname = 'no_' + names
362 if noname in self.t:
360 if noname in self.t:
363 yield self.t(noname, **args)
361 yield self.t(noname, **args)
364 return
362 return
365 if name not in self.t:
363 if name not in self.t:
366 if isinstance(values[0], str):
364 if isinstance(values[0], str):
367 yield ' '.join(values)
365 yield ' '.join(values)
368 else:
366 else:
369 for v in values:
367 for v in values:
370 yield dict(v, **args)
368 yield dict(v, **args)
371 return
369 return
372 startname = 'start_' + names
370 startname = 'start_' + names
373 if startname in self.t:
371 if startname in self.t:
374 yield self.t(startname, **args)
372 yield self.t(startname, **args)
375 vargs = args.copy()
373 vargs = args.copy()
376 def one(v, tag=name):
374 def one(v, tag=name):
377 try:
375 try:
378 vargs.update(v)
376 vargs.update(v)
379 except (AttributeError, ValueError):
377 except (AttributeError, ValueError):
380 try:
378 try:
381 for a, b in v:
379 for a, b in v:
382 vargs[a] = b
380 vargs[a] = b
383 except ValueError:
381 except ValueError:
384 vargs[name] = v
382 vargs[name] = v
385 return self.t(tag, **vargs)
383 return self.t(tag, **vargs)
386 lastname = 'last_' + name
384 lastname = 'last_' + name
387 if lastname in self.t:
385 if lastname in self.t:
388 last = values.pop()
386 last = values.pop()
389 else:
387 else:
390 last = None
388 last = None
391 for v in values:
389 for v in values:
392 yield one(v)
390 yield one(v)
393 if last is not None:
391 if last is not None:
394 yield one(last, tag=lastname)
392 yield one(last, tag=lastname)
395 endname = 'end_' + names
393 endname = 'end_' + names
396 if endname in self.t:
394 if endname in self.t:
397 yield self.t(endname, **args)
395 yield self.t(endname, **args)
398
396
399 def showbranches(**args):
397 def showbranches(**args):
400 branch = changes[5].get("branch")
398 branch = changes[5].get("branch")
401 if branch:
399 if branch:
402 branch = util.tolocal(branch)
400 branch = util.tolocal(branch)
403 return showlist('branch', [branch], plural='branches', **args)
401 return showlist('branch', [branch], plural='branches', **args)
404
402
405 def showparents(**args):
403 def showparents(**args):
406 parents = [[('rev', log.rev(p)), ('node', hex(p))]
404 parents = [[('rev', log.rev(p)), ('node', hex(p))]
407 for p in log.parents(changenode)
405 for p in log.parents(changenode)
408 if self.ui.debugflag or p != nullid]
406 if self.ui.debugflag or p != nullid]
409 if (not self.ui.debugflag and len(parents) == 1 and
407 if (not self.ui.debugflag and len(parents) == 1 and
410 parents[0][0][1] == rev - 1):
408 parents[0][0][1] == rev - 1):
411 return
409 return
412 return showlist('parent', parents, **args)
410 return showlist('parent', parents, **args)
413
411
414 def showtags(**args):
412 def showtags(**args):
415 return showlist('tag', self.repo.nodetags(changenode), **args)
413 return showlist('tag', self.repo.nodetags(changenode), **args)
416
414
417 def showextras(**args):
415 def showextras(**args):
418 extras = changes[5].items()
416 extras = changes[5].items()
419 extras.sort()
417 extras.sort()
420 for key, value in extras:
418 for key, value in extras:
421 args = args.copy()
419 args = args.copy()
422 args.update(dict(key=key, value=value))
420 args.update(dict(key=key, value=value))
423 yield self.t('extra', **args)
421 yield self.t('extra', **args)
424
422
425 def showcopies(**args):
423 def showcopies(**args):
426 c = [{'name': x[0], 'source': x[1]} for x in copies]
424 c = [{'name': x[0], 'source': x[1]} for x in copies]
427 return showlist('file_copy', c, plural='file_copies', **args)
425 return showlist('file_copy', c, plural='file_copies', **args)
428
426
429 if self.ui.debugflag:
427 if self.ui.debugflag:
430 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
428 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
431 def showfiles(**args):
429 def showfiles(**args):
432 return showlist('file', files[0], **args)
430 return showlist('file', files[0], **args)
433 def showadds(**args):
431 def showadds(**args):
434 return showlist('file_add', files[1], **args)
432 return showlist('file_add', files[1], **args)
435 def showdels(**args):
433 def showdels(**args):
436 return showlist('file_del', files[2], **args)
434 return showlist('file_del', files[2], **args)
437 def showmanifest(**args):
435 def showmanifest(**args):
438 args = args.copy()
436 args = args.copy()
439 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
437 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
440 node=hex(changes[0])))
438 node=hex(changes[0])))
441 return self.t('manifest', **args)
439 return self.t('manifest', **args)
442 else:
440 else:
443 def showfiles(**args):
441 def showfiles(**args):
444 return showlist('file', changes[3], **args)
442 return showlist('file', changes[3], **args)
445 showadds = ''
443 showadds = ''
446 showdels = ''
444 showdels = ''
447 showmanifest = ''
445 showmanifest = ''
448
446
449 defprops = {
447 defprops = {
450 'author': changes[1],
448 'author': changes[1],
451 'branches': showbranches,
449 'branches': showbranches,
452 'date': changes[2],
450 'date': changes[2],
453 'desc': changes[4],
451 'desc': changes[4],
454 'file_adds': showadds,
452 'file_adds': showadds,
455 'file_dels': showdels,
453 'file_dels': showdels,
456 'files': showfiles,
454 'files': showfiles,
457 'file_copies': showcopies,
455 'file_copies': showcopies,
458 'manifest': showmanifest,
456 'manifest': showmanifest,
459 'node': hex(changenode),
457 'node': hex(changenode),
460 'parents': showparents,
458 'parents': showparents,
461 'rev': rev,
459 'rev': rev,
462 'tags': showtags,
460 'tags': showtags,
463 'extras': showextras,
461 'extras': showextras,
464 }
462 }
465 props = props.copy()
463 props = props.copy()
466 props.update(defprops)
464 props.update(defprops)
467
465
468 try:
466 try:
469 if self.ui.debugflag and 'header_debug' in self.t:
467 if self.ui.debugflag and 'header_debug' in self.t:
470 key = 'header_debug'
468 key = 'header_debug'
471 elif self.ui.quiet and 'header_quiet' in self.t:
469 elif self.ui.quiet and 'header_quiet' in self.t:
472 key = 'header_quiet'
470 key = 'header_quiet'
473 elif self.ui.verbose and 'header_verbose' in self.t:
471 elif self.ui.verbose and 'header_verbose' in self.t:
474 key = 'header_verbose'
472 key = 'header_verbose'
475 elif 'header' in self.t:
473 elif 'header' in self.t:
476 key = 'header'
474 key = 'header'
477 else:
475 else:
478 key = ''
476 key = ''
479 if key:
477 if key:
480 h = templater.stringify(self.t(key, **props))
478 h = templater.stringify(self.t(key, **props))
481 if self.buffered:
479 if self.buffered:
482 self.header[rev] = h
480 self.header[rev] = h
483 else:
481 else:
484 self.ui.write(h)
482 self.ui.write(h)
485 if self.ui.debugflag and 'changeset_debug' in self.t:
483 if self.ui.debugflag and 'changeset_debug' in self.t:
486 key = 'changeset_debug'
484 key = 'changeset_debug'
487 elif self.ui.quiet and 'changeset_quiet' in self.t:
485 elif self.ui.quiet and 'changeset_quiet' in self.t:
488 key = 'changeset_quiet'
486 key = 'changeset_quiet'
489 elif self.ui.verbose and 'changeset_verbose' in self.t:
487 elif self.ui.verbose and 'changeset_verbose' in self.t:
490 key = 'changeset_verbose'
488 key = 'changeset_verbose'
491 else:
489 else:
492 key = 'changeset'
490 key = 'changeset'
493 self.ui.write(templater.stringify(self.t(key, **props)))
491 self.ui.write(templater.stringify(self.t(key, **props)))
494 self.showpatch(changenode)
492 self.showpatch(changenode)
495 except KeyError, inst:
493 except KeyError, inst:
496 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
494 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
497 inst.args[0]))
495 inst.args[0]))
498 except SyntaxError, inst:
496 except SyntaxError, inst:
499 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
497 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
500
498
501 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
499 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
502 """show one changeset using template or regular display.
500 """show one changeset using template or regular display.
503
501
504 Display format will be the first non-empty hit of:
502 Display format will be the first non-empty hit of:
505 1. option 'template'
503 1. option 'template'
506 2. option 'style'
504 2. option 'style'
507 3. [ui] setting 'logtemplate'
505 3. [ui] setting 'logtemplate'
508 4. [ui] setting 'style'
506 4. [ui] setting 'style'
509 If all of these values are either unset or the empty string,
507 If all of these values are either unset or the empty string,
510 regular display via changeset_printer() is done.
508 regular display via changeset_printer() is done.
511 """
509 """
512 # options
510 # options
513 patch = False
511 patch = False
514 if opts.get('patch'):
512 if opts.get('patch'):
515 patch = matchfn or util.always
513 patch = matchfn or util.always
516
514
517 tmpl = opts.get('template')
515 tmpl = opts.get('template')
518 mapfile = None
516 mapfile = None
519 if tmpl:
517 if tmpl:
520 tmpl = templater.parsestring(tmpl, quoted=False)
518 tmpl = templater.parsestring(tmpl, quoted=False)
521 else:
519 else:
522 mapfile = opts.get('style')
520 mapfile = opts.get('style')
523 # ui settings
521 # ui settings
524 if not mapfile:
522 if not mapfile:
525 tmpl = ui.config('ui', 'logtemplate')
523 tmpl = ui.config('ui', 'logtemplate')
526 if tmpl:
524 if tmpl:
527 tmpl = templater.parsestring(tmpl)
525 tmpl = templater.parsestring(tmpl)
528 else:
526 else:
529 mapfile = ui.config('ui', 'style')
527 mapfile = ui.config('ui', 'style')
530
528
531 if tmpl or mapfile:
529 if tmpl or mapfile:
532 if mapfile:
530 if mapfile:
533 if not os.path.split(mapfile)[0]:
531 if not os.path.split(mapfile)[0]:
534 mapname = (templater.templatepath('map-cmdline.' + mapfile)
532 mapname = (templater.templatepath('map-cmdline.' + mapfile)
535 or templater.templatepath(mapfile))
533 or templater.templatepath(mapfile))
536 if mapname: mapfile = mapname
534 if mapname: mapfile = mapname
537 try:
535 try:
538 t = changeset_templater(ui, repo, patch, mapfile, buffered)
536 t = changeset_templater(ui, repo, patch, mapfile, buffered)
539 except SyntaxError, inst:
537 except SyntaxError, inst:
540 raise util.Abort(inst.args[0])
538 raise util.Abort(inst.args[0])
541 if tmpl: t.use_template(tmpl)
539 if tmpl: t.use_template(tmpl)
542 return t
540 return t
543 return changeset_printer(ui, repo, patch, buffered)
541 return changeset_printer(ui, repo, patch, buffered)
544
542
545 def finddate(ui, repo, date):
543 def finddate(ui, repo, date):
546 """Find the tipmost changeset that matches the given date spec"""
544 """Find the tipmost changeset that matches the given date spec"""
547 df = util.matchdate(date + " to " + date)
545 df = util.matchdate(date + " to " + date)
548 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
546 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
549 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
547 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
550 results = {}
548 results = {}
551 for st, rev, fns in changeiter:
549 for st, rev, fns in changeiter:
552 if st == 'add':
550 if st == 'add':
553 d = get(rev)[2]
551 d = get(rev)[2]
554 if df(d[0]):
552 if df(d[0]):
555 results[rev] = d
553 results[rev] = d
556 elif st == 'iter':
554 elif st == 'iter':
557 if rev in results:
555 if rev in results:
558 ui.status("Found revision %s from %s\n" %
556 ui.status("Found revision %s from %s\n" %
559 (rev, util.datestr(results[rev])))
557 (rev, util.datestr(results[rev])))
560 return str(rev)
558 return str(rev)
561
559
562 raise util.Abort(_("revision matching date not found"))
560 raise util.Abort(_("revision matching date not found"))
563
561
564 def walkchangerevs(ui, repo, pats, change, opts):
562 def walkchangerevs(ui, repo, pats, change, opts):
565 '''Iterate over files and the revs they changed in.
563 '''Iterate over files and the revs they changed in.
566
564
567 Callers most commonly need to iterate backwards over the history
565 Callers most commonly need to iterate backwards over the history
568 they are interested in. Doing so has awful (quadratic-looking)
566 they are interested in. Doing so has awful (quadratic-looking)
569 performance, so we use iterators in a "windowed" way.
567 performance, so we use iterators in a "windowed" way.
570
568
571 We walk a window of revisions in the desired order. Within the
569 We walk a window of revisions in the desired order. Within the
572 window, we first walk forwards to gather data, then in the desired
570 window, we first walk forwards to gather data, then in the desired
573 order (usually backwards) to display it.
571 order (usually backwards) to display it.
574
572
575 This function returns an (iterator, matchfn) tuple. The iterator
573 This function returns an (iterator, matchfn) tuple. The iterator
576 yields 3-tuples. They will be of one of the following forms:
574 yields 3-tuples. They will be of one of the following forms:
577
575
578 "window", incrementing, lastrev: stepping through a window,
576 "window", incrementing, lastrev: stepping through a window,
579 positive if walking forwards through revs, last rev in the
577 positive if walking forwards through revs, last rev in the
580 sequence iterated over - use to reset state for the current window
578 sequence iterated over - use to reset state for the current window
581
579
582 "add", rev, fns: out-of-order traversal of the given file names
580 "add", rev, fns: out-of-order traversal of the given file names
583 fns, which changed during revision rev - use to gather data for
581 fns, which changed during revision rev - use to gather data for
584 possible display
582 possible display
585
583
586 "iter", rev, None: in-order traversal of the revs earlier iterated
584 "iter", rev, None: in-order traversal of the revs earlier iterated
587 over with "add" - use to display data'''
585 over with "add" - use to display data'''
588
586
589 def increasing_windows(start, end, windowsize=8, sizelimit=512):
587 def increasing_windows(start, end, windowsize=8, sizelimit=512):
590 if start < end:
588 if start < end:
591 while start < end:
589 while start < end:
592 yield start, min(windowsize, end-start)
590 yield start, min(windowsize, end-start)
593 start += windowsize
591 start += windowsize
594 if windowsize < sizelimit:
592 if windowsize < sizelimit:
595 windowsize *= 2
593 windowsize *= 2
596 else:
594 else:
597 while start > end:
595 while start > end:
598 yield start, min(windowsize, start-end-1)
596 yield start, min(windowsize, start-end-1)
599 start -= windowsize
597 start -= windowsize
600 if windowsize < sizelimit:
598 if windowsize < sizelimit:
601 windowsize *= 2
599 windowsize *= 2
602
600
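# Editorial aside (illustrative, not part of this changeset): a standalone
# copy of the forward branch of increasing_windows() above, showing the
# (start, size) pairs it produces -- window sizes double up to sizelimit
# and are capped by the distance remaining to `end`.
def _demo_increasing_windows(start, end, windowsize=8, sizelimit=512):
    while start < end:
        yield start, min(windowsize, end - start)
        start += windowsize
        if windowsize < sizelimit:
            windowsize *= 2
print list(_demo_increasing_windows(0, 100))   # [(0, 8), (8, 16), (24, 32), (56, 44)]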
603 files, matchfn, anypats = matchpats(repo, pats, opts)
601 files, matchfn, anypats = matchpats(repo, pats, opts)
604 follow = opts.get('follow') or opts.get('follow_first')
602 follow = opts.get('follow') or opts.get('follow_first')
605
603
606 if repo.changelog.count() == 0:
604 if repo.changelog.count() == 0:
607 return [], matchfn
605 return [], matchfn
608
606
609 if follow:
607 if follow:
610 defrange = '%s:0' % repo.changectx().rev()
608 defrange = '%s:0' % repo.changectx().rev()
611 else:
609 else:
612 defrange = 'tip:0'
610 defrange = 'tip:0'
613 revs = revrange(repo, opts['rev'] or [defrange])
611 revs = revrange(repo, opts['rev'] or [defrange])
614 wanted = {}
612 wanted = {}
615 slowpath = anypats or opts.get('removed')
613 slowpath = anypats or opts.get('removed')
616 fncache = {}
614 fncache = {}
617
615
618 if not slowpath and not files:
616 if not slowpath and not files:
619 # No files, no patterns. Display all revs.
617 # No files, no patterns. Display all revs.
620 wanted = dict.fromkeys(revs)
618 wanted = dict.fromkeys(revs)
621 copies = []
619 copies = []
622 if not slowpath:
620 if not slowpath:
623 # Only files, no patterns. Check the history of each file.
621 # Only files, no patterns. Check the history of each file.
624 def filerevgen(filelog, node):
622 def filerevgen(filelog, node):
625 cl_count = repo.changelog.count()
623 cl_count = repo.changelog.count()
626 if node is None:
624 if node is None:
627 last = filelog.count() - 1
625 last = filelog.count() - 1
628 else:
626 else:
629 last = filelog.rev(node)
627 last = filelog.rev(node)
630 for i, window in increasing_windows(last, nullrev):
628 for i, window in increasing_windows(last, nullrev):
631 revs = []
629 revs = []
632 for j in xrange(i - window, i + 1):
630 for j in xrange(i - window, i + 1):
633 n = filelog.node(j)
631 n = filelog.node(j)
634 revs.append((filelog.linkrev(n),
632 revs.append((filelog.linkrev(n),
635 follow and filelog.renamed(n)))
633 follow and filelog.renamed(n)))
636 revs.reverse()
634 revs.reverse()
637 for rev in revs:
635 for rev in revs:
638 # only yield revs for which we have the changelog entry; missing
636 # only yield revs for which we have the changelog entry; missing
639 # entries can occur while doing "hg log" during a pull or commit
637 # entries can occur while doing "hg log" during a pull or commit
640 if rev[0] < cl_count:
638 if rev[0] < cl_count:
641 yield rev
639 yield rev
642 def iterfiles():
640 def iterfiles():
643 for filename in files:
641 for filename in files:
644 yield filename, None
642 yield filename, None
645 for filename_node in copies:
643 for filename_node in copies:
646 yield filename_node
644 yield filename_node
647 minrev, maxrev = min(revs), max(revs)
645 minrev, maxrev = min(revs), max(revs)
648 for file_, node in iterfiles():
646 for file_, node in iterfiles():
649 filelog = repo.file(file_)
647 filelog = repo.file(file_)
650 # A zero count may be a directory or deleted file, so
648 # A zero count may be a directory or deleted file, so
651 # try to find matching entries on the slow path.
649 # try to find matching entries on the slow path.
652 if filelog.count() == 0:
650 if filelog.count() == 0:
653 slowpath = True
651 slowpath = True
654 break
652 break
655 for rev, copied in filerevgen(filelog, node):
653 for rev, copied in filerevgen(filelog, node):
656 if rev <= maxrev:
654 if rev <= maxrev:
657 if rev < minrev:
655 if rev < minrev:
658 break
656 break
659 fncache.setdefault(rev, [])
657 fncache.setdefault(rev, [])
660 fncache[rev].append(file_)
658 fncache[rev].append(file_)
661 wanted[rev] = 1
659 wanted[rev] = 1
662 if follow and copied:
660 if follow and copied:
663 copies.append(copied)
661 copies.append(copied)
664 if slowpath:
662 if slowpath:
665 if follow:
663 if follow:
666 raise util.Abort(_('can only follow copies/renames for explicit '
664 raise util.Abort(_('can only follow copies/renames for explicit '
667 'file names'))
665 'file names'))
668
666
669 # The slow path checks files modified in every changeset.
667 # The slow path checks files modified in every changeset.
670 def changerevgen():
668 def changerevgen():
671 for i, window in increasing_windows(repo.changelog.count()-1,
669 for i, window in increasing_windows(repo.changelog.count()-1,
672 nullrev):
670 nullrev):
673 for j in xrange(i - window, i + 1):
671 for j in xrange(i - window, i + 1):
674 yield j, change(j)[3]
672 yield j, change(j)[3]
675
673
676 for rev, changefiles in changerevgen():
674 for rev, changefiles in changerevgen():
677 matches = filter(matchfn, changefiles)
675 matches = filter(matchfn, changefiles)
678 if matches:
676 if matches:
679 fncache[rev] = matches
677 fncache[rev] = matches
680 wanted[rev] = 1
678 wanted[rev] = 1
681
679
682 class followfilter:
680 class followfilter:
683 def __init__(self, onlyfirst=False):
681 def __init__(self, onlyfirst=False):
684 self.startrev = nullrev
682 self.startrev = nullrev
685 self.roots = []
683 self.roots = []
686 self.onlyfirst = onlyfirst
684 self.onlyfirst = onlyfirst
687
685
688 def match(self, rev):
686 def match(self, rev):
689 def realparents(rev):
687 def realparents(rev):
690 if self.onlyfirst:
688 if self.onlyfirst:
691 return repo.changelog.parentrevs(rev)[0:1]
689 return repo.changelog.parentrevs(rev)[0:1]
692 else:
690 else:
693 return filter(lambda x: x != nullrev,
691 return filter(lambda x: x != nullrev,
694 repo.changelog.parentrevs(rev))
692 repo.changelog.parentrevs(rev))
695
693
696 if self.startrev == nullrev:
694 if self.startrev == nullrev:
697 self.startrev = rev
695 self.startrev = rev
698 return True
696 return True
699
697
700 if rev > self.startrev:
698 if rev > self.startrev:
701 # forward: all descendants
699 # forward: all descendants
702 if not self.roots:
700 if not self.roots:
703 self.roots.append(self.startrev)
701 self.roots.append(self.startrev)
704 for parent in realparents(rev):
702 for parent in realparents(rev):
705 if parent in self.roots:
703 if parent in self.roots:
706 self.roots.append(rev)
704 self.roots.append(rev)
707 return True
705 return True
708 else:
706 else:
709 # backwards: all parents
707 # backwards: all parents
710 if not self.roots:
708 if not self.roots:
711 self.roots.extend(realparents(self.startrev))
709 self.roots.extend(realparents(self.startrev))
712 if rev in self.roots:
710 if rev in self.roots:
713 self.roots.remove(rev)
711 self.roots.remove(rev)
714 self.roots.extend(realparents(rev))
712 self.roots.extend(realparents(rev))
715 return True
713 return True
716
714
717 return False
715 return False
718
716
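# Editorial note (explanatory, not part of this changeset): followfilter
# keeps a growing `roots` list so that match(rev) answers "is rev on the
# followed line of history?" -- walking forward it accepts descendants of
# startrev (a rev whose parent is already in roots), while walking
# backwards it accepts ancestors (rev is removed from roots and replaced
# by its parents).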
719 # it might be worthwhile to do this in the iterator if the rev range
717 # it might be worthwhile to do this in the iterator if the rev range
720 # is descending and the prune args are all within that range
718 # is descending and the prune args are all within that range
721 for rev in opts.get('prune', ()):
719 for rev in opts.get('prune', ()):
722 rev = repo.changelog.rev(repo.lookup(rev))
720 rev = repo.changelog.rev(repo.lookup(rev))
723 ff = followfilter()
721 ff = followfilter()
724 stop = min(revs[0], revs[-1])
722 stop = min(revs[0], revs[-1])
725 for x in xrange(rev, stop-1, -1):
723 for x in xrange(rev, stop-1, -1):
726 if ff.match(x) and x in wanted:
724 if ff.match(x) and x in wanted:
727 del wanted[x]
725 del wanted[x]
728
726
729 def iterate():
727 def iterate():
730 if follow and not files:
728 if follow and not files:
731 ff = followfilter(onlyfirst=opts.get('follow_first'))
729 ff = followfilter(onlyfirst=opts.get('follow_first'))
732 def want(rev):
730 def want(rev):
733 if ff.match(rev) and rev in wanted:
731 if ff.match(rev) and rev in wanted:
734 return True
732 return True
735 return False
733 return False
736 else:
734 else:
737 def want(rev):
735 def want(rev):
738 return rev in wanted
736 return rev in wanted
739
737
740 for i, window in increasing_windows(0, len(revs)):
738 for i, window in increasing_windows(0, len(revs)):
741 yield 'window', revs[0] < revs[-1], revs[-1]
739 yield 'window', revs[0] < revs[-1], revs[-1]
742 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
740 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
743 srevs = list(nrevs)
741 srevs = list(nrevs)
744 srevs.sort()
742 srevs.sort()
745 for rev in srevs:
743 for rev in srevs:
746 fns = fncache.get(rev)
744 fns = fncache.get(rev)
747 if not fns:
745 if not fns:
748 def fns_generator():
746 def fns_generator():
749 for f in change(rev)[3]:
747 for f in change(rev)[3]:
750 if matchfn(f):
748 if matchfn(f):
751 yield f
749 yield f
752 fns = fns_generator()
750 fns = fns_generator()
753 yield 'add', rev, fns
751 yield 'add', rev, fns
754 for rev in nrevs:
752 for rev in nrevs:
755 yield 'iter', rev, None
753 yield 'iter', rev, None
756 return iterate(), matchfn
754 return iterate(), matchfn
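# Editorial note (not part of this changeset): a minimal, self-contained
# sketch of the consumer pattern walkchangerevs' docstring describes, and
# which finddate() above follows -- gather data on 'add', display it on
# 'iter'. The hand-written tuples stand in for the real iterator.
_demo_changeiter = [('window', True, 3),
                    ('add', 3, ['b.txt']), ('add', 1, ['a.txt']),
                    ('iter', 1, None), ('iter', 3, None)]
_gathered = {}
for _st, _rev, _fns in _demo_changeiter:
    if _st == 'window':
        _gathered.clear()                  # reset per-window state
    elif _st == 'add':
        _gathered[_rev] = list(_fns)       # out-of-order: collect data
    elif _st == 'iter':
        print _rev, _gathered.get(_rev)    # in-order: display data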
@@ -1,3278 +1,3278 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 demandload(globals(), "difflib patch time help mdiff tempfile")
13 import difflib, patch, time, help, mdiff, tempfile
14 demandload(globals(), "traceback errno version atexit")
14 import traceback, errno, version, atexit
15 demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
17 class UnknownCommand(Exception):
17 class UnknownCommand(Exception):
18 """Exception raised if command is not in the command table."""
18 """Exception raised if command is not in the command table."""
19 class AmbiguousCommand(Exception):
19 class AmbiguousCommand(Exception):
20 """Exception raised if command shortcut matches more than one command."""
20 """Exception raised if command shortcut matches more than one command."""
21
21
22 def bail_if_changed(repo):
22 def bail_if_changed(repo):
23 modified, added, removed, deleted = repo.status()[:4]
23 modified, added, removed, deleted = repo.status()[:4]
24 if modified or added or removed or deleted:
24 if modified or added or removed or deleted:
25 raise util.Abort(_("outstanding uncommitted changes"))
25 raise util.Abort(_("outstanding uncommitted changes"))
26
26
27 def logmessage(opts):
27 def logmessage(opts):
28 """ get the log message according to -m and -l option """
28 """ get the log message according to -m and -l option """
29 message = opts['message']
29 message = opts['message']
30 logfile = opts['logfile']
30 logfile = opts['logfile']
31
31
32 if message and logfile:
32 if message and logfile:
33 raise util.Abort(_('options --message and --logfile are mutually '
33 raise util.Abort(_('options --message and --logfile are mutually '
34 'exclusive'))
34 'exclusive'))
35 if not message and logfile:
35 if not message and logfile:
36 try:
36 try:
37 if logfile == '-':
37 if logfile == '-':
38 message = sys.stdin.read()
38 message = sys.stdin.read()
39 else:
39 else:
40 message = open(logfile).read()
40 message = open(logfile).read()
41 except IOError, inst:
41 except IOError, inst:
42 raise util.Abort(_("can't read commit message '%s': %s") %
42 raise util.Abort(_("can't read commit message '%s': %s") %
43 (logfile, inst.strerror))
43 (logfile, inst.strerror))
44 return message
44 return message
45
45
46 def setremoteconfig(ui, opts):
46 def setremoteconfig(ui, opts):
47 "copy remote options to ui tree"
47 "copy remote options to ui tree"
48 if opts.get('ssh'):
48 if opts.get('ssh'):
49 ui.setconfig("ui", "ssh", opts['ssh'])
49 ui.setconfig("ui", "ssh", opts['ssh'])
50 if opts.get('remotecmd'):
50 if opts.get('remotecmd'):
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52
52
53 # Commands start here, listed alphabetically
53 # Commands start here, listed alphabetically
54
54
55 def add(ui, repo, *pats, **opts):
55 def add(ui, repo, *pats, **opts):
56 """add the specified files on the next commit
56 """add the specified files on the next commit
57
57
58 Schedule files to be version controlled and added to the repository.
58 Schedule files to be version controlled and added to the repository.
59
59
60 The files will be added to the repository at the next commit. To
60 The files will be added to the repository at the next commit. To
61 undo an add before that, see hg revert.
61 undo an add before that, see hg revert.
62
62
63 If no names are given, add all files in the repository.
63 If no names are given, add all files in the repository.
64 """
64 """
65
65
66 names = []
66 names = []
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 if exact:
68 if exact:
69 if ui.verbose:
69 if ui.verbose:
70 ui.status(_('adding %s\n') % rel)
70 ui.status(_('adding %s\n') % rel)
71 names.append(abs)
71 names.append(abs)
72 elif repo.dirstate.state(abs) == '?':
72 elif repo.dirstate.state(abs) == '?':
73 ui.status(_('adding %s\n') % rel)
73 ui.status(_('adding %s\n') % rel)
74 names.append(abs)
74 names.append(abs)
75 if not opts.get('dry_run'):
75 if not opts.get('dry_run'):
76 repo.add(names)
76 repo.add(names)
77
77
78 def addremove(ui, repo, *pats, **opts):
78 def addremove(ui, repo, *pats, **opts):
79 """add all new files, delete all missing files
79 """add all new files, delete all missing files
80
80
81 Add all new files and remove all missing files from the repository.
81 Add all new files and remove all missing files from the repository.
82
82
83 New files are ignored if they match any of the patterns in .hgignore. As
83 New files are ignored if they match any of the patterns in .hgignore. As
84 with add, these changes take effect at the next commit.
84 with add, these changes take effect at the next commit.
85
85
86 Use the -s option to detect renamed files. With a parameter > 0,
86 Use the -s option to detect renamed files. With a parameter > 0,
87 this compares every removed file with every added file and records
87 this compares every removed file with every added file and records
88 those similar enough as renames. This option takes a percentage
88 those similar enough as renames. This option takes a percentage
89 between 0 (disabled) and 100 (files must be identical) as its
89 between 0 (disabled) and 100 (files must be identical) as its
90 parameter. Detecting renamed files this way can be expensive.
90 parameter. Detecting renamed files this way can be expensive.
91 """
91 """
92 sim = float(opts.get('similarity') or 0)
92 sim = float(opts.get('similarity') or 0)
93 if sim < 0 or sim > 100:
93 if sim < 0 or sim > 100:
94 raise util.Abort(_('similarity must be between 0 and 100'))
94 raise util.Abort(_('similarity must be between 0 and 100'))
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96
96
97 def annotate(ui, repo, *pats, **opts):
97 def annotate(ui, repo, *pats, **opts):
98 """show changeset information per file line
98 """show changeset information per file line
99
99
100 List changes in files, showing the revision id responsible for each line
100 List changes in files, showing the revision id responsible for each line
101
101
102 This command is useful for discovering who made a change or when a
102 This command is useful for discovering who made a change or when a
103 change took place.
103 change took place.
104
104
105 Without the -a option, annotate will avoid processing files it
105 Without the -a option, annotate will avoid processing files it
106 detects as binary. With -a, annotate will generate an annotation
106 detects as binary. With -a, annotate will generate an annotation
107 anyway, probably with undesirable results.
107 anyway, probably with undesirable results.
108 """
108 """
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110
110
111 if not pats:
111 if not pats:
112 raise util.Abort(_('at least one file name or pattern required'))
112 raise util.Abort(_('at least one file name or pattern required'))
113
113
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 ['number', lambda x: str(x.rev())],
115 ['number', lambda x: str(x.rev())],
116 ['changeset', lambda x: short(x.node())],
116 ['changeset', lambda x: short(x.node())],
117 ['date', getdate], ['follow', lambda x: x.path()]]
117 ['date', getdate], ['follow', lambda x: x.path()]]
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 and not opts['follow']):
119 and not opts['follow']):
120 opts['number'] = 1
120 opts['number'] = 1
121
121
122 ctx = repo.changectx(opts['rev'])
122 ctx = repo.changectx(opts['rev'])
123
123
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 node=ctx.node()):
125 node=ctx.node()):
126 fctx = ctx.filectx(abs)
126 fctx = ctx.filectx(abs)
127 if not opts['text'] and util.binary(fctx.data()):
127 if not opts['text'] and util.binary(fctx.data()):
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 continue
129 continue
130
130
131 lines = fctx.annotate(follow=opts.get('follow'))
131 lines = fctx.annotate(follow=opts.get('follow'))
132 pieces = []
132 pieces = []
133
133
134 for o, f in opmap:
134 for o, f in opmap:
135 if opts[o]:
135 if opts[o]:
136 l = [f(n) for n, dummy in lines]
136 l = [f(n) for n, dummy in lines]
137 if l:
137 if l:
138 m = max(map(len, l))
138 m = max(map(len, l))
139 pieces.append(["%*s" % (m, x) for x in l])
139 pieces.append(["%*s" % (m, x) for x in l])
140
140
141 if pieces:
141 if pieces:
142 for p, l in zip(zip(*pieces), lines):
142 for p, l in zip(zip(*pieces), lines):
143 ui.write("%s: %s" % (" ".join(p), l[1]))
143 ui.write("%s: %s" % (" ".join(p), l[1]))
144
144
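# Editorial aside (made-up values, not part of this changeset): how the
# zip(zip(*pieces), lines) loop above lines up the per-option columns.
# Each entry of `pieces` is one column, already padded to a fixed width;
# zip(*pieces) regroups the columns into one tuple per annotated line.
_pieces = [[' mpm', 'jdoe'], ['12', ' 3']]
_lines = [(None, 'first line\n'), (None, 'second line\n')]
for _p, _l in zip(zip(*_pieces), _lines):
    sys.stdout.write("%s: %s" % (" ".join(_p), _l[1]))
# prints:
#  mpm 12: first line
# jdoe  3: second line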
145 def archive(ui, repo, dest, **opts):
145 def archive(ui, repo, dest, **opts):
146 '''create unversioned archive of a repository revision
146 '''create unversioned archive of a repository revision
147
147
148 By default, the revision used is the parent of the working
148 By default, the revision used is the parent of the working
149 directory; use "-r" to specify a different revision.
149 directory; use "-r" to specify a different revision.
150
150
151 To specify the type of archive to create, use "-t". Valid
151 To specify the type of archive to create, use "-t". Valid
152 types are:
152 types are:
153
153
154 "files" (default): a directory full of files
154 "files" (default): a directory full of files
155 "tar": tar archive, uncompressed
155 "tar": tar archive, uncompressed
156 "tbz2": tar archive, compressed using bzip2
156 "tbz2": tar archive, compressed using bzip2
157 "tgz": tar archive, compressed using gzip
157 "tgz": tar archive, compressed using gzip
158 "uzip": zip archive, uncompressed
158 "uzip": zip archive, uncompressed
159 "zip": zip archive, compressed using deflate
159 "zip": zip archive, compressed using deflate
160
160
161 The exact name of the destination archive or directory is given
161 The exact name of the destination archive or directory is given
162 using a format string; see "hg help export" for details.
162 using a format string; see "hg help export" for details.
163
163
164 Each member added to an archive file has a directory prefix
164 Each member added to an archive file has a directory prefix
165 prepended. Use "-p" to specify a format string for the prefix.
165 prepended. Use "-p" to specify a format string for the prefix.
166 The default is the basename of the archive, with suffixes removed.
166 The default is the basename of the archive, with suffixes removed.
167 '''
167 '''
168
168
169 node = repo.changectx(opts['rev']).node()
169 node = repo.changectx(opts['rev']).node()
170 dest = cmdutil.make_filename(repo, dest, node)
170 dest = cmdutil.make_filename(repo, dest, node)
171 if os.path.realpath(dest) == repo.root:
171 if os.path.realpath(dest) == repo.root:
172 raise util.Abort(_('repository root cannot be destination'))
172 raise util.Abort(_('repository root cannot be destination'))
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 kind = opts.get('type') or 'files'
174 kind = opts.get('type') or 'files'
175 prefix = opts['prefix']
175 prefix = opts['prefix']
176 if dest == '-':
176 if dest == '-':
177 if kind == 'files':
177 if kind == 'files':
178 raise util.Abort(_('cannot archive plain files to stdout'))
178 raise util.Abort(_('cannot archive plain files to stdout'))
179 dest = sys.stdout
179 dest = sys.stdout
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 prefix = cmdutil.make_filename(repo, prefix, node)
181 prefix = cmdutil.make_filename(repo, prefix, node)
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 matchfn, prefix)
183 matchfn, prefix)
184
184
185 def backout(ui, repo, rev, **opts):
185 def backout(ui, repo, rev, **opts):
186 '''reverse effect of earlier changeset
186 '''reverse effect of earlier changeset
187
187
188 Commit the backed out changes as a new changeset. The new
188 Commit the backed out changes as a new changeset. The new
189 changeset is a child of the backed out changeset.
189 changeset is a child of the backed out changeset.
190
190
191 If you back out a changeset other than the tip, a new head is
191 If you back out a changeset other than the tip, a new head is
192 created. This head is the parent of the working directory. If
192 created. This head is the parent of the working directory. If
193 you back out an old changeset, your working directory will appear
193 you back out an old changeset, your working directory will appear
194 old after the backout. You should merge the backout changeset
194 old after the backout. You should merge the backout changeset
195 with another head.
195 with another head.
196
196
197 The --merge option remembers the parent of the working directory
197 The --merge option remembers the parent of the working directory
198 before starting the backout, then merges the new head with that
198 before starting the backout, then merges the new head with that
199 changeset afterwards. This saves you from doing the merge by
199 changeset afterwards. This saves you from doing the merge by
200 hand. The result of this merge is not committed, as with a normal
200 hand. The result of this merge is not committed, as with a normal
201 merge.'''
201 merge.'''
202
202
203 bail_if_changed(repo)
203 bail_if_changed(repo)
204 op1, op2 = repo.dirstate.parents()
204 op1, op2 = repo.dirstate.parents()
205 if op2 != nullid:
205 if op2 != nullid:
206 raise util.Abort(_('outstanding uncommitted merge'))
206 raise util.Abort(_('outstanding uncommitted merge'))
207 node = repo.lookup(rev)
207 node = repo.lookup(rev)
208 p1, p2 = repo.changelog.parents(node)
208 p1, p2 = repo.changelog.parents(node)
209 if p1 == nullid:
209 if p1 == nullid:
210 raise util.Abort(_('cannot back out a change with no parents'))
210 raise util.Abort(_('cannot back out a change with no parents'))
211 if p2 != nullid:
211 if p2 != nullid:
212 if not opts['parent']:
212 if not opts['parent']:
213 raise util.Abort(_('cannot back out a merge changeset without '
213 raise util.Abort(_('cannot back out a merge changeset without '
214 '--parent'))
214 '--parent'))
215 p = repo.lookup(opts['parent'])
215 p = repo.lookup(opts['parent'])
216 if p not in (p1, p2):
216 if p not in (p1, p2):
217 raise util.Abort(_('%s is not a parent of %s') %
217 raise util.Abort(_('%s is not a parent of %s') %
218 (short(p), short(node)))
218 (short(p), short(node)))
219 parent = p
219 parent = p
220 else:
220 else:
221 if opts['parent']:
221 if opts['parent']:
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
223 parent = p1
223 parent = p1
224 hg.clean(repo, node, show_stats=False)
224 hg.clean(repo, node, show_stats=False)
225 revert_opts = opts.copy()
225 revert_opts = opts.copy()
226 revert_opts['date'] = None
226 revert_opts['date'] = None
227 revert_opts['all'] = True
227 revert_opts['all'] = True
228 revert_opts['rev'] = hex(parent)
228 revert_opts['rev'] = hex(parent)
229 revert(ui, repo, **revert_opts)
229 revert(ui, repo, **revert_opts)
230 commit_opts = opts.copy()
230 commit_opts = opts.copy()
231 commit_opts['addremove'] = False
231 commit_opts['addremove'] = False
232 if not commit_opts['message'] and not commit_opts['logfile']:
232 if not commit_opts['message'] and not commit_opts['logfile']:
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234 commit_opts['force_editor'] = True
234 commit_opts['force_editor'] = True
235 commit(ui, repo, **commit_opts)
235 commit(ui, repo, **commit_opts)
236 def nice(node):
236 def nice(node):
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
238 ui.status(_('changeset %s backs out changeset %s\n') %
238 ui.status(_('changeset %s backs out changeset %s\n') %
239 (nice(repo.changelog.tip()), nice(node)))
239 (nice(repo.changelog.tip()), nice(node)))
240 if op1 != node:
240 if op1 != node:
241 if opts['merge']:
241 if opts['merge']:
242 ui.status(_('merging with changeset %s\n') % nice(op1))
242 ui.status(_('merging with changeset %s\n') % nice(op1))
243 hg.merge(repo, hex(op1))
243 hg.merge(repo, hex(op1))
244 else:
244 else:
245 ui.status(_('the backout changeset is a new head - '
245 ui.status(_('the backout changeset is a new head - '
246 'do not forget to merge\n'))
246 'do not forget to merge\n'))
247 ui.status(_('(use "backout --merge" '
247 ui.status(_('(use "backout --merge" '
248 'if you want to auto-merge)\n'))
248 'if you want to auto-merge)\n'))
249
249
250 def branch(ui, repo, label=None):
250 def branch(ui, repo, label=None):
251 """set or show the current branch name
251 """set or show the current branch name
252
252
253 With <name>, set the current branch name. Otherwise, show the
253 With <name>, set the current branch name. Otherwise, show the
254 current branch name.
254 current branch name.
255 """
255 """
256
256
257 if label is not None:
257 if label is not None:
258 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
258 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
259 else:
259 else:
260 b = util.tolocal(repo.workingctx().branch())
260 b = util.tolocal(repo.workingctx().branch())
261 if b:
261 if b:
262 ui.write("%s\n" % b)
262 ui.write("%s\n" % b)
263
263
264 def branches(ui, repo):
264 def branches(ui, repo):
265 """list repository named branches
265 """list repository named branches
266
266
267 List the repository's named branches.
267 List the repository's named branches.
268 """
268 """
269 b = repo.branchtags()
269 b = repo.branchtags()
270 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
270 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
271 l.sort()
271 l.sort()
272 for r, n, t in l:
272 for r, n, t in l:
273 hexfunc = ui.debugflag and hex or short
273 hexfunc = ui.debugflag and hex or short
274 if ui.quiet:
274 if ui.quiet:
275 ui.write("%s\n" % t)
275 ui.write("%s\n" % t)
276 else:
276 else:
277 t = util.localsub(t, 30)
277 t = util.localsub(t, 30)
278 t += " " * (30 - util.locallen(t))
278 t += " " * (30 - util.locallen(t))
279 ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
279 ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
280
280
281 def bundle(ui, repo, fname, dest=None, **opts):
281 def bundle(ui, repo, fname, dest=None, **opts):
282 """create a changegroup file
282 """create a changegroup file
283
283
284 Generate a compressed changegroup file collecting changesets not
284 Generate a compressed changegroup file collecting changesets not
285 found in the other repository.
285 found in the other repository.
286
286
287 If no destination repository is specified, the destination is assumed
287 If no destination repository is specified, the destination is assumed
288 to have all the nodes specified by one or more --base parameters.
288 to have all the nodes specified by one or more --base parameters.
289
289
290 The bundle file can then be transferred using conventional means and
290 The bundle file can then be transferred using conventional means and
291 applied to another repository with the unbundle or pull command.
291 applied to another repository with the unbundle or pull command.
292 This is useful when direct push and pull are not available or when
292 This is useful when direct push and pull are not available or when
293 exporting an entire repository is undesirable.
293 exporting an entire repository is undesirable.
294
294
295 Applying bundles preserves all changeset contents including
295 Applying bundles preserves all changeset contents including
296 permissions, copy/rename information, and revision history.
296 permissions, copy/rename information, and revision history.
297 """
297 """
298 revs = opts.get('rev') or None
298 revs = opts.get('rev') or None
299 if revs:
299 if revs:
300 revs = [repo.lookup(rev) for rev in revs]
300 revs = [repo.lookup(rev) for rev in revs]
301 base = opts.get('base')
301 base = opts.get('base')
302 if base:
302 if base:
303 if dest:
303 if dest:
304 raise util.Abort(_("--base is incompatible with specifiying "
304 raise util.Abort(_("--base is incompatible with specifiying "
305 "a destination"))
305 "a destination"))
306 base = [repo.lookup(rev) for rev in base]
306 base = [repo.lookup(rev) for rev in base]
307 # create the right base
307 # create the right base
308 # XXX: nodesbetween / changegroup* should be "fixed" instead
308 # XXX: nodesbetween / changegroup* should be "fixed" instead
309 o = []
309 o = []
310 has = {nullid: None}
310 has = {nullid: None}
311 for n in base:
311 for n in base:
312 has.update(repo.changelog.reachable(n))
312 has.update(repo.changelog.reachable(n))
313 if revs:
313 if revs:
314 visit = list(revs)
314 visit = list(revs)
315 else:
315 else:
316 visit = repo.changelog.heads()
316 visit = repo.changelog.heads()
317 seen = {}
317 seen = {}
318 while visit:
318 while visit:
319 n = visit.pop(0)
319 n = visit.pop(0)
320 parents = [p for p in repo.changelog.parents(n) if p not in has]
320 parents = [p for p in repo.changelog.parents(n) if p not in has]
321 if len(parents) == 0:
321 if len(parents) == 0:
322 o.insert(0, n)
322 o.insert(0, n)
323 else:
323 else:
324 for p in parents:
324 for p in parents:
325 if p not in seen:
325 if p not in seen:
326 seen[p] = 1
326 seen[p] = 1
327 visit.append(p)
327 visit.append(p)
328 else:
328 else:
329 setremoteconfig(ui, opts)
329 setremoteconfig(ui, opts)
330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
331 other = hg.repository(ui, dest)
331 other = hg.repository(ui, dest)
332 o = repo.findoutgoing(other, force=opts['force'])
332 o = repo.findoutgoing(other, force=opts['force'])
333
333
334 if revs:
334 if revs:
335 cg = repo.changegroupsubset(o, revs, 'bundle')
335 cg = repo.changegroupsubset(o, revs, 'bundle')
336 else:
336 else:
337 cg = repo.changegroup(o, 'bundle')
337 cg = repo.changegroup(o, 'bundle')
338 changegroup.writebundle(cg, fname, "HG10BZ")
338 changegroup.writebundle(cg, fname, "HG10BZ")
339
339
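# Editorial note on the --base handling above (explanatory, not part of
# this changeset): starting from the requested heads (or all heads), the
# walk follows parent links until it reaches nodes already reachable from
# a --base node; a node all of whose parents are in that `has` set becomes
# one of the roots `o`, so changegroupsubset(o, revs, 'bundle') bundles
# exactly the changesets the receiver is assumed to be missing.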
340 def cat(ui, repo, file1, *pats, **opts):
340 def cat(ui, repo, file1, *pats, **opts):
341 """output the latest or given revisions of files
341 """output the latest or given revisions of files
342
342
343 Print the specified files as they were at the given revision.
343 Print the specified files as they were at the given revision.
344 If no revision is given, the working directory parent is used, or tip
344 If no revision is given, the working directory parent is used, or tip
345 if no revision is checked out.
345 if no revision is checked out.
346
346
347 Output may be to a file, in which case the name of the file is
347 Output may be to a file, in which case the name of the file is
348 given using a format string. The formatting rules are the same as
348 given using a format string. The formatting rules are the same as
349 for the export command, with the following additions:
349 for the export command, with the following additions:
350
350
351 %s basename of file being printed
351 %s basename of file being printed
352 %d dirname of file being printed, or '.' if in repo root
352 %d dirname of file being printed, or '.' if in repo root
353 %p root-relative path name of file being printed
353 %p root-relative path name of file being printed
354 """
354 """
355 ctx = repo.changectx(opts['rev'])
355 ctx = repo.changectx(opts['rev'])
356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
357 ctx.node()):
357 ctx.node()):
358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
359 fp.write(ctx.filectx(abs).data())
359 fp.write(ctx.filectx(abs).data())
360
360
361 def clone(ui, source, dest=None, **opts):
361 def clone(ui, source, dest=None, **opts):
362 """make a copy of an existing repository
362 """make a copy of an existing repository
363
363
364 Create a copy of an existing repository in a new directory.
364 Create a copy of an existing repository in a new directory.
365
365
366 If no destination directory name is specified, it defaults to the
366 If no destination directory name is specified, it defaults to the
367 basename of the source.
367 basename of the source.
368
368
369 The location of the source is added to the new repository's
369 The location of the source is added to the new repository's
370 .hg/hgrc file, as the default to be used for future pulls.
370 .hg/hgrc file, as the default to be used for future pulls.
371
371
372 For efficiency, hardlinks are used for cloning whenever the source
372 For efficiency, hardlinks are used for cloning whenever the source
373 and destination are on the same filesystem (note this applies only
373 and destination are on the same filesystem (note this applies only
374 to the repository data, not to the checked out files). Some
374 to the repository data, not to the checked out files). Some
375 filesystems, such as AFS, implement hardlinking incorrectly, but
375 filesystems, such as AFS, implement hardlinking incorrectly, but
376 do not report errors. In these cases, use the --pull option to
376 do not report errors. In these cases, use the --pull option to
377 avoid hardlinking.
377 avoid hardlinking.
378
378
379 You can safely clone repositories and checked out files using full
379 You can safely clone repositories and checked out files using full
380 hardlinks with
380 hardlinks with
381
381
382 $ cp -al REPO REPOCLONE
382 $ cp -al REPO REPOCLONE
383
383
384 which is the fastest way to clone. However, the operation is not
384 which is the fastest way to clone. However, the operation is not
385 atomic (making sure REPO is not modified during the operation is
385 atomic (making sure REPO is not modified during the operation is
386 up to you) and you have to make sure your editor breaks hardlinks
386 up to you) and you have to make sure your editor breaks hardlinks
387 (Emacs and most Linux Kernel tools do so).
387 (Emacs and most Linux Kernel tools do so).
388
388
389 If you use the -r option to clone up to a specific revision, no
389 If you use the -r option to clone up to a specific revision, no
390 subsequent revisions will be present in the cloned repository.
390 subsequent revisions will be present in the cloned repository.
391 This option implies --pull, even on local repositories.
391 This option implies --pull, even on local repositories.
392
392
393 See pull for valid source format details.
393 See pull for valid source format details.
394
394
395 It is possible to specify an ssh:// URL as the destination, but no
395 It is possible to specify an ssh:// URL as the destination, but no
396 .hg/hgrc or working directory will be created on the remote side.
396 .hg/hgrc or working directory will be created on the remote side.
397 Look at the help text for the pull command for important details
397 Look at the help text for the pull command for important details
398 about ssh:// URLs.
398 about ssh:// URLs.
399 """
399 """
400 setremoteconfig(ui, opts)
400 setremoteconfig(ui, opts)
401 hg.clone(ui, ui.expandpath(source), dest,
401 hg.clone(ui, ui.expandpath(source), dest,
402 pull=opts['pull'],
402 pull=opts['pull'],
403 stream=opts['uncompressed'],
403 stream=opts['uncompressed'],
404 rev=opts['rev'],
404 rev=opts['rev'],
405 update=not opts['noupdate'])
405 update=not opts['noupdate'])
406
406
407 def commit(ui, repo, *pats, **opts):
407 def commit(ui, repo, *pats, **opts):
408 """commit the specified files or all outstanding changes
408 """commit the specified files or all outstanding changes
409
409
410 Commit changes to the given files into the repository.
410 Commit changes to the given files into the repository.
411
411
412 If a list of files is omitted, all changes reported by "hg status"
412 If a list of files is omitted, all changes reported by "hg status"
413 will be committed.
413 will be committed.
414
414
415 If no commit message is specified, the editor configured in your hgrc
415 If no commit message is specified, the editor configured in your hgrc
416 or in the EDITOR environment variable is started to enter a message.
416 or in the EDITOR environment variable is started to enter a message.
417 """
417 """
418 message = logmessage(opts)
418 message = logmessage(opts)
419
419
420 if opts['addremove']:
420 if opts['addremove']:
421 cmdutil.addremove(repo, pats, opts)
421 cmdutil.addremove(repo, pats, opts)
422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
423 if pats:
423 if pats:
424 status = repo.status(files=fns, match=match)
424 status = repo.status(files=fns, match=match)
425 modified, added, removed, deleted, unknown = status[:5]
425 modified, added, removed, deleted, unknown = status[:5]
426 files = modified + added + removed
426 files = modified + added + removed
427 slist = None
427 slist = None
428 for f in fns:
428 for f in fns:
429 if f not in files:
429 if f not in files:
430 rf = repo.wjoin(f)
430 rf = repo.wjoin(f)
431 if f in unknown:
431 if f in unknown:
432 raise util.Abort(_("file %s not tracked!") % rf)
432 raise util.Abort(_("file %s not tracked!") % rf)
433 try:
433 try:
434 mode = os.lstat(rf)[stat.ST_MODE]
434 mode = os.lstat(rf)[stat.ST_MODE]
435 except OSError:
435 except OSError:
436 raise util.Abort(_("file %s not found!") % rf)
436 raise util.Abort(_("file %s not found!") % rf)
437 if stat.S_ISDIR(mode):
437 if stat.S_ISDIR(mode):
438 name = f + '/'
438 name = f + '/'
439 if slist is None:
439 if slist is None:
440 slist = list(files)
440 slist = list(files)
441 slist.sort()
441 slist.sort()
442 i = bisect.bisect(slist, name)
442 i = bisect.bisect(slist, name)
443 if i >= len(slist) or not slist[i].startswith(name):
443 if i >= len(slist) or not slist[i].startswith(name):
444 raise util.Abort(_("no match under directory %s!")
444 raise util.Abort(_("no match under directory %s!")
445 % rf)
445 % rf)
446 elif not stat.S_ISREG(mode):
446 elif not stat.S_ISREG(mode):
447 raise util.Abort(_("can't commit %s: "
447 raise util.Abort(_("can't commit %s: "
448 "unsupported file type!") % rf)
448 "unsupported file type!") % rf)
449 else:
449 else:
450 files = []
450 files = []
451 try:
451 try:
452 repo.commit(files, message, opts['user'], opts['date'], match,
452 repo.commit(files, message, opts['user'], opts['date'], match,
453 force_editor=opts.get('force_editor'))
453 force_editor=opts.get('force_editor'))
454 except ValueError, inst:
454 except ValueError, inst:
455 raise util.Abort(str(inst))
455 raise util.Abort(str(inst))
456
456
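# Editorial aside (illustrative, not part of this changeset): the bisect
# trick in commit() above answers "does any committed file live under the
# directory f?" by probing the sorted file list with the prefix 'f/'.
_slist = ['a/x.py', 'b/y.py']
_name = 'a' + '/'
_i = bisect.bisect(_slist, _name)
print _i < len(_slist) and _slist[_i].startswith(_name)   # True: 'a/x.py'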
457 def docopy(ui, repo, pats, opts, wlock):
457 def docopy(ui, repo, pats, opts, wlock):
458 # called with the repo lock held
458 # called with the repo lock held
459 #
459 #
460 # hgsep => pathname that uses "/" to separate directories
460 # hgsep => pathname that uses "/" to separate directories
461 # ossep => pathname that uses os.sep to separate directories
461 # ossep => pathname that uses os.sep to separate directories
462 cwd = repo.getcwd()
462 cwd = repo.getcwd()
463 errors = 0
463 errors = 0
464 copied = []
464 copied = []
465 targets = {}
465 targets = {}
466
466
467 # abs: hgsep
467 # abs: hgsep
468 # rel: ossep
468 # rel: ossep
469 # return: hgsep
469 # return: hgsep
470 def okaytocopy(abs, rel, exact):
470 def okaytocopy(abs, rel, exact):
471 reasons = {'?': _('is not managed'),
471 reasons = {'?': _('is not managed'),
472 'a': _('has been marked for add'),
472 'a': _('has been marked for add'),
473 'r': _('has been marked for remove')}
473 'r': _('has been marked for remove')}
474 state = repo.dirstate.state(abs)
474 state = repo.dirstate.state(abs)
475 reason = reasons.get(state)
475 reason = reasons.get(state)
476 if reason:
476 if reason:
477 if state == 'a':
477 if state == 'a':
478 origsrc = repo.dirstate.copied(abs)
478 origsrc = repo.dirstate.copied(abs)
479 if origsrc is not None:
479 if origsrc is not None:
480 return origsrc
480 return origsrc
481 if exact:
481 if exact:
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
483 else:
483 else:
484 return abs
484 return abs
485
485
486 # origsrc: hgsep
486 # origsrc: hgsep
487 # abssrc: hgsep
487 # abssrc: hgsep
488 # relsrc: ossep
488 # relsrc: ossep
489 # target: ossep
489 # target: ossep
490 def copy(origsrc, abssrc, relsrc, target, exact):
490 def copy(origsrc, abssrc, relsrc, target, exact):
491 abstarget = util.canonpath(repo.root, cwd, target)
491 abstarget = util.canonpath(repo.root, cwd, target)
492 reltarget = util.pathto(cwd, abstarget)
492 reltarget = util.pathto(cwd, abstarget)
493 prevsrc = targets.get(abstarget)
493 prevsrc = targets.get(abstarget)
494 if prevsrc is not None:
494 if prevsrc is not None:
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
496 (reltarget, util.localpath(abssrc),
496 (reltarget, util.localpath(abssrc),
497 util.localpath(prevsrc)))
497 util.localpath(prevsrc)))
498 return
498 return
499 if (not opts['after'] and os.path.exists(reltarget) or
499 if (not opts['after'] and os.path.exists(reltarget) or
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
501 if not opts['force']:
501 if not opts['force']:
502 ui.warn(_('%s: not overwriting - file exists\n') %
502 ui.warn(_('%s: not overwriting - file exists\n') %
503 reltarget)
503 reltarget)
504 return
504 return
505 if not opts['after'] and not opts.get('dry_run'):
505 if not opts['after'] and not opts.get('dry_run'):
506 os.unlink(reltarget)
506 os.unlink(reltarget)
507 if opts['after']:
507 if opts['after']:
508 if not os.path.exists(reltarget):
508 if not os.path.exists(reltarget):
509 return
509 return
510 else:
510 else:
511 targetdir = os.path.dirname(reltarget) or '.'
511 targetdir = os.path.dirname(reltarget) or '.'
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
513 os.makedirs(targetdir)
513 os.makedirs(targetdir)
514 try:
514 try:
515 restore = repo.dirstate.state(abstarget) == 'r'
515 restore = repo.dirstate.state(abstarget) == 'r'
516 if restore and not opts.get('dry_run'):
516 if restore and not opts.get('dry_run'):
517 repo.undelete([abstarget], wlock)
517 repo.undelete([abstarget], wlock)
518 try:
518 try:
519 if not opts.get('dry_run'):
519 if not opts.get('dry_run'):
520 util.copyfile(relsrc, reltarget)
520 util.copyfile(relsrc, reltarget)
521 restore = False
521 restore = False
522 finally:
522 finally:
523 if restore:
523 if restore:
524 repo.remove([abstarget], wlock)
524 repo.remove([abstarget], wlock)
525 except IOError, inst:
525 except IOError, inst:
526 if inst.errno == errno.ENOENT:
526 if inst.errno == errno.ENOENT:
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
528 else:
528 else:
529 ui.warn(_('%s: cannot copy - %s\n') %
529 ui.warn(_('%s: cannot copy - %s\n') %
530 (relsrc, inst.strerror))
530 (relsrc, inst.strerror))
531 errors += 1
531 errors += 1
532 return
532 return
533 if ui.verbose or not exact:
533 if ui.verbose or not exact:
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
535 targets[abstarget] = abssrc
535 targets[abstarget] = abssrc
536 if abstarget != origsrc and not opts.get('dry_run'):
536 if abstarget != origsrc and not opts.get('dry_run'):
537 repo.copy(origsrc, abstarget, wlock)
537 repo.copy(origsrc, abstarget, wlock)
538 copied.append((abssrc, relsrc, exact))
538 copied.append((abssrc, relsrc, exact))
539
539
540 # pat: ossep
540 # pat: ossep
541 # dest: ossep
541 # dest: ossep
542 # srcs: list of (hgsep, hgsep, ossep, bool)
542 # srcs: list of (hgsep, hgsep, ossep, bool)
543 # return: function that takes hgsep and returns ossep
543 # return: function that takes hgsep and returns ossep
544 def targetpathfn(pat, dest, srcs):
544 def targetpathfn(pat, dest, srcs):
545 if os.path.isdir(pat):
545 if os.path.isdir(pat):
546 abspfx = util.canonpath(repo.root, cwd, pat)
546 abspfx = util.canonpath(repo.root, cwd, pat)
547 abspfx = util.localpath(abspfx)
547 abspfx = util.localpath(abspfx)
548 if destdirexists:
548 if destdirexists:
549 striplen = len(os.path.split(abspfx)[0])
549 striplen = len(os.path.split(abspfx)[0])
550 else:
550 else:
551 striplen = len(abspfx)
551 striplen = len(abspfx)
552 if striplen:
552 if striplen:
553 striplen += len(os.sep)
553 striplen += len(os.sep)
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
555 elif destdirexists:
555 elif destdirexists:
556 res = lambda p: os.path.join(dest,
556 res = lambda p: os.path.join(dest,
557 os.path.basename(util.localpath(p)))
557 os.path.basename(util.localpath(p)))
558 else:
558 else:
559 res = lambda p: dest
559 res = lambda p: dest
560 return res
560 return res
561
561
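# Editorial aside (illustrative, POSIX-style separators assumed, not part
# of this changeset): the path mapping targetpathfn builds when copying
# the directory 'src' into an already-existing directory 'backup'; the
# leading-underscore names are made up for the example.
_dest, _abspfx, _destdirexists = 'backup', 'src', True
if _destdirexists:
    _striplen = len(os.path.split(_abspfx)[0])   # keep the 'src/' component
else:
    _striplen = len(_abspfx)                     # strip 'src/' itself
if _striplen:
    _striplen += len(os.sep)
_res = lambda p: os.path.join(_dest, p[_striplen:])
print _res('src/util.py')   # backup/src/util.py (destination dir exists)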
562 # pat: ossep
562 # pat: ossep
563 # dest: ossep
563 # dest: ossep
564 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # srcs: list of (hgsep, hgsep, ossep, bool)
565 # return: function that takes hgsep and returns ossep
565 # return: function that takes hgsep and returns ossep
566 def targetpathafterfn(pat, dest, srcs):
566 def targetpathafterfn(pat, dest, srcs):
567 if util.patkind(pat, None)[0]:
567 if util.patkind(pat, None)[0]:
568 # a mercurial pattern
568 # a mercurial pattern
569 res = lambda p: os.path.join(dest,
569 res = lambda p: os.path.join(dest,
570 os.path.basename(util.localpath(p)))
570 os.path.basename(util.localpath(p)))
571 else:
571 else:
572 abspfx = util.canonpath(repo.root, cwd, pat)
572 abspfx = util.canonpath(repo.root, cwd, pat)
573 if len(abspfx) < len(srcs[0][0]):
573 if len(abspfx) < len(srcs[0][0]):
574 # A directory. Either the target path contains the last
574 # A directory. Either the target path contains the last
575 # component of the source path or it does not.
575 # component of the source path or it does not.
576 def evalpath(striplen):
576 def evalpath(striplen):
577 score = 0
577 score = 0
578 for s in srcs:
578 for s in srcs:
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
580 if os.path.exists(t):
580 if os.path.exists(t):
581 score += 1
581 score += 1
582 return score
582 return score
583
583
584 abspfx = util.localpath(abspfx)
584 abspfx = util.localpath(abspfx)
585 striplen = len(abspfx)
585 striplen = len(abspfx)
586 if striplen:
586 if striplen:
587 striplen += len(os.sep)
587 striplen += len(os.sep)
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
589 score = evalpath(striplen)
589 score = evalpath(striplen)
590 striplen1 = len(os.path.split(abspfx)[0])
590 striplen1 = len(os.path.split(abspfx)[0])
591 if striplen1:
591 if striplen1:
592 striplen1 += len(os.sep)
592 striplen1 += len(os.sep)
593 if evalpath(striplen1) > score:
593 if evalpath(striplen1) > score:
594 striplen = striplen1
594 striplen = striplen1
595 res = lambda p: os.path.join(dest,
595 res = lambda p: os.path.join(dest,
596 util.localpath(p)[striplen:])
596 util.localpath(p)[striplen:])
597 else:
597 else:
598 # a file
598 # a file
599 if destdirexists:
599 if destdirexists:
600 res = lambda p: os.path.join(dest,
600 res = lambda p: os.path.join(dest,
601 os.path.basename(util.localpath(p)))
601 os.path.basename(util.localpath(p)))
602 else:
602 else:
603 res = lambda p: dest
603 res = lambda p: dest
604 return res
604 return res
605
605
606
606
607 pats = list(pats)
607 pats = list(pats)
608 if not pats:
608 if not pats:
609 raise util.Abort(_('no source or destination specified'))
609 raise util.Abort(_('no source or destination specified'))
610 if len(pats) == 1:
610 if len(pats) == 1:
611 raise util.Abort(_('no destination specified'))
611 raise util.Abort(_('no destination specified'))
612 dest = pats.pop()
612 dest = pats.pop()
613 destdirexists = os.path.isdir(dest)
613 destdirexists = os.path.isdir(dest)
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
615 raise util.Abort(_('with multiple sources, destination must be an '
615 raise util.Abort(_('with multiple sources, destination must be an '
616 'existing directory'))
616 'existing directory'))
617 if opts['after']:
617 if opts['after']:
618 tfn = targetpathafterfn
618 tfn = targetpathafterfn
619 else:
619 else:
620 tfn = targetpathfn
620 tfn = targetpathfn
621 copylist = []
621 copylist = []
622 for pat in pats:
622 for pat in pats:
623 srcs = []
623 srcs = []
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
625 origsrc = okaytocopy(abssrc, relsrc, exact)
625 origsrc = okaytocopy(abssrc, relsrc, exact)
626 if origsrc:
626 if origsrc:
627 srcs.append((origsrc, abssrc, relsrc, exact))
627 srcs.append((origsrc, abssrc, relsrc, exact))
628 if not srcs:
628 if not srcs:
629 continue
629 continue
630 copylist.append((tfn(pat, dest, srcs), srcs))
630 copylist.append((tfn(pat, dest, srcs), srcs))
631 if not copylist:
631 if not copylist:
632 raise util.Abort(_('no files to copy'))
632 raise util.Abort(_('no files to copy'))
633
633
634 for targetpath, srcs in copylist:
634 for targetpath, srcs in copylist:
635 for origsrc, abssrc, relsrc, exact in srcs:
635 for origsrc, abssrc, relsrc, exact in srcs:
636 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
636 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
637
637
638 if errors:
638 if errors:
639 ui.warn(_('(consider using --after)\n'))
639 ui.warn(_('(consider using --after)\n'))
640 return errors, copied
640 return errors, copied
641
641
642 def copy(ui, repo, *pats, **opts):
642 def copy(ui, repo, *pats, **opts):
643 """mark files as copied for the next commit
643 """mark files as copied for the next commit
644
644
645 Mark dest as having copies of source files. If dest is a
645 Mark dest as having copies of source files. If dest is a
646 directory, copies are put in that directory. If dest is a file,
646 directory, copies are put in that directory. If dest is a file,
647 there can only be one source.
647 there can only be one source.
648
648
649 By default, this command copies the contents of files as they
649 By default, this command copies the contents of files as they
650 stand in the working directory. If invoked with --after, the
650 stand in the working directory. If invoked with --after, the
651 operation is recorded, but no copying is performed.
651 operation is recorded, but no copying is performed.
652
652
653 This command takes effect in the next commit. To undo a copy
653 This command takes effect in the next commit. To undo a copy
654 before that, see hg revert.
654 before that, see hg revert.
655 """
655 """
656 wlock = repo.wlock(0)
656 wlock = repo.wlock(0)
657 errs, copied = docopy(ui, repo, pats, opts, wlock)
657 errs, copied = docopy(ui, repo, pats, opts, wlock)
658 return errs
658 return errs
659
659
660 def debugancestor(ui, index, rev1, rev2):
660 def debugancestor(ui, index, rev1, rev2):
661 """find the ancestor revision of two revisions in a given index"""
661 """find the ancestor revision of two revisions in a given index"""
662 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
662 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
663 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
663 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
664 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
664 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
665
665
666 def debugcomplete(ui, cmd='', **opts):
666 def debugcomplete(ui, cmd='', **opts):
667 """returns the completion list associated with the given command"""
667 """returns the completion list associated with the given command"""
668
668
669 if opts['options']:
669 if opts['options']:
670 options = []
670 options = []
671 otables = [globalopts]
671 otables = [globalopts]
672 if cmd:
672 if cmd:
673 aliases, entry = findcmd(ui, cmd)
673 aliases, entry = findcmd(ui, cmd)
674 otables.append(entry[1])
674 otables.append(entry[1])
675 for t in otables:
675 for t in otables:
676 for o in t:
676 for o in t:
677 if o[0]:
677 if o[0]:
678 options.append('-%s' % o[0])
678 options.append('-%s' % o[0])
679 options.append('--%s' % o[1])
679 options.append('--%s' % o[1])
680 ui.write("%s\n" % "\n".join(options))
680 ui.write("%s\n" % "\n".join(options))
681 return
681 return
682
682
683 clist = findpossible(ui, cmd).keys()
683 clist = findpossible(ui, cmd).keys()
684 clist.sort()
684 clist.sort()
685 ui.write("%s\n" % "\n".join(clist))
685 ui.write("%s\n" % "\n".join(clist))
686
686
687 def debugrebuildstate(ui, repo, rev=None):
687 def debugrebuildstate(ui, repo, rev=None):
688 """rebuild the dirstate as it would look like for the given revision"""
688 """rebuild the dirstate as it would look like for the given revision"""
689 if not rev:
689 if not rev:
690 rev = repo.changelog.tip()
690 rev = repo.changelog.tip()
691 else:
691 else:
692 rev = repo.lookup(rev)
692 rev = repo.lookup(rev)
693 change = repo.changelog.read(rev)
693 change = repo.changelog.read(rev)
694 n = change[0]
694 n = change[0]
695 files = repo.manifest.read(n)
695 files = repo.manifest.read(n)
696 wlock = repo.wlock()
696 wlock = repo.wlock()
697 repo.dirstate.rebuild(rev, files)
697 repo.dirstate.rebuild(rev, files)
698
698
699 def debugcheckstate(ui, repo):
699 def debugcheckstate(ui, repo):
700 """validate the correctness of the current dirstate"""
700 """validate the correctness of the current dirstate"""
701 parent1, parent2 = repo.dirstate.parents()
701 parent1, parent2 = repo.dirstate.parents()
702 repo.dirstate.read()
702 repo.dirstate.read()
703 dc = repo.dirstate.map
703 dc = repo.dirstate.map
704 keys = dc.keys()
704 keys = dc.keys()
705 keys.sort()
705 keys.sort()
706 m1n = repo.changelog.read(parent1)[0]
706 m1n = repo.changelog.read(parent1)[0]
707 m2n = repo.changelog.read(parent2)[0]
707 m2n = repo.changelog.read(parent2)[0]
708 m1 = repo.manifest.read(m1n)
708 m1 = repo.manifest.read(m1n)
709 m2 = repo.manifest.read(m2n)
709 m2 = repo.manifest.read(m2n)
710 errors = 0
710 errors = 0
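# dirstate file states checked below: 'n' normal, 'a' added, 'r' removed, 'm' merged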
711 for f in dc:
711 for f in dc:
712 state = repo.dirstate.state(f)
712 state = repo.dirstate.state(f)
713 if state in "nr" and f not in m1:
713 if state in "nr" and f not in m1:
714 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
714 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
715 errors += 1
715 errors += 1
716 if state in "a" and f in m1:
716 if state in "a" and f in m1:
717 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
717 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
718 errors += 1
718 errors += 1
719 if state in "m" and f not in m1 and f not in m2:
719 if state in "m" and f not in m1 and f not in m2:
720 ui.warn(_("%s in state %s, but not in either manifest\n") %
720 ui.warn(_("%s in state %s, but not in either manifest\n") %
721 (f, state))
721 (f, state))
722 errors += 1
722 errors += 1
723 for f in m1:
723 for f in m1:
724 state = repo.dirstate.state(f)
724 state = repo.dirstate.state(f)
725 if state not in "nrm":
725 if state not in "nrm":
726 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
726 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
727 errors += 1
727 errors += 1
728 if errors:
728 if errors:
729 error = _(".hg/dirstate inconsistent with current parent's manifest")
729 error = _(".hg/dirstate inconsistent with current parent's manifest")
730 raise util.Abort(error)
730 raise util.Abort(error)
731
731
732 def showconfig(ui, repo, *values, **opts):
732 def showconfig(ui, repo, *values, **opts):
733 """show combined config settings from all hgrc files
733 """show combined config settings from all hgrc files
734
734
735 With no args, print names and values of all config items.
735 With no args, print names and values of all config items.
736
736
737 With one arg of the form section.name, print just the value of
737 With one arg of the form section.name, print just the value of
738 that config item.
738 that config item.
739
739
740 With multiple args, print names and values of all config items
740 With multiple args, print names and values of all config items
741 with matching section names."""
741 with matching section names."""
742
742
743 untrusted = bool(opts.get('untrusted'))
743 untrusted = bool(opts.get('untrusted'))
744 if values:
744 if values:
745 if len([v for v in values if '.' in v]) > 1:
745 if len([v for v in values if '.' in v]) > 1:
746 raise util.Abort(_('only one config item permitted'))
746 raise util.Abort(_('only one config item permitted'))
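# with arguments, an item is printed when an argument matches its section or its full section.name; with no arguments every item is printed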
747 for section, name, value in ui.walkconfig(untrusted=untrusted):
747 for section, name, value in ui.walkconfig(untrusted=untrusted):
748 sectname = section + '.' + name
748 sectname = section + '.' + name
749 if values:
749 if values:
750 for v in values:
750 for v in values:
751 if v == section:
751 if v == section:
752 ui.write('%s=%s\n' % (sectname, value))
752 ui.write('%s=%s\n' % (sectname, value))
753 elif v == sectname:
753 elif v == sectname:
754 ui.write(value, '\n')
754 ui.write(value, '\n')
755 else:
755 else:
756 ui.write('%s=%s\n' % (sectname, value))
756 ui.write('%s=%s\n' % (sectname, value))
757
757
758 def debugsetparents(ui, repo, rev1, rev2=None):
758 def debugsetparents(ui, repo, rev1, rev2=None):
759 """manually set the parents of the current working directory
759 """manually set the parents of the current working directory
760
760
761 This is useful for writing repository conversion tools, but should
761 This is useful for writing repository conversion tools, but should
762 be used with care.
762 be used with care.
763 """
763 """
764
764
765 if not rev2:
765 if not rev2:
766 rev2 = hex(nullid)
766 rev2 = hex(nullid)
767
767
768 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
768 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
769
769
770 def debugstate(ui, repo):
770 def debugstate(ui, repo):
771 """show the contents of the current dirstate"""
771 """show the contents of the current dirstate"""
772 repo.dirstate.read()
772 repo.dirstate.read()
773 dc = repo.dirstate.map
773 dc = repo.dirstate.map
774 keys = dc.keys()
774 keys = dc.keys()
775 keys.sort()
775 keys.sort()
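# each dirstate entry is a (state, mode, size, mtime) tuple; print one line per tracked file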
776 for file_ in keys:
776 for file_ in keys:
777 ui.write("%c %3o %10d %s %s\n"
777 ui.write("%c %3o %10d %s %s\n"
778 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
778 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
779 time.strftime("%x %X",
779 time.strftime("%x %X",
780 time.localtime(dc[file_][3])), file_))
780 time.localtime(dc[file_][3])), file_))
781 for f in repo.dirstate.copies():
781 for f in repo.dirstate.copies():
782 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
782 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
783
783
784 def debugdata(ui, file_, rev):
784 def debugdata(ui, file_, rev):
785 """dump the contents of an data file revision"""
785 """dump the contents of an data file revision"""
786 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
786 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
787 file_[:-2] + ".i", file_, 0)
787 file_[:-2] + ".i", file_, 0)
788 try:
788 try:
789 ui.write(r.revision(r.lookup(rev)))
789 ui.write(r.revision(r.lookup(rev)))
790 except KeyError:
790 except KeyError:
791 raise util.Abort(_('invalid revision identifier %s') % rev)
791 raise util.Abort(_('invalid revision identifier %s') % rev)
792
792
793 def debugdate(ui, date, range=None, **opts):
793 def debugdate(ui, date, range=None, **opts):
794 """parse and display a date"""
794 """parse and display a date"""
795 if opts["extended"]:
795 if opts["extended"]:
796 d = util.parsedate(date, util.extendeddateformats)
796 d = util.parsedate(date, util.extendeddateformats)
797 else:
797 else:
798 d = util.parsedate(date)
798 d = util.parsedate(date)
799 ui.write("internal: %s %s\n" % d)
799 ui.write("internal: %s %s\n" % d)
800 ui.write("standard: %s\n" % util.datestr(d))
800 ui.write("standard: %s\n" % util.datestr(d))
801 if range:
801 if range:
802 m = util.matchdate(range)
802 m = util.matchdate(range)
803 ui.write("match: %s\n" % m(d[0]))
803 ui.write("match: %s\n" % m(d[0]))
804
804
805 def debugindex(ui, file_):
805 def debugindex(ui, file_):
806 """dump the contents of an index file"""
806 """dump the contents of an index file"""
807 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
807 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
808 ui.write(" rev offset length base linkrev" +
808 ui.write(" rev offset length base linkrev" +
809 " nodeid p1 p2\n")
809 " nodeid p1 p2\n")
810 for i in xrange(r.count()):
810 for i in xrange(r.count()):
811 node = r.node(i)
811 node = r.node(i)
812 pp = r.parents(node)
812 pp = r.parents(node)
813 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
813 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
814 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
814 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
815 short(node), short(pp[0]), short(pp[1])))
815 short(node), short(pp[0]), short(pp[1])))
816
816
817 def debugindexdot(ui, file_):
817 def debugindexdot(ui, file_):
818 """dump an index DAG as a .dot file"""
818 """dump an index DAG as a .dot file"""
819 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
819 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
820 ui.write("digraph G {\n")
820 ui.write("digraph G {\n")
821 for i in xrange(r.count()):
821 for i in xrange(r.count()):
822 node = r.node(i)
822 node = r.node(i)
823 pp = r.parents(node)
823 pp = r.parents(node)
824 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
824 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
825 if pp[1] != nullid:
825 if pp[1] != nullid:
826 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
826 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
827 ui.write("}\n")
827 ui.write("}\n")
828
828
829 def debuginstall(ui):
829 def debuginstall(ui):
830 '''test Mercurial installation'''
830 '''test Mercurial installation'''
831
831
832 def writetemp(contents):
832 def writetemp(contents):
833 (fd, name) = tempfile.mkstemp()
833 (fd, name) = tempfile.mkstemp()
834 f = os.fdopen(fd, "wb")
834 f = os.fdopen(fd, "wb")
835 f.write(contents)
835 f.write(contents)
836 f.close()
836 f.close()
837 return name
837 return name
838
838
839 problems = 0
839 problems = 0
840
840
841 # encoding
841 # encoding
842 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
842 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
843 try:
843 try:
844 util.fromlocal("test")
844 util.fromlocal("test")
845 except util.Abort, inst:
845 except util.Abort, inst:
846 ui.write(" %s\n" % inst)
846 ui.write(" %s\n" % inst)
847 ui.write(_(" (check that your locale is properly set)\n"))
847 ui.write(_(" (check that your locale is properly set)\n"))
848 problems += 1
848 problems += 1
849
849
850 # compiled modules
850 # compiled modules
851 ui.status(_("Checking extensions...\n"))
851 ui.status(_("Checking extensions...\n"))
852 try:
852 try:
853 import bdiff, mpatch, base85
853 import bdiff, mpatch, base85
854 except Exception, inst:
854 except Exception, inst:
855 ui.write(" %s\n" % inst)
855 ui.write(" %s\n" % inst)
856 ui.write(_(" One or more extensions could not be found"))
856 ui.write(_(" One or more extensions could not be found"))
857 ui.write(_(" (check that you compiled the extensions)\n"))
857 ui.write(_(" (check that you compiled the extensions)\n"))
858 problems += 1
858 problems += 1
859
859
860 # templates
860 # templates
861 ui.status(_("Checking templates...\n"))
861 ui.status(_("Checking templates...\n"))
862 try:
862 try:
863 import templater
863 import templater
864 t = templater.templater(templater.templatepath("map-cmdline.default"))
864 t = templater.templater(templater.templatepath("map-cmdline.default"))
865 except Exception, inst:
865 except Exception, inst:
866 ui.write(" %s\n" % inst)
866 ui.write(" %s\n" % inst)
867 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
867 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
868 problems += 1
868 problems += 1
869
869
870 # patch
870 # patch
871 ui.status(_("Checking patch...\n"))
871 ui.status(_("Checking patch...\n"))
872 path = os.environ.get('PATH', '')
872 path = os.environ.get('PATH', '')
873 patcher = util.find_in_path('gpatch', path,
873 patcher = util.find_in_path('gpatch', path,
874 util.find_in_path('patch', path, None))
874 util.find_in_path('patch', path, None))
875 if not patcher:
875 if not patcher:
876 ui.write(_(" Can't find patch or gpatch in PATH\n"))
876 ui.write(_(" Can't find patch or gpatch in PATH\n"))
877 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
877 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
878 problems += 1
878 problems += 1
879 else:
879 else:
880 # actually attempt a patch here
880 # actually attempt a patch here
881 a = "1\n2\n3\n4\n"
881 a = "1\n2\n3\n4\n"
882 b = "1\n2\n3\ninsert\n4\n"
882 b = "1\n2\n3\ninsert\n4\n"
883 d = mdiff.unidiff(a, None, b, None, "a")
883 d = mdiff.unidiff(a, None, b, None, "a")
884 fa = writetemp(a)
884 fa = writetemp(a)
885 fd = writetemp(d)
885 fd = writetemp(d)
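# apply the generated diff with the detected patch tool and capture its output for checking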
886 fp = os.popen('%s %s %s' % (patcher, fa, fd))
886 fp = os.popen('%s %s %s' % (patcher, fa, fd))
887 files = []
887 files = []
888 output = ""
888 output = ""
889 for line in fp:
889 for line in fp:
890 output += line
890 output += line
891 if line.startswith('patching file '):
891 if line.startswith('patching file '):
892 pf = util.parse_patch_output(line.rstrip())
892 pf = util.parse_patch_output(line.rstrip())
893 files.append(pf)
893 files.append(pf)
894 if files != [fa]:
894 if files != [fa]:
895 ui.write(_(" unexpected patch output!"))
895 ui.write(_(" unexpected patch output!"))
896 ui.write(_(" (you may have an incompatible version of patch)\n"))
896 ui.write(_(" (you may have an incompatible version of patch)\n"))
897 ui.write(output)
897 ui.write(output)
898 problems += 1
898 problems += 1
899 a = file(fa).read()
899 a = file(fa).read()
900 if a != b:
900 if a != b:
901 ui.write(_(" patch test failed!"))
901 ui.write(_(" patch test failed!"))
902 ui.write(_(" (you may have an incompatible version of patch)\n"))
902 ui.write(_(" (you may have an incompatible version of patch)\n"))
903 problems += 1
903 problems += 1
904 os.unlink(fa)
904 os.unlink(fa)
905 os.unlink(fd)
905 os.unlink(fd)
906
906
907 # merge helper
907 # merge helper
908 ui.status(_("Checking merge helper...\n"))
908 ui.status(_("Checking merge helper...\n"))
909 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
909 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
910 or "hgmerge")
910 or "hgmerge")
911 cmdpath = util.find_in_path(cmd, path)
911 cmdpath = util.find_in_path(cmd, path)
912 if not cmdpath:
912 if not cmdpath:
913 cmdpath = util.find_in_path(cmd.split()[0], path)
913 cmdpath = util.find_in_path(cmd.split()[0], path)
914 if not cmdpath:
914 if not cmdpath:
915 if cmd == 'hgmerge':
915 if cmd == 'hgmerge':
916 ui.write(_(" No merge helper set and can't find default"
916 ui.write(_(" No merge helper set and can't find default"
917 " hgmerge script in PATH\n"))
917 " hgmerge script in PATH\n"))
918 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
918 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
919 else:
919 else:
920 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
920 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
921 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
921 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
922 problems += 1
922 problems += 1
923 else:
923 else:
924 # actually attempt a merge here
924 # actually attempt a merge here
925 fa = writetemp("1\n2\n3\n4\n")
925 fa = writetemp("1\n2\n3\n4\n")
926 fl = writetemp("1\n2\n3\ninsert\n4\n")
926 fl = writetemp("1\n2\n3\ninsert\n4\n")
927 fr = writetemp("begin\n1\n2\n3\n4\n")
927 fr = writetemp("begin\n1\n2\n3\n4\n")
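# run the helper as "cmd local base other"; a correct merge leaves the combined text in the local file (fl)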
928 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
928 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
929 if r:
929 if r:
930 ui.write(_(" got unexpected merge error %d!") % r)
930 ui.write(_(" got unexpected merge error %d!") % r)
931 problems += 1
931 problems += 1
932 m = file(fl).read()
932 m = file(fl).read()
933 if m != "begin\n1\n2\n3\ninsert\n4\n":
933 if m != "begin\n1\n2\n3\ninsert\n4\n":
934 ui.write(_(" got unexpected merge results!"))
934 ui.write(_(" got unexpected merge results!"))
935 ui.write(_(" (your merge helper may have the"
935 ui.write(_(" (your merge helper may have the"
936 " wrong argument order)\n"))
936 " wrong argument order)\n"))
937 ui.write(m)
937 ui.write(m)
938 os.unlink(fa)
938 os.unlink(fa)
939 os.unlink(fl)
939 os.unlink(fl)
940 os.unlink(fr)
940 os.unlink(fr)
941
941
942 # editor
942 # editor
943 ui.status(_("Checking commit editor...\n"))
943 ui.status(_("Checking commit editor...\n"))
944 editor = (os.environ.get("HGEDITOR") or
944 editor = (os.environ.get("HGEDITOR") or
945 ui.config("ui", "editor") or
945 ui.config("ui", "editor") or
946 os.environ.get("EDITOR", "vi"))
946 os.environ.get("EDITOR", "vi"))
947 cmdpath = util.find_in_path(editor, path)
947 cmdpath = util.find_in_path(editor, path)
948 if not cmdpath:
948 if not cmdpath:
949 cmdpath = util.find_in_path(editor.split()[0], path)
949 cmdpath = util.find_in_path(editor.split()[0], path)
950 if not cmdpath:
950 if not cmdpath:
951 if editor == 'vi':
951 if editor == 'vi':
952 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
952 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
953 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
953 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
954 else:
954 else:
955 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
955 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
956 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
956 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
957 problems += 1
957 problems += 1
958
958
959 # check username
959 # check username
960 ui.status(_("Checking username...\n"))
960 ui.status(_("Checking username...\n"))
961 user = os.environ.get("HGUSER")
961 user = os.environ.get("HGUSER")
962 if user is None:
962 if user is None:
963 user = ui.config("ui", "username")
963 user = ui.config("ui", "username")
964 if user is None:
964 if user is None:
965 user = os.environ.get("EMAIL")
965 user = os.environ.get("EMAIL")
966 if not user:
966 if not user:
967 ui.warn(" ")
967 ui.warn(" ")
968 ui.username()
968 ui.username()
969 ui.write(_(" (specify a username in your .hgrc file)\n"))
969 ui.write(_(" (specify a username in your .hgrc file)\n"))
970
970
971 if not problems:
971 if not problems:
972 ui.status(_("No problems detected\n"))
972 ui.status(_("No problems detected\n"))
973 else:
973 else:
974 ui.write(_("%s problems detected,"
974 ui.write(_("%s problems detected,"
975 " please check your install!\n") % problems)
975 " please check your install!\n") % problems)
976
976
977 return problems
977 return problems
978
978
979 def debugrename(ui, repo, file1, *pats, **opts):
979 def debugrename(ui, repo, file1, *pats, **opts):
980 """dump rename information"""
980 """dump rename information"""
981
981
982 ctx = repo.changectx(opts.get('rev', 'tip'))
982 ctx = repo.changectx(opts.get('rev', 'tip'))
983 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
983 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
984 ctx.node()):
984 ctx.node()):
985 m = ctx.filectx(abs).renamed()
985 m = ctx.filectx(abs).renamed()
986 if m:
986 if m:
987 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
987 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
988 else:
988 else:
989 ui.write(_("%s not renamed\n") % rel)
989 ui.write(_("%s not renamed\n") % rel)
990
990
991 def debugwalk(ui, repo, *pats, **opts):
991 def debugwalk(ui, repo, *pats, **opts):
992 """show how files match on given patterns"""
992 """show how files match on given patterns"""
993 items = list(cmdutil.walk(repo, pats, opts))
993 items = list(cmdutil.walk(repo, pats, opts))
994 if not items:
994 if not items:
995 return
995 return
996 fmt = '%%s %%-%ds %%-%ds %%s' % (
996 fmt = '%%s %%-%ds %%-%ds %%s' % (
997 max([len(abs) for (src, abs, rel, exact) in items]),
997 max([len(abs) for (src, abs, rel, exact) in items]),
998 max([len(rel) for (src, abs, rel, exact) in items]))
998 max([len(rel) for (src, abs, rel, exact) in items]))
999 for src, abs, rel, exact in items:
999 for src, abs, rel, exact in items:
1000 line = fmt % (src, abs, rel, exact and 'exact' or '')
1000 line = fmt % (src, abs, rel, exact and 'exact' or '')
1001 ui.write("%s\n" % line.rstrip())
1001 ui.write("%s\n" % line.rstrip())
1002
1002
1003 def diff(ui, repo, *pats, **opts):
1003 def diff(ui, repo, *pats, **opts):
1004 """diff repository (or selected files)
1004 """diff repository (or selected files)
1005
1005
1006 Show differences between revisions for the specified files.
1006 Show differences between revisions for the specified files.
1007
1007
1008 Differences between files are shown using the unified diff format.
1008 Differences between files are shown using the unified diff format.
1009
1009
1010 NOTE: diff may generate unexpected results for merges, as it will
1010 NOTE: diff may generate unexpected results for merges, as it will
1011 default to comparing against the working directory's first parent
1011 default to comparing against the working directory's first parent
1012 changeset if no revisions are specified.
1012 changeset if no revisions are specified.
1013
1013
1014 When two revision arguments are given, then changes are shown
1014 When two revision arguments are given, then changes are shown
1015 between those revisions. If only one revision is specified then
1015 between those revisions. If only one revision is specified then
1016 that revision is compared to the working directory, and, when no
1016 that revision is compared to the working directory, and, when no
1017 revisions are specified, the working directory files are compared
1017 revisions are specified, the working directory files are compared
1018 to its parent.
1018 to its parent.
1019
1019
1020 Without the -a option, diff will avoid generating diffs of files
1020 Without the -a option, diff will avoid generating diffs of files
1021 it detects as binary. With -a, diff will generate a diff anyway,
1021 it detects as binary. With -a, diff will generate a diff anyway,
1022 probably with undesirable results.
1022 probably with undesirable results.
1023 """
1023 """
1024 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1024 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1025
1025
1026 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1026 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1027
1027
1028 patch.diff(repo, node1, node2, fns, match=matchfn,
1028 patch.diff(repo, node1, node2, fns, match=matchfn,
1029 opts=patch.diffopts(ui, opts))
1029 opts=patch.diffopts(ui, opts))
1030
1030
1031 def export(ui, repo, *changesets, **opts):
1031 def export(ui, repo, *changesets, **opts):
1032 """dump the header and diffs for one or more changesets
1032 """dump the header and diffs for one or more changesets
1033
1033
1034 Print the changeset header and diffs for one or more revisions.
1034 Print the changeset header and diffs for one or more revisions.
1035
1035
1036 The information shown in the changeset header is: author,
1036 The information shown in the changeset header is: author,
1037 changeset hash, parent(s) and commit comment.
1037 changeset hash, parent(s) and commit comment.
1038
1038
1039 NOTE: export may generate unexpected diff output for merge changesets,
1039 NOTE: export may generate unexpected diff output for merge changesets,
1040 as it will compare the merge changeset against its first parent only.
1040 as it will compare the merge changeset against its first parent only.
1041
1041
1042 Output may be to a file, in which case the name of the file is
1042 Output may be to a file, in which case the name of the file is
1043 given using a format string. The formatting rules are as follows:
1043 given using a format string. The formatting rules are as follows:
1044
1044
1045 %% literal "%" character
1045 %% literal "%" character
1046 %H changeset hash (40 hexadecimal digits)
1046 %H changeset hash (40 hexadecimal digits)
1047 %N number of patches being generated
1047 %N number of patches being generated
1048 %R changeset revision number
1048 %R changeset revision number
1049 %b basename of the exporting repository
1049 %b basename of the exporting repository
1050 %h short-form changeset hash (12 hexadecimal digits)
1050 %h short-form changeset hash (12 hexadecimal digits)
1051 %n zero-padded sequence number, starting at 1
1051 %n zero-padded sequence number, starting at 1
1052 %r zero-padded changeset revision number
1052 %r zero-padded changeset revision number
1053
1053
1054 Without the -a option, export will avoid generating diffs of files
1054 Without the -a option, export will avoid generating diffs of files
1055 it detects as binary. With -a, export will generate a diff anyway,
1055 it detects as binary. With -a, export will generate a diff anyway,
1056 probably with undesirable results.
1056 probably with undesirable results.
1057
1057
1058 With the --switch-parent option, the diff will be against the second
1058 With the --switch-parent option, the diff will be against the second
1059 parent. This can be useful for reviewing a merge.
1059 parent. This can be useful for reviewing a merge.
1060 """
1060 """
1061 if not changesets:
1061 if not changesets:
1062 raise util.Abort(_("export requires at least one changeset"))
1062 raise util.Abort(_("export requires at least one changeset"))
1063 revs = cmdutil.revrange(repo, changesets)
1063 revs = cmdutil.revrange(repo, changesets)
1064 if len(revs) > 1:
1064 if len(revs) > 1:
1065 ui.note(_('exporting patches:\n'))
1065 ui.note(_('exporting patches:\n'))
1066 else:
1066 else:
1067 ui.note(_('exporting patch:\n'))
1067 ui.note(_('exporting patch:\n'))
1068 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1068 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1069 switch_parent=opts['switch_parent'],
1069 switch_parent=opts['switch_parent'],
1070 opts=patch.diffopts(ui, opts))
1070 opts=patch.diffopts(ui, opts))
1071
1071
1072 def grep(ui, repo, pattern, *pats, **opts):
1072 def grep(ui, repo, pattern, *pats, **opts):
1073 """search for a pattern in specified files and revisions
1073 """search for a pattern in specified files and revisions
1074
1074
1075 Search revisions of files for a regular expression.
1075 Search revisions of files for a regular expression.
1076
1076
1077 This command behaves differently than Unix grep. It only accepts
1077 This command behaves differently than Unix grep. It only accepts
1078 Python/Perl regexps. It searches repository history, not the
1078 Python/Perl regexps. It searches repository history, not the
1079 working directory. It always prints the revision number in which
1079 working directory. It always prints the revision number in which
1080 a match appears.
1080 a match appears.
1081
1081
1082 By default, grep only prints output for the first revision of a
1082 By default, grep only prints output for the first revision of a
1083 file in which it finds a match. To get it to print every revision
1083 file in which it finds a match. To get it to print every revision
1084 that contains a change in match status ("-" for a match that
1084 that contains a change in match status ("-" for a match that
1085 becomes a non-match, or "+" for a non-match that becomes a match),
1085 becomes a non-match, or "+" for a non-match that becomes a match),
1086 use the --all flag.
1086 use the --all flag.
1087 """
1087 """
1088 reflags = 0
1088 reflags = 0
1089 if opts['ignore_case']:
1089 if opts['ignore_case']:
1090 reflags |= re.I
1090 reflags |= re.I
1091 regexp = re.compile(pattern, reflags)
1091 regexp = re.compile(pattern, reflags)
1092 sep, eol = ':', '\n'
1092 sep, eol = ':', '\n'
1093 if opts['print0']:
1093 if opts['print0']:
1094 sep = eol = '\0'
1094 sep = eol = '\0'
1095
1095
1096 fcache = {}
1096 fcache = {}
1097 def getfile(fn):
1097 def getfile(fn):
1098 if fn not in fcache:
1098 if fn not in fcache:
1099 fcache[fn] = repo.file(fn)
1099 fcache[fn] = repo.file(fn)
1100 return fcache[fn]
1100 return fcache[fn]
1101
1101
1102 def matchlines(body):
1102 def matchlines(body):
1103 begin = 0
1103 begin = 0
1104 linenum = 0
1104 linenum = 0
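# scan body for regexp matches, yielding (line number, start column, end column, line text) for each match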
1105 while True:
1105 while True:
1106 match = regexp.search(body, begin)
1106 match = regexp.search(body, begin)
1107 if not match:
1107 if not match:
1108 break
1108 break
1109 mstart, mend = match.span()
1109 mstart, mend = match.span()
1110 linenum += body.count('\n', begin, mstart) + 1
1110 linenum += body.count('\n', begin, mstart) + 1
1111 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1111 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1112 lend = body.find('\n', mend)
1112 lend = body.find('\n', mend)
1113 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1113 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1114 begin = lend + 1
1114 begin = lend + 1
1115
1115
1116 class linestate(object):
1116 class linestate(object):
1117 def __init__(self, line, linenum, colstart, colend):
1117 def __init__(self, line, linenum, colstart, colend):
1118 self.line = line
1118 self.line = line
1119 self.linenum = linenum
1119 self.linenum = linenum
1120 self.colstart = colstart
1120 self.colstart = colstart
1121 self.colend = colend
1121 self.colend = colend
1122
1122
1123 def __eq__(self, other):
1123 def __eq__(self, other):
1124 return self.line == other.line
1124 return self.line == other.line
1125
1125
1126 matches = {}
1126 matches = {}
1127 copies = {}
1127 copies = {}
1128 def grepbody(fn, rev, body):
1128 def grepbody(fn, rev, body):
1129 matches[rev].setdefault(fn, [])
1129 matches[rev].setdefault(fn, [])
1130 m = matches[rev][fn]
1130 m = matches[rev][fn]
1131 for lnum, cstart, cend, line in matchlines(body):
1131 for lnum, cstart, cend, line in matchlines(body):
1132 s = linestate(line, lnum, cstart, cend)
1132 s = linestate(line, lnum, cstart, cend)
1133 m.append(s)
1133 m.append(s)
1134
1134
1135 def difflinestates(a, b):
1135 def difflinestates(a, b):
1136 sm = difflib.SequenceMatcher(None, a, b)
1136 sm = difflib.SequenceMatcher(None, a, b)
1137 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1137 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1138 if tag == 'insert':
1138 if tag == 'insert':
1139 for i in xrange(blo, bhi):
1139 for i in xrange(blo, bhi):
1140 yield ('+', b[i])
1140 yield ('+', b[i])
1141 elif tag == 'delete':
1141 elif tag == 'delete':
1142 for i in xrange(alo, ahi):
1142 for i in xrange(alo, ahi):
1143 yield ('-', a[i])
1143 yield ('-', a[i])
1144 elif tag == 'replace':
1144 elif tag == 'replace':
1145 for i in xrange(alo, ahi):
1145 for i in xrange(alo, ahi):
1146 yield ('-', a[i])
1146 yield ('-', a[i])
1147 for i in xrange(blo, bhi):
1147 for i in xrange(blo, bhi):
1148 yield ('+', b[i])
1148 yield ('+', b[i])
1149
1149
1150 prev = {}
1150 prev = {}
1151 def display(fn, rev, states, prevstates):
1151 def display(fn, rev, states, prevstates):
1152 counts = {'-': 0, '+': 0}
1152 counts = {'-': 0, '+': 0}
1153 filerevmatches = {}
1153 filerevmatches = {}
1154 if incrementing or not opts['all']:
1154 if incrementing or not opts['all']:
1155 a, b, r = prevstates, states, rev
1155 a, b, r = prevstates, states, rev
1156 else:
1156 else:
1157 a, b, r = states, prevstates, prev.get(fn, -1)
1157 a, b, r = states, prevstates, prev.get(fn, -1)
1158 for change, l in difflinestates(a, b):
1158 for change, l in difflinestates(a, b):
1159 cols = [fn, str(r)]
1159 cols = [fn, str(r)]
1160 if opts['line_number']:
1160 if opts['line_number']:
1161 cols.append(str(l.linenum))
1161 cols.append(str(l.linenum))
1162 if opts['all']:
1162 if opts['all']:
1163 cols.append(change)
1163 cols.append(change)
1164 if opts['user']:
1164 if opts['user']:
1165 cols.append(ui.shortuser(get(r)[1]))
1165 cols.append(ui.shortuser(get(r)[1]))
1166 if opts['files_with_matches']:
1166 if opts['files_with_matches']:
1167 c = (fn, r)
1167 c = (fn, r)
1168 if c in filerevmatches:
1168 if c in filerevmatches:
1169 continue
1169 continue
1170 filerevmatches[c] = 1
1170 filerevmatches[c] = 1
1171 else:
1171 else:
1172 cols.append(l.line)
1172 cols.append(l.line)
1173 ui.write(sep.join(cols), eol)
1173 ui.write(sep.join(cols), eol)
1174 counts[change] += 1
1174 counts[change] += 1
1175 return counts['+'], counts['-']
1175 return counts['+'], counts['-']
1176
1176
1177 fstate = {}
1177 fstate = {}
1178 skip = {}
1178 skip = {}
1179 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1179 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1180 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1180 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1181 count = 0
1181 count = 0
1182 incrementing = False
1182 incrementing = False
1183 follow = opts.get('follow')
1183 follow = opts.get('follow')
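# the change iterator yields 'window' (start of a revision window; rev gives the walk direction), 'add' (collect matches for a revision) and 'iter' (report that revision) events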
1184 for st, rev, fns in changeiter:
1184 for st, rev, fns in changeiter:
1185 if st == 'window':
1185 if st == 'window':
1186 incrementing = rev
1186 incrementing = rev
1187 matches.clear()
1187 matches.clear()
1188 elif st == 'add':
1188 elif st == 'add':
1189 mf = repo.changectx(rev).manifest()
1189 mf = repo.changectx(rev).manifest()
1190 matches[rev] = {}
1190 matches[rev] = {}
1191 for fn in fns:
1191 for fn in fns:
1192 if fn in skip:
1192 if fn in skip:
1193 continue
1193 continue
1194 fstate.setdefault(fn, {})
1194 fstate.setdefault(fn, {})
1195 try:
1195 try:
1196 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1196 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1197 if follow:
1197 if follow:
1198 copied = getfile(fn).renamed(mf[fn])
1198 copied = getfile(fn).renamed(mf[fn])
1199 if copied:
1199 if copied:
1200 copies.setdefault(rev, {})[fn] = copied[0]
1200 copies.setdefault(rev, {})[fn] = copied[0]
1201 except KeyError:
1201 except KeyError:
1202 pass
1202 pass
1203 elif st == 'iter':
1203 elif st == 'iter':
1204 states = matches[rev].items()
1204 states = matches[rev].items()
1205 states.sort()
1205 states.sort()
1206 for fn, m in states:
1206 for fn, m in states:
1207 copy = copies.get(rev, {}).get(fn)
1207 copy = copies.get(rev, {}).get(fn)
1208 if fn in skip:
1208 if fn in skip:
1209 if copy:
1209 if copy:
1210 skip[copy] = True
1210 skip[copy] = True
1211 continue
1211 continue
1212 if incrementing or not opts['all'] or fstate[fn]:
1212 if incrementing or not opts['all'] or fstate[fn]:
1213 pos, neg = display(fn, rev, m, fstate[fn])
1213 pos, neg = display(fn, rev, m, fstate[fn])
1214 count += pos + neg
1214 count += pos + neg
1215 if pos and not opts['all']:
1215 if pos and not opts['all']:
1216 skip[fn] = True
1216 skip[fn] = True
1217 if copy:
1217 if copy:
1218 skip[copy] = True
1218 skip[copy] = True
1219 fstate[fn] = m
1219 fstate[fn] = m
1220 if copy:
1220 if copy:
1221 fstate[copy] = m
1221 fstate[copy] = m
1222 prev[fn] = rev
1222 prev[fn] = rev
1223
1223
1224 if not incrementing:
1224 if not incrementing:
1225 fstate = fstate.items()
1225 fstate = fstate.items()
1226 fstate.sort()
1226 fstate.sort()
1227 for fn, state in fstate:
1227 for fn, state in fstate:
1228 if fn in skip:
1228 if fn in skip:
1229 continue
1229 continue
1230 if fn not in copies.get(prev[fn], {}):
1230 if fn not in copies.get(prev[fn], {}):
1231 display(fn, rev, {}, state)
1231 display(fn, rev, {}, state)
1232 return (count == 0 and 1) or 0
1232 return (count == 0 and 1) or 0
1233
1233
1234 def heads(ui, repo, **opts):
1234 def heads(ui, repo, **opts):
1235 """show current repository heads
1235 """show current repository heads
1236
1236
1237 Show all repository head changesets.
1237 Show all repository head changesets.
1238
1238
1239 Repository "heads" are changesets that don't have child
1239 Repository "heads" are changesets that don't have child
1240 changesets. They are where development generally takes place and
1240 changesets. They are where development generally takes place and
1241 are the usual targets for update and merge operations.
1241 are the usual targets for update and merge operations.
1242 """
1242 """
1243 if opts['rev']:
1243 if opts['rev']:
1244 heads = repo.heads(repo.lookup(opts['rev']))
1244 heads = repo.heads(repo.lookup(opts['rev']))
1245 else:
1245 else:
1246 heads = repo.heads()
1246 heads = repo.heads()
1247 displayer = cmdutil.show_changeset(ui, repo, opts)
1247 displayer = cmdutil.show_changeset(ui, repo, opts)
1248 for n in heads:
1248 for n in heads:
1249 displayer.show(changenode=n)
1249 displayer.show(changenode=n)
1250
1250
1251 def help_(ui, name=None, with_version=False):
1251 def help_(ui, name=None, with_version=False):
1252 """show help for a command, extension, or list of commands
1252 """show help for a command, extension, or list of commands
1253
1253
1254 With no arguments, print a list of commands and short help.
1254 With no arguments, print a list of commands and short help.
1255
1255
1256 Given a command name, print help for that command.
1256 Given a command name, print help for that command.
1257
1257
1258 Given an extension name, print help for that extension, and the
1258 Given an extension name, print help for that extension, and the
1259 commands it provides."""
1259 commands it provides."""
1260 option_lists = []
1260 option_lists = []
1261
1261
1262 def helpcmd(name):
1262 def helpcmd(name):
1263 if with_version:
1263 if with_version:
1264 version_(ui)
1264 version_(ui)
1265 ui.write('\n')
1265 ui.write('\n')
1266 aliases, i = findcmd(ui, name)
1266 aliases, i = findcmd(ui, name)
1267 # synopsis
1267 # synopsis
1268 ui.write("%s\n\n" % i[2])
1268 ui.write("%s\n\n" % i[2])
1269
1269
1270 # description
1270 # description
1271 doc = i[0].__doc__
1271 doc = i[0].__doc__
1272 if not doc:
1272 if not doc:
1273 doc = _("(No help text available)")
1273 doc = _("(No help text available)")
1274 if ui.quiet:
1274 if ui.quiet:
1275 doc = doc.splitlines(0)[0]
1275 doc = doc.splitlines(0)[0]
1276 ui.write("%s\n" % doc.rstrip())
1276 ui.write("%s\n" % doc.rstrip())
1277
1277
1278 if not ui.quiet:
1278 if not ui.quiet:
1279 # aliases
1279 # aliases
1280 if len(aliases) > 1:
1280 if len(aliases) > 1:
1281 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1281 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1282
1282
1283 # options
1283 # options
1284 if i[1]:
1284 if i[1]:
1285 option_lists.append(("options", i[1]))
1285 option_lists.append(("options", i[1]))
1286
1286
1287 def helplist(select=None):
1287 def helplist(select=None):
1288 h = {}
1288 h = {}
1289 cmds = {}
1289 cmds = {}
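# a leading '^' in the command table marks the basic commands shown by the short list; debug commands are hidden unless --debug is set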
1290 for c, e in table.items():
1290 for c, e in table.items():
1291 f = c.split("|", 1)[0]
1291 f = c.split("|", 1)[0]
1292 if select and not select(f):
1292 if select and not select(f):
1293 continue
1293 continue
1294 if name == "shortlist" and not f.startswith("^"):
1294 if name == "shortlist" and not f.startswith("^"):
1295 continue
1295 continue
1296 f = f.lstrip("^")
1296 f = f.lstrip("^")
1297 if not ui.debugflag and f.startswith("debug"):
1297 if not ui.debugflag and f.startswith("debug"):
1298 continue
1298 continue
1299 doc = e[0].__doc__
1299 doc = e[0].__doc__
1300 if not doc:
1300 if not doc:
1301 doc = _("(No help text available)")
1301 doc = _("(No help text available)")
1302 h[f] = doc.splitlines(0)[0].rstrip()
1302 h[f] = doc.splitlines(0)[0].rstrip()
1303 cmds[f] = c.lstrip("^")
1303 cmds[f] = c.lstrip("^")
1304
1304
1305 fns = h.keys()
1305 fns = h.keys()
1306 fns.sort()
1306 fns.sort()
1307 m = max(map(len, fns))
1307 m = max(map(len, fns))
1308 for f in fns:
1308 for f in fns:
1309 if ui.verbose:
1309 if ui.verbose:
1310 commands = cmds[f].replace("|",", ")
1310 commands = cmds[f].replace("|",", ")
1311 ui.write(" %s:\n %s\n"%(commands, h[f]))
1311 ui.write(" %s:\n %s\n"%(commands, h[f]))
1312 else:
1312 else:
1313 ui.write(' %-*s %s\n' % (m, f, h[f]))
1313 ui.write(' %-*s %s\n' % (m, f, h[f]))
1314
1314
1315 def helptopic(name):
1315 def helptopic(name):
1316 v = None
1316 v = None
1317 for i in help.helptable:
1317 for i in help.helptable:
1318 l = i.split('|')
1318 l = i.split('|')
1319 if name in l:
1319 if name in l:
1320 v = i
1320 v = i
1321 header = l[-1]
1321 header = l[-1]
1322 if not v:
1322 if not v:
1323 raise UnknownCommand(name)
1323 raise UnknownCommand(name)
1324
1324
1325 # description
1325 # description
1326 doc = help.helptable[v]
1326 doc = help.helptable[v]
1327 if not doc:
1327 if not doc:
1328 doc = _("(No help text available)")
1328 doc = _("(No help text available)")
1329 if callable(doc):
1329 if callable(doc):
1330 doc = doc()
1330 doc = doc()
1331
1331
1332 ui.write("%s\n" % header)
1332 ui.write("%s\n" % header)
1333 ui.write("%s\n" % doc.rstrip())
1333 ui.write("%s\n" % doc.rstrip())
1334
1334
1335 def helpext(name):
1335 def helpext(name):
1336 try:
1336 try:
1337 mod = findext(name)
1337 mod = findext(name)
1338 except KeyError:
1338 except KeyError:
1339 raise UnknownCommand(name)
1339 raise UnknownCommand(name)
1340
1340
1341 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1341 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1342 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1342 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1343 for d in doc[1:]:
1343 for d in doc[1:]:
1344 ui.write(d, '\n')
1344 ui.write(d, '\n')
1345
1345
1346 ui.status('\n')
1346 ui.status('\n')
1347 if ui.verbose:
1347 if ui.verbose:
1348 ui.status(_('list of commands:\n\n'))
1348 ui.status(_('list of commands:\n\n'))
1349 else:
1349 else:
1350 ui.status(_('list of commands (use "hg help -v %s" '
1350 ui.status(_('list of commands (use "hg help -v %s" '
1351 'to show aliases and global options):\n\n') % name)
1351 'to show aliases and global options):\n\n') % name)
1352
1352
1353 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1353 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1354 helplist(modcmds.has_key)
1354 helplist(modcmds.has_key)
1355
1355
1356 if name and name != 'shortlist':
1356 if name and name != 'shortlist':
1357 i = None
1357 i = None
1358 for f in (helpcmd, helptopic, helpext):
1358 for f in (helpcmd, helptopic, helpext):
1359 try:
1359 try:
1360 f(name)
1360 f(name)
1361 i = None
1361 i = None
1362 break
1362 break
1363 except UnknownCommand, inst:
1363 except UnknownCommand, inst:
1364 i = inst
1364 i = inst
1365 if i:
1365 if i:
1366 raise i
1366 raise i
1367
1367
1368 else:
1368 else:
1369 # program name
1369 # program name
1370 if ui.verbose or with_version:
1370 if ui.verbose or with_version:
1371 version_(ui)
1371 version_(ui)
1372 else:
1372 else:
1373 ui.status(_("Mercurial Distributed SCM\n"))
1373 ui.status(_("Mercurial Distributed SCM\n"))
1374 ui.status('\n')
1374 ui.status('\n')
1375
1375
1376 # list of commands
1376 # list of commands
1377 if name == "shortlist":
1377 if name == "shortlist":
1378 ui.status(_('basic commands (use "hg help" '
1378 ui.status(_('basic commands (use "hg help" '
1379 'for the full list or option "-v" for details):\n\n'))
1379 'for the full list or option "-v" for details):\n\n'))
1380 elif ui.verbose:
1380 elif ui.verbose:
1381 ui.status(_('list of commands:\n\n'))
1381 ui.status(_('list of commands:\n\n'))
1382 else:
1382 else:
1383 ui.status(_('list of commands (use "hg help -v" '
1383 ui.status(_('list of commands (use "hg help -v" '
1384 'to show aliases and global options):\n\n'))
1384 'to show aliases and global options):\n\n'))
1385
1385
1386 helplist()
1386 helplist()
1387
1387
1388 # global options
1388 # global options
1389 if ui.verbose:
1389 if ui.verbose:
1390 option_lists.append(("global options", globalopts))
1390 option_lists.append(("global options", globalopts))
1391
1391
1392 # list all option lists
1392 # list all option lists
1393 opt_output = []
1393 opt_output = []
1394 for title, options in option_lists:
1394 for title, options in option_lists:
1395 opt_output.append(("\n%s:\n" % title, None))
1395 opt_output.append(("\n%s:\n" % title, None))
1396 for shortopt, longopt, default, desc in options:
1396 for shortopt, longopt, default, desc in options:
1397 if "DEPRECATED" in desc and not ui.verbose: continue
1397 if "DEPRECATED" in desc and not ui.verbose: continue
1398 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1398 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1399 longopt and " --%s" % longopt),
1399 longopt and " --%s" % longopt),
1400 "%s%s" % (desc,
1400 "%s%s" % (desc,
1401 default
1401 default
1402 and _(" (default: %s)") % default
1402 and _(" (default: %s)") % default
1403 or "")))
1403 or "")))
1404
1404
1405 if opt_output:
1405 if opt_output:
1406 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1406 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1407 for first, second in opt_output:
1407 for first, second in opt_output:
1408 if second:
1408 if second:
1409 ui.write(" %-*s %s\n" % (opts_len, first, second))
1409 ui.write(" %-*s %s\n" % (opts_len, first, second))
1410 else:
1410 else:
1411 ui.write("%s\n" % first)
1411 ui.write("%s\n" % first)
1412
1412
1413 def identify(ui, repo):
1413 def identify(ui, repo):
1414 """print information about the working copy
1414 """print information about the working copy
1415
1415
1416 Print a short summary of the current state of the repo.
1416 Print a short summary of the current state of the repo.
1417
1417
1418 This summary identifies the repository state using one or two parent
1418 This summary identifies the repository state using one or two parent
1419 hash identifiers, followed by a "+" if there are uncommitted changes
1419 hash identifiers, followed by a "+" if there are uncommitted changes
1420 in the working directory, followed by a list of tags for this revision.
1420 in the working directory, followed by a list of tags for this revision.
1421 """
1421 """
1422 parents = [p for p in repo.dirstate.parents() if p != nullid]
1422 parents = [p for p in repo.dirstate.parents() if p != nullid]
1423 if not parents:
1423 if not parents:
1424 ui.write(_("unknown\n"))
1424 ui.write(_("unknown\n"))
1425 return
1425 return
1426
1426
1427 hexfunc = ui.debugflag and hex or short
1427 hexfunc = ui.debugflag and hex or short
1428 modified, added, removed, deleted = repo.status()[:4]
1428 modified, added, removed, deleted = repo.status()[:4]
1429 output = ["%s%s" %
1429 output = ["%s%s" %
1430 ('+'.join([hexfunc(parent) for parent in parents]),
1430 ('+'.join([hexfunc(parent) for parent in parents]),
1431 (modified or added or removed or deleted) and "+" or "")]
1431 (modified or added or removed or deleted) and "+" or "")]
1432
1432
1433 if not ui.quiet:
1433 if not ui.quiet:
1434
1434
1435 branch = util.tolocal(repo.workingctx().branch())
1435 branch = util.tolocal(repo.workingctx().branch())
1436 if branch:
1436 if branch:
1437 output.append("(%s)" % branch)
1437 output.append("(%s)" % branch)
1438
1438
1439 # multiple tags for a single parent separated by '/'
1439 # multiple tags for a single parent separated by '/'
1440 parenttags = ['/'.join(tags)
1440 parenttags = ['/'.join(tags)
1441 for tags in map(repo.nodetags, parents) if tags]
1441 for tags in map(repo.nodetags, parents) if tags]
1442 # tags for multiple parents separated by ' + '
1442 # tags for multiple parents separated by ' + '
1443 if parenttags:
1443 if parenttags:
1444 output.append(' + '.join(parenttags))
1444 output.append(' + '.join(parenttags))
1445
1445
1446 ui.write("%s\n" % ' '.join(output))
1446 ui.write("%s\n" % ' '.join(output))
1447
1447
1448 def import_(ui, repo, patch1, *patches, **opts):
1448 def import_(ui, repo, patch1, *patches, **opts):
1449 """import an ordered set of patches
1449 """import an ordered set of patches
1450
1450
1451 Import a list of patches and commit them individually.
1451 Import a list of patches and commit them individually.
1452
1452
1453 If there are outstanding changes in the working directory, import
1453 If there are outstanding changes in the working directory, import
1454 will abort unless given the -f flag.
1454 will abort unless given the -f flag.
1455
1455
1456 You can import a patch straight from a mail message. Even patches
1456 You can import a patch straight from a mail message. Even patches
1457 as attachments work (body part must be type text/plain or
1457 as attachments work (body part must be type text/plain or
1458 text/x-patch to be used). The From and Subject headers of the email
1458 text/x-patch to be used). The From and Subject headers of the email
1459 message are used as the default committer and commit message. All
1459 message are used as the default committer and commit message. All
1460 text/plain body parts before first diff are added to commit
1460 text/plain body parts before first diff are added to commit
1461 message.
1461 message.
1462
1462
1463 If imported patch was generated by hg export, user and description
1463 If imported patch was generated by hg export, user and description
1464 from patch override values from message headers and body. Values
1464 from patch override values from message headers and body. Values
1465 given on command line with -m and -u override these.
1465 given on command line with -m and -u override these.
1466
1466
1467 To read a patch from standard input, use patch name "-".
1467 To read a patch from standard input, use patch name "-".
1468 """
1468 """
1469 patches = (patch1,) + patches
1469 patches = (patch1,) + patches
1470
1470
1471 if not opts['force']:
1471 if not opts['force']:
1472 bail_if_changed(repo)
1472 bail_if_changed(repo)
1473
1473
1474 d = opts["base"]
1474 d = opts["base"]
1475 strip = opts["strip"]
1475 strip = opts["strip"]
1476
1476
1477 wlock = repo.wlock()
1477 wlock = repo.wlock()
1478 lock = repo.lock()
1478 lock = repo.lock()
1479
1479
1480 for p in patches:
1480 for p in patches:
1481 pf = os.path.join(d, p)
1481 pf = os.path.join(d, p)
1482
1482
1483 if pf == '-':
1483 if pf == '-':
1484 ui.status(_("applying patch from stdin\n"))
1484 ui.status(_("applying patch from stdin\n"))
1485 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1485 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1486 else:
1486 else:
1487 ui.status(_("applying %s\n") % p)
1487 ui.status(_("applying %s\n") % p)
1488 tmpname, message, user, date = patch.extract(ui, file(pf))
1488 tmpname, message, user, date = patch.extract(ui, file(pf))
1489
1489
1490 if tmpname is None:
1490 if tmpname is None:
1491 raise util.Abort(_('no diffs found'))
1491 raise util.Abort(_('no diffs found'))
1492
1492
1493 try:
1493 try:
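# commit message precedence: -m/--message on the command line, then the message extracted from the patch, else fall back to the editor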
1494 cmdline_message = logmessage(opts)
1494 cmdline_message = logmessage(opts)
1495 if cmdline_message:
1495 if cmdline_message:
1496 # pickup the cmdline msg
1496 # pickup the cmdline msg
1497 message = cmdline_message
1497 message = cmdline_message
1498 elif message:
1498 elif message:
1499 # pickup the patch msg
1499 # pickup the patch msg
1500 message = message.strip()
1500 message = message.strip()
1501 else:
1501 else:
1502 # launch the editor
1502 # launch the editor
1503 message = None
1503 message = None
1504 ui.debug(_('message:\n%s\n') % message)
1504 ui.debug(_('message:\n%s\n') % message)
1505
1505
1506 files = {}
1506 files = {}
1507 try:
1507 try:
1508 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1508 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1509 files=files)
1509 files=files)
1510 finally:
1510 finally:
1511 files = patch.updatedir(ui, repo, files, wlock=wlock)
1511 files = patch.updatedir(ui, repo, files, wlock=wlock)
1512 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1512 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1513 finally:
1513 finally:
1514 os.unlink(tmpname)
1514 os.unlink(tmpname)
1515
1515
1516 def incoming(ui, repo, source="default", **opts):
1516 def incoming(ui, repo, source="default", **opts):
1517 """show new changesets found in source
1517 """show new changesets found in source
1518
1518
1519 Show new changesets found in the specified path/URL or the default
1519 Show new changesets found in the specified path/URL or the default
1520 pull location. These are the changesets that would be pulled if a pull
1520 pull location. These are the changesets that would be pulled if a pull
1521 was requested.
1521 was requested.
1522
1522
1523 For a remote repository, using --bundle avoids downloading the changesets
1523 For a remote repository, using --bundle avoids downloading the changesets
1524 twice if the incoming command is followed by a pull.
1524 twice if the incoming command is followed by a pull.
1525
1525
1526 See pull for valid source format details.
1526 See pull for valid source format details.
1527 """
1527 """
1528 source = ui.expandpath(source)
1528 source = ui.expandpath(source)
1529 setremoteconfig(ui, opts)
1529 setremoteconfig(ui, opts)
1530
1530
1531 other = hg.repository(ui, source)
1531 other = hg.repository(ui, source)
1532 incoming = repo.findincoming(other, force=opts["force"])
1532 incoming = repo.findincoming(other, force=opts["force"])
1533 if not incoming:
1533 if not incoming:
1534 ui.status(_("no changes found\n"))
1534 ui.status(_("no changes found\n"))
1535 return
1535 return
1536
1536
1537 cleanup = None
1537 cleanup = None
1538 try:
1538 try:
1539 fname = opts["bundle"]
1539 fname = opts["bundle"]
1540 if fname or not other.local():
1540 if fname or not other.local():
1541 # create a bundle (uncompressed if other repo is not local)
1541 # create a bundle (uncompressed if other repo is not local)
1542 cg = other.changegroup(incoming, "incoming")
1542 cg = other.changegroup(incoming, "incoming")
1543 bundletype = other.local() and "HG10BZ" or "HG10UN"
1543 bundletype = other.local() and "HG10BZ" or "HG10UN"
1544 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1544 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1545 # keep written bundle?
1545 # keep written bundle?
1546 if opts["bundle"]:
1546 if opts["bundle"]:
1547 cleanup = None
1547 cleanup = None
1548 if not other.local():
1548 if not other.local():
1549 # use the created uncompressed bundlerepo
1549 # use the created uncompressed bundlerepo
1550 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1550 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1551
1551
1552 revs = None
1552 revs = None
1553 if opts['rev']:
1553 if opts['rev']:
1554 revs = [other.lookup(rev) for rev in opts['rev']]
1554 revs = [other.lookup(rev) for rev in opts['rev']]
1555 o = other.changelog.nodesbetween(incoming, revs)[0]
1555 o = other.changelog.nodesbetween(incoming, revs)[0]
1556 if opts['newest_first']:
1556 if opts['newest_first']:
1557 o.reverse()
1557 o.reverse()
1558 displayer = cmdutil.show_changeset(ui, other, opts)
1558 displayer = cmdutil.show_changeset(ui, other, opts)
1559 for n in o:
1559 for n in o:
1560 parents = [p for p in other.changelog.parents(n) if p != nullid]
1560 parents = [p for p in other.changelog.parents(n) if p != nullid]
1561 if opts['no_merges'] and len(parents) == 2:
1561 if opts['no_merges'] and len(parents) == 2:
1562 continue
1562 continue
1563 displayer.show(changenode=n)
1563 displayer.show(changenode=n)
1564 finally:
1564 finally:
1565 if hasattr(other, 'close'):
1565 if hasattr(other, 'close'):
1566 other.close()
1566 other.close()
1567 if cleanup:
1567 if cleanup:
1568 os.unlink(cleanup)
1568 os.unlink(cleanup)
1569
1569
1570 def init(ui, dest=".", **opts):
1570 def init(ui, dest=".", **opts):
1571 """create a new repository in the given directory
1571 """create a new repository in the given directory
1572
1572
1573 Initialize a new repository in the given directory. If the given
1573 Initialize a new repository in the given directory. If the given
1574 directory does not exist, it is created.
1574 directory does not exist, it is created.
1575
1575
1576 If no directory is given, the current directory is used.
1576 If no directory is given, the current directory is used.
1577
1577
1578 It is possible to specify an ssh:// URL as the destination.
1578 It is possible to specify an ssh:// URL as the destination.
1579 Look at the help text for the pull command for important details
1579 Look at the help text for the pull command for important details
1580 about ssh:// URLs.
1580 about ssh:// URLs.
1581 """
1581 """
1582 setremoteconfig(ui, opts)
1582 setremoteconfig(ui, opts)
1583 hg.repository(ui, dest, create=1)
1583 hg.repository(ui, dest, create=1)
1584
1584
1585 def locate(ui, repo, *pats, **opts):
1585 def locate(ui, repo, *pats, **opts):
1586 """locate files matching specific patterns
1586 """locate files matching specific patterns
1587
1587
1588 Print all files under Mercurial control whose names match the
1588 Print all files under Mercurial control whose names match the
1589 given patterns.
1589 given patterns.
1590
1590
1591 This command searches the current directory and its
1591 This command searches the current directory and its
1592 subdirectories. To search an entire repository, move to the root
1592 subdirectories. To search an entire repository, move to the root
1593 of the repository.
1593 of the repository.
1594
1594
1595 If no patterns are given to match, this command prints all file
1595 If no patterns are given to match, this command prints all file
1596 names.
1596 names.
1597
1597
1598 If you want to feed the output of this command into the "xargs"
1598 If you want to feed the output of this command into the "xargs"
1599 command, use the "-0" option to both this command and "xargs".
1599 command, use the "-0" option to both this command and "xargs".
1600 This will avoid the problem of "xargs" treating single filenames
1600 This will avoid the problem of "xargs" treating single filenames
1601 that contain white space as multiple filenames.
1601 that contain white space as multiple filenames.
1602 """
1602 """
1603 end = opts['print0'] and '\0' or '\n'
1603 end = opts['print0'] and '\0' or '\n'
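The `end` expression above chooses locate's record terminator: a NUL byte under -0/--print0, a newline otherwise, which is what makes the `xargs -0` pairing described in the help text safe for names containing spaces. A tiny illustrative sketch (the file names are invented):

    import sys

    print0 = True                       # stand-in for opts['print0']
    end = print0 and '\0' or '\n'       # same pre-ternary idiom as above
    for name in ['has space.txt', 'plain.txt']:
        sys.stdout.write(name + end)    # NUL-separated: xargs -0 cannot split a name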
1604 rev = opts['rev']
1604 rev = opts['rev']
1605 if rev:
1605 if rev:
1606 node = repo.lookup(rev)
1606 node = repo.lookup(rev)
1607 else:
1607 else:
1608 node = None
1608 node = None
1609
1609
1610 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1610 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1611 head='(?:.*/|)'):
1611 head='(?:.*/|)'):
1612 if not node and repo.dirstate.state(abs) == '?':
1612 if not node and repo.dirstate.state(abs) == '?':
1613 continue
1613 continue
1614 if opts['fullpath']:
1614 if opts['fullpath']:
1615 ui.write(os.path.join(repo.root, abs), end)
1615 ui.write(os.path.join(repo.root, abs), end)
1616 else:
1616 else:
1617 ui.write(((pats and rel) or abs), end)
1617 ui.write(((pats and rel) or abs), end)
1618
1618
1619 def log(ui, repo, *pats, **opts):
1619 def log(ui, repo, *pats, **opts):
1620 """show revision history of entire repository or files
1620 """show revision history of entire repository or files
1621
1621
1622 Print the revision history of the specified files or the entire
1622 Print the revision history of the specified files or the entire
1623 project.
1623 project.
1624
1624
1625 File history is shown without following rename or copy history of
1625 File history is shown without following rename or copy history of
1626 files. Use -f/--follow with a file name to follow history across
1626 files. Use -f/--follow with a file name to follow history across
1627 renames and copies. --follow without a file name will only show
1627 renames and copies. --follow without a file name will only show
1628 ancestors or descendants of the starting revision. --follow-first
1628 ancestors or descendants of the starting revision. --follow-first
1629 only follows the first parent of merge revisions.
1629 only follows the first parent of merge revisions.
1630
1630
1631 If no revision range is specified, the default is tip:0 unless
1631 If no revision range is specified, the default is tip:0 unless
1632 --follow is set, in which case the working directory parent is
1632 --follow is set, in which case the working directory parent is
1633 used as the starting revision.
1633 used as the starting revision.
1634
1634
1635 By default this command outputs: changeset id and hash, tags,
1635 By default this command outputs: changeset id and hash, tags,
1636 non-trivial parents, user, date and time, and a summary for each
1636 non-trivial parents, user, date and time, and a summary for each
1637 commit. When the -v/--verbose switch is used, the list of changed
1637 commit. When the -v/--verbose switch is used, the list of changed
1638 files and the full commit message are shown.
1638 files and the full commit message are shown.
1639
1639
1640 NOTE: log -p may generate unexpected diff output for merge
1640 NOTE: log -p may generate unexpected diff output for merge
1641 changesets, as it will compare the merge changeset against its
1641 changesets, as it will compare the merge changeset against its
1642 first parent only. Also, the files: list will only reflect files
1642 first parent only. Also, the files: list will only reflect files
1643 that are different from BOTH parents.
1643 that are different from BOTH parents.
1644
1644
1645 """
1645 """
1646
1646
1647 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1647 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1648 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1648 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1649
1649
1650 if opts['limit']:
1650 if opts['limit']:
1651 try:
1651 try:
1652 limit = int(opts['limit'])
1652 limit = int(opts['limit'])
1653 except ValueError:
1653 except ValueError:
1654 raise util.Abort(_('limit must be a positive integer'))
1654 raise util.Abort(_('limit must be a positive integer'))
1655 if limit <= 0: raise util.Abort(_('limit must be positive'))
1655 if limit <= 0: raise util.Abort(_('limit must be positive'))
1656 else:
1656 else:
1657 limit = sys.maxint
1657 limit = sys.maxint
1658 count = 0
1658 count = 0
1659
1659
1660 if opts['copies'] and opts['rev']:
1660 if opts['copies'] and opts['rev']:
1661 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1661 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1662 else:
1662 else:
1663 endrev = repo.changelog.count()
1663 endrev = repo.changelog.count()
1664 rcache = {}
1664 rcache = {}
1665 ncache = {}
1665 ncache = {}
1666 dcache = []
1666 dcache = []
1667 def getrenamed(fn, rev, man):
1667 def getrenamed(fn, rev, man):
1668 '''looks up all renames for a file (up to endrev) the first
1668 '''looks up all renames for a file (up to endrev) the first
1669 time the file is given. It indexes on the changerev and only
1669 time the file is given. It indexes on the changerev and only
1670 parses the manifest if linkrev != changerev.
1670 parses the manifest if linkrev != changerev.
1671 Returns rename info for fn at changerev rev.'''
1671 Returns rename info for fn at changerev rev.'''
1672 if fn not in rcache:
1672 if fn not in rcache:
1673 rcache[fn] = {}
1673 rcache[fn] = {}
1674 ncache[fn] = {}
1674 ncache[fn] = {}
1675 fl = repo.file(fn)
1675 fl = repo.file(fn)
1676 for i in xrange(fl.count()):
1676 for i in xrange(fl.count()):
1677 node = fl.node(i)
1677 node = fl.node(i)
1678 lr = fl.linkrev(node)
1678 lr = fl.linkrev(node)
1679 renamed = fl.renamed(node)
1679 renamed = fl.renamed(node)
1680 rcache[fn][lr] = renamed
1680 rcache[fn][lr] = renamed
1681 if renamed:
1681 if renamed:
1682 ncache[fn][node] = renamed
1682 ncache[fn][node] = renamed
1683 if lr >= endrev:
1683 if lr >= endrev:
1684 break
1684 break
1685 if rev in rcache[fn]:
1685 if rev in rcache[fn]:
1686 return rcache[fn][rev]
1686 return rcache[fn][rev]
1687 mr = repo.manifest.rev(man)
1687 mr = repo.manifest.rev(man)
1688 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1688 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1689 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1689 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1690 if not dcache or dcache[0] != man:
1690 if not dcache or dcache[0] != man:
1691 dcache[:] = [man, repo.manifest.readdelta(man)]
1691 dcache[:] = [man, repo.manifest.readdelta(man)]
1692 if fn in dcache[1]:
1692 if fn in dcache[1]:
1693 return ncache[fn].get(dcache[1][fn])
1693 return ncache[fn].get(dcache[1][fn])
1694 return None
1694 return None
1695
1695
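Roughly, getrenamed memoizes rename lookups per file: the first call for a file walks its filelog once (stopping at endrev) and records rename information keyed by linkrev, so the common case is a plain dictionary hit. Only when the requested changeset did not itself touch the file (no matching linkrev) does it fall back to resolving the file node through the manifest, with dcache avoiding repeated reads of the same manifest delta.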
1696 df = False
1696 df = False
1697 if opts["date"]:
1697 if opts["date"]:
1698 df = util.matchdate(opts["date"])
1698 df = util.matchdate(opts["date"])
1699
1699
1700 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1700 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1701 for st, rev, fns in changeiter:
1701 for st, rev, fns in changeiter:
1702 if st == 'add':
1702 if st == 'add':
1703 changenode = repo.changelog.node(rev)
1703 changenode = repo.changelog.node(rev)
1704 parents = [p for p in repo.changelog.parentrevs(rev)
1704 parents = [p for p in repo.changelog.parentrevs(rev)
1705 if p != nullrev]
1705 if p != nullrev]
1706 if opts['no_merges'] and len(parents) == 2:
1706 if opts['no_merges'] and len(parents) == 2:
1707 continue
1707 continue
1708 if opts['only_merges'] and len(parents) != 2:
1708 if opts['only_merges'] and len(parents) != 2:
1709 continue
1709 continue
1710
1710
1711 if df:
1711 if df:
1712 changes = get(rev)
1712 changes = get(rev)
1713 if not df(changes[2][0]):
1713 if not df(changes[2][0]):
1714 continue
1714 continue
1715
1715
1716 if opts['keyword']:
1716 if opts['keyword']:
1717 changes = get(rev)
1717 changes = get(rev)
1718 miss = 0
1718 miss = 0
1719 for k in [kw.lower() for kw in opts['keyword']]:
1719 for k in [kw.lower() for kw in opts['keyword']]:
1720 if not (k in changes[1].lower() or
1720 if not (k in changes[1].lower() or
1721 k in changes[4].lower() or
1721 k in changes[4].lower() or
1722 k in " ".join(changes[3][:20]).lower()):
1722 k in " ".join(changes[3][:20]).lower()):
1723 miss = 1
1723 miss = 1
1724 break
1724 break
1725 if miss:
1725 if miss:
1726 continue
1726 continue
1727
1727
1728 copies = []
1728 copies = []
1729 if opts.get('copies') and rev:
1729 if opts.get('copies') and rev:
1730 mf = get(rev)[0]
1730 mf = get(rev)[0]
1731 for fn in get(rev)[3]:
1731 for fn in get(rev)[3]:
1732 rename = getrenamed(fn, rev, mf)
1732 rename = getrenamed(fn, rev, mf)
1733 if rename:
1733 if rename:
1734 copies.append((fn, rename[0]))
1734 copies.append((fn, rename[0]))
1735 displayer.show(rev, changenode, copies=copies)
1735 displayer.show(rev, changenode, copies=copies)
1736 elif st == 'iter':
1736 elif st == 'iter':
1737 if count == limit: break
1737 if count == limit: break
1738 if displayer.flush(rev):
1738 if displayer.flush(rev):
1739 count += 1
1739 count += 1
1740
1740
1741 def manifest(ui, repo, rev=None):
1741 def manifest(ui, repo, rev=None):
1742 """output the latest or given revision of the project manifest
1742 """output the latest or given revision of the project manifest
1743
1743
1744 Print a list of version controlled files for the given revision.
1744 Print a list of version controlled files for the given revision.
1745
1745
1746 The manifest is the list of files being version controlled. If no revision
1746 The manifest is the list of files being version controlled. If no revision
1747 is given, the first parent of the working directory is used.
1747 is given, the first parent of the working directory is used.
1748
1748
1749 With the -v flag, print file permissions. With the --debug flag, print
1749 With the -v flag, print file permissions. With the --debug flag, print
1750 file revision hashes.
1750 file revision hashes.
1751 """
1751 """
1752
1752
1753 m = repo.changectx(rev).manifest()
1753 m = repo.changectx(rev).manifest()
1754 files = m.keys()
1754 files = m.keys()
1755 files.sort()
1755 files.sort()
1756
1756
1757 for f in files:
1757 for f in files:
1758 if ui.debugflag:
1758 if ui.debugflag:
1759 ui.write("%40s " % hex(m[f]))
1759 ui.write("%40s " % hex(m[f]))
1760 if ui.verbose:
1760 if ui.verbose:
1761 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1761 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1762 ui.write("%s\n" % f)
1762 ui.write("%s\n" % f)
1763
1763
1764 def merge(ui, repo, node=None, force=None):
1764 def merge(ui, repo, node=None, force=None):
1765 """Merge working directory with another revision
1765 """Merge working directory with another revision
1766
1766
1767 Merge the contents of the current working directory and the
1767 Merge the contents of the current working directory and the
1768 requested revision. Files that differ from either parent are
1768 requested revision. Files that differ from either parent are
1769 marked as changed for the next commit, and a commit must be
1769 marked as changed for the next commit, and a commit must be
1770 performed before any further updates are allowed.
1770 performed before any further updates are allowed.
1771
1771
1772 If no revision is specified, the working directory's parent is a
1772 If no revision is specified, the working directory's parent is a
1773 head revision, and the repository contains exactly one other head,
1773 head revision, and the repository contains exactly one other head,
1774 then the other head is used for the merge by default. Otherwise, an explicit
1774 then the other head is used for the merge by default. Otherwise, an explicit
1775 revision to merge with must be provided.
1775 revision to merge with must be provided.
1776 """
1776 """
1777
1777
1778 if not node:
1778 if not node:
1779 heads = repo.heads()
1779 heads = repo.heads()
1780 if len(heads) > 2:
1780 if len(heads) > 2:
1781 raise util.Abort(_('repo has %d heads - '
1781 raise util.Abort(_('repo has %d heads - '
1782 'please merge with an explicit rev') %
1782 'please merge with an explicit rev') %
1783 len(heads))
1783 len(heads))
1784 if len(heads) == 1:
1784 if len(heads) == 1:
1785 raise util.Abort(_('there is nothing to merge - '
1785 raise util.Abort(_('there is nothing to merge - '
1786 'use "hg update" instead'))
1786 'use "hg update" instead'))
1787 parent = repo.dirstate.parents()[0]
1787 parent = repo.dirstate.parents()[0]
1788 if parent not in heads:
1788 if parent not in heads:
1789 raise util.Abort(_('working dir not at a head rev - '
1789 raise util.Abort(_('working dir not at a head rev - '
1790 'use "hg update" or merge with an explicit rev'))
1790 'use "hg update" or merge with an explicit rev'))
1791 node = parent == heads[0] and heads[-1] or heads[0]
1791 node = parent == heads[0] and heads[-1] or heads[0]
1792 return hg.merge(repo, node, force=force)
1792 return hg.merge(repo, node, force=force)
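A compact restatement of the implicit-target rule implemented above, as a standalone sketch (the helper name is invented); it only applies when there are exactly two heads and the working directory sits on one of them:

    def other_head(heads, parent):
        # exactly two heads and parent is one of them -> merge with the other
        if len(heads) != 2 or parent not in heads:
            raise ValueError('an explicit revision is required')
        if parent == heads[0]:
            return heads[1]
        return heads[0]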
1793
1793
1794 def outgoing(ui, repo, dest=None, **opts):
1794 def outgoing(ui, repo, dest=None, **opts):
1795 """show changesets not found in destination
1795 """show changesets not found in destination
1796
1796
1797 Show changesets not found in the specified destination repository or
1797 Show changesets not found in the specified destination repository or
1798 the default push location. These are the changesets that would be pushed
1798 the default push location. These are the changesets that would be pushed
1799 if a push was requested.
1799 if a push was requested.
1800
1800
1801 See pull for valid destination format details.
1801 See pull for valid destination format details.
1802 """
1802 """
1803 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1803 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1804 setremoteconfig(ui, opts)
1804 setremoteconfig(ui, opts)
1805 revs = None
1805 revs = None
1806 if opts['rev']:
1806 if opts['rev']:
1807 revs = [repo.lookup(rev) for rev in opts['rev']]
1807 revs = [repo.lookup(rev) for rev in opts['rev']]
1808
1808
1809 other = hg.repository(ui, dest)
1809 other = hg.repository(ui, dest)
1810 o = repo.findoutgoing(other, force=opts['force'])
1810 o = repo.findoutgoing(other, force=opts['force'])
1811 if not o:
1811 if not o:
1812 ui.status(_("no changes found\n"))
1812 ui.status(_("no changes found\n"))
1813 return
1813 return
1814 o = repo.changelog.nodesbetween(o, revs)[0]
1814 o = repo.changelog.nodesbetween(o, revs)[0]
1815 if opts['newest_first']:
1815 if opts['newest_first']:
1816 o.reverse()
1816 o.reverse()
1817 displayer = cmdutil.show_changeset(ui, repo, opts)
1817 displayer = cmdutil.show_changeset(ui, repo, opts)
1818 for n in o:
1818 for n in o:
1819 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1819 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1820 if opts['no_merges'] and len(parents) == 2:
1820 if opts['no_merges'] and len(parents) == 2:
1821 continue
1821 continue
1822 displayer.show(changenode=n)
1822 displayer.show(changenode=n)
1823
1823
1824 def parents(ui, repo, file_=None, **opts):
1824 def parents(ui, repo, file_=None, **opts):
1825 """show the parents of the working dir or revision
1825 """show the parents of the working dir or revision
1826
1826
1827 Print the working directory's parent revisions.
1827 Print the working directory's parent revisions.
1828 """
1828 """
1829 rev = opts.get('rev')
1829 rev = opts.get('rev')
1830 if rev:
1830 if rev:
1831 if file_:
1831 if file_:
1832 ctx = repo.filectx(file_, changeid=rev)
1832 ctx = repo.filectx(file_, changeid=rev)
1833 else:
1833 else:
1834 ctx = repo.changectx(rev)
1834 ctx = repo.changectx(rev)
1835 p = [cp.node() for cp in ctx.parents()]
1835 p = [cp.node() for cp in ctx.parents()]
1836 else:
1836 else:
1837 p = repo.dirstate.parents()
1837 p = repo.dirstate.parents()
1838
1838
1839 displayer = cmdutil.show_changeset(ui, repo, opts)
1839 displayer = cmdutil.show_changeset(ui, repo, opts)
1840 for n in p:
1840 for n in p:
1841 if n != nullid:
1841 if n != nullid:
1842 displayer.show(changenode=n)
1842 displayer.show(changenode=n)
1843
1843
1844 def paths(ui, repo, search=None):
1844 def paths(ui, repo, search=None):
1845 """show definition of symbolic path names
1845 """show definition of symbolic path names
1846
1846
1847 Show definition of symbolic path name NAME. If no name is given, show
1847 Show definition of symbolic path name NAME. If no name is given, show
1848 the definitions of all available names.
1848 the definitions of all available names.
1849
1849
1850 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1850 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1851 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1851 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1852 """
1852 """
1853 if search:
1853 if search:
1854 for name, path in ui.configitems("paths"):
1854 for name, path in ui.configitems("paths"):
1855 if name == search:
1855 if name == search:
1856 ui.write("%s\n" % path)
1856 ui.write("%s\n" % path)
1857 return
1857 return
1858 ui.warn(_("not found!\n"))
1858 ui.warn(_("not found!\n"))
1859 return 1
1859 return 1
1860 else:
1860 else:
1861 for name, path in ui.configitems("paths"):
1861 for name, path in ui.configitems("paths"):
1862 ui.write("%s = %s\n" % (name, path))
1862 ui.write("%s = %s\n" % (name, path))
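As the help text above says, the names printed here come from the [paths] section of an hgrc. A hypothetical configuration (both URLs invented for illustration):

    [paths]
    default = http://hg.example.com/project
    backup = ssh://hg@backup.example.com//srv/hg/project

With that in place, `hg paths backup` prints the ssh URL and `hg paths` with no argument lists both entries.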
1863
1863
1864 def postincoming(ui, repo, modheads, optupdate):
1864 def postincoming(ui, repo, modheads, optupdate):
1865 if modheads == 0:
1865 if modheads == 0:
1866 return
1866 return
1867 if optupdate:
1867 if optupdate:
1868 if modheads == 1:
1868 if modheads == 1:
1869 return hg.update(repo, repo.changelog.tip()) # update
1869 return hg.update(repo, repo.changelog.tip()) # update
1870 else:
1870 else:
1871 ui.status(_("not updating, since new heads added\n"))
1871 ui.status(_("not updating, since new heads added\n"))
1872 if modheads > 1:
1872 if modheads > 1:
1873 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1873 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1874 else:
1874 else:
1875 ui.status(_("(run 'hg update' to get a working copy)\n"))
1875 ui.status(_("(run 'hg update' to get a working copy)\n"))
1876
1876
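postincoming decides what happens after new changesets arrive: zero changed heads means there is nothing to do; with --update and exactly one changed head the working directory is updated to the new tip; otherwise the user only gets a hint, 'hg heads'/'hg merge' when extra heads appeared, or 'hg update' when an update simply was not requested.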
1877 def pull(ui, repo, source="default", **opts):
1877 def pull(ui, repo, source="default", **opts):
1878 """pull changes from the specified source
1878 """pull changes from the specified source
1879
1879
1880 Pull changes from a remote repository to a local one.
1880 Pull changes from a remote repository to a local one.
1881
1881
1882 This finds all changes from the repository at the specified path
1882 This finds all changes from the repository at the specified path
1883 or URL and adds them to the local repository. By default, this
1883 or URL and adds them to the local repository. By default, this
1884 does not update the copy of the project in the working directory.
1884 does not update the copy of the project in the working directory.
1885
1885
1886 Valid URLs are of the form:
1886 Valid URLs are of the form:
1887
1887
1888 local/filesystem/path (or file://local/filesystem/path)
1888 local/filesystem/path (or file://local/filesystem/path)
1889 http://[user@]host[:port]/[path]
1889 http://[user@]host[:port]/[path]
1890 https://[user@]host[:port]/[path]
1890 https://[user@]host[:port]/[path]
1891 ssh://[user@]host[:port]/[path]
1891 ssh://[user@]host[:port]/[path]
1892 static-http://host[:port]/[path]
1892 static-http://host[:port]/[path]
1893
1893
1894 Paths in the local filesystem can either point to Mercurial
1894 Paths in the local filesystem can either point to Mercurial
1895 repositories or to bundle files (as created by 'hg bundle' or
1895 repositories or to bundle files (as created by 'hg bundle' or
1896 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1896 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1897 allows access to a Mercurial repository where you simply use a web
1897 allows access to a Mercurial repository where you simply use a web
1898 server to publish the .hg directory as static content.
1898 server to publish the .hg directory as static content.
1899
1899
1900 Some notes about using SSH with Mercurial:
1900 Some notes about using SSH with Mercurial:
1901 - SSH requires an accessible shell account on the destination machine
1901 - SSH requires an accessible shell account on the destination machine
1902 and a copy of hg in the remote path, or one specified with remotecmd.
1902 and a copy of hg in the remote path, or one specified with remotecmd.
1903 - path is relative to the remote user's home directory by default.
1903 - path is relative to the remote user's home directory by default.
1904 Use an extra slash at the start of a path to specify an absolute path:
1904 Use an extra slash at the start of a path to specify an absolute path:
1905 ssh://example.com//tmp/repository
1905 ssh://example.com//tmp/repository
1906 - Mercurial doesn't use its own compression via SSH; the right thing
1906 - Mercurial doesn't use its own compression via SSH; the right thing
1907 to do is to configure it in your ~/.ssh/config, e.g.:
1907 to do is to configure it in your ~/.ssh/config, e.g.:
1908 Host *.mylocalnetwork.example.com
1908 Host *.mylocalnetwork.example.com
1909 Compression no
1909 Compression no
1910 Host *
1910 Host *
1911 Compression yes
1911 Compression yes
1912 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1912 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1913 with the --ssh command line option.
1913 with the --ssh command line option.
1914 """
1914 """
1915 source = ui.expandpath(source)
1915 source = ui.expandpath(source)
1916 setremoteconfig(ui, opts)
1916 setremoteconfig(ui, opts)
1917
1917
1918 other = hg.repository(ui, source)
1918 other = hg.repository(ui, source)
1919 ui.status(_('pulling from %s\n') % (source))
1919 ui.status(_('pulling from %s\n') % (source))
1920 revs = None
1920 revs = None
1921 if opts['rev']:
1921 if opts['rev']:
1922 if 'lookup' in other.capabilities:
1922 if 'lookup' in other.capabilities:
1923 revs = [other.lookup(rev) for rev in opts['rev']]
1923 revs = [other.lookup(rev) for rev in opts['rev']]
1924 else:
1924 else:
1925 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1925 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1926 raise util.Abort(error)
1926 raise util.Abort(error)
1927 modheads = repo.pull(other, heads=revs, force=opts['force'])
1927 modheads = repo.pull(other, heads=revs, force=opts['force'])
1928 return postincoming(ui, repo, modheads, opts['update'])
1928 return postincoming(ui, repo, modheads, opts['update'])
1929
1929
1930 def push(ui, repo, dest=None, **opts):
1930 def push(ui, repo, dest=None, **opts):
1931 """push changes to the specified destination
1931 """push changes to the specified destination
1932
1932
1933 Push changes from the local repository to the given destination.
1933 Push changes from the local repository to the given destination.
1934
1934
1935 This is the symmetrical operation for pull. It helps to move
1935 This is the symmetrical operation for pull. It helps to move
1936 changes from the current repository to a different one. If the
1936 changes from the current repository to a different one. If the
1937 destination is local, this is identical to a pull in that directory
1937 destination is local, this is identical to a pull in that directory
1938 from the current one.
1938 from the current one.
1939
1939
1940 By default, push will refuse to run if it detects the result would
1940 By default, push will refuse to run if it detects the result would
1941 increase the number of remote heads. This generally indicates the
1941 increase the number of remote heads. This generally indicates the
1942 the client has forgotten to sync and merge before pushing.
1942 the client has forgotten to sync and merge before pushing.
1943
1943
1944 Valid URLs are of the form:
1944 Valid URLs are of the form:
1945
1945
1946 local/filesystem/path (or file://local/filesystem/path)
1946 local/filesystem/path (or file://local/filesystem/path)
1947 ssh://[user@]host[:port]/[path]
1947 ssh://[user@]host[:port]/[path]
1948 http://[user@]host[:port]/[path]
1948 http://[user@]host[:port]/[path]
1949 https://[user@]host[:port]/[path]
1949 https://[user@]host[:port]/[path]
1950
1950
1951 Look at the help text for the pull command for important details
1951 Look at the help text for the pull command for important details
1952 about ssh:// URLs.
1952 about ssh:// URLs.
1953
1953
1954 Pushing to http:// and https:// URLs is only possible if this
1954 Pushing to http:// and https:// URLs is only possible if this
1955 feature is explicitly enabled on the remote Mercurial server.
1955 feature is explicitly enabled on the remote Mercurial server.
1956 """
1956 """
1957 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1957 dest = ui.expandpath(dest or 'default-push', dest or 'default')
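The expandpath call above gives push its destination defaulting: with no DEST argument it prefers a configured `default-push` path and falls back to `default`. A rough model of that lookup, assuming a plain dict of configured paths and ignoring expandpath's other conveniences:

    def expandpath_sketch(paths, loc, fallback=None):
        if loc in paths:
            return paths[loc]
        if fallback is not None and fallback in paths:
            return paths[fallback]
        return loc

    paths = {'default': 'http://hg.example.com/project'}
    # no DEST on the command line: try 'default-push' first, then 'default'
    assert expandpath_sketch(paths, 'default-push', 'default') == \
        'http://hg.example.com/project'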
1958 setremoteconfig(ui, opts)
1958 setremoteconfig(ui, opts)
1959
1959
1960 other = hg.repository(ui, dest)
1960 other = hg.repository(ui, dest)
1961 ui.status('pushing to %s\n' % (dest))
1961 ui.status('pushing to %s\n' % (dest))
1962 revs = None
1962 revs = None
1963 if opts['rev']:
1963 if opts['rev']:
1964 revs = [repo.lookup(rev) for rev in opts['rev']]
1964 revs = [repo.lookup(rev) for rev in opts['rev']]
1965 r = repo.push(other, opts['force'], revs=revs)
1965 r = repo.push(other, opts['force'], revs=revs)
1966 return r == 0
1966 return r == 0
1967
1967
1968 def rawcommit(ui, repo, *pats, **opts):
1968 def rawcommit(ui, repo, *pats, **opts):
1969 """raw commit interface (DEPRECATED)
1969 """raw commit interface (DEPRECATED)
1970
1970
1971 (DEPRECATED)
1971 (DEPRECATED)
1972 Lowlevel commit, for use in helper scripts.
1972 Lowlevel commit, for use in helper scripts.
1973
1973
1974 This command is not intended to be used by normal users, as it is
1974 This command is not intended to be used by normal users, as it is
1975 primarily useful for importing from other SCMs.
1975 primarily useful for importing from other SCMs.
1976
1976
1977 This command is now deprecated and will be removed in a future
1977 This command is now deprecated and will be removed in a future
1978 release; please use debugsetparents and commit instead.
1978 release; please use debugsetparents and commit instead.
1979 """
1979 """
1980
1980
1981 ui.warn(_("(the rawcommit command is deprecated)\n"))
1981 ui.warn(_("(the rawcommit command is deprecated)\n"))
1982
1982
1983 message = logmessage(opts)
1983 message = logmessage(opts)
1984
1984
1985 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1985 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1986 if opts['files']:
1986 if opts['files']:
1987 files += open(opts['files']).read().splitlines()
1987 files += open(opts['files']).read().splitlines()
1988
1988
1989 parents = [repo.lookup(p) for p in opts['parent']]
1989 parents = [repo.lookup(p) for p in opts['parent']]
1990
1990
1991 try:
1991 try:
1992 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1992 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1993 except ValueError, inst:
1993 except ValueError, inst:
1994 raise util.Abort(str(inst))
1994 raise util.Abort(str(inst))
1995
1995
1996 def recover(ui, repo):
1996 def recover(ui, repo):
1997 """roll back an interrupted transaction
1997 """roll back an interrupted transaction
1998
1998
1999 Recover from an interrupted commit or pull.
1999 Recover from an interrupted commit or pull.
2000
2000
2001 This command tries to fix the repository status after an interrupted
2001 This command tries to fix the repository status after an interrupted
2002 operation. It should only be necessary when Mercurial suggests it.
2002 operation. It should only be necessary when Mercurial suggests it.
2003 """
2003 """
2004 if repo.recover():
2004 if repo.recover():
2005 return hg.verify(repo)
2005 return hg.verify(repo)
2006 return 1
2006 return 1
2007
2007
2008 def remove(ui, repo, *pats, **opts):
2008 def remove(ui, repo, *pats, **opts):
2009 """remove the specified files on the next commit
2009 """remove the specified files on the next commit
2010
2010
2011 Schedule the indicated files for removal from the repository.
2011 Schedule the indicated files for removal from the repository.
2012
2012
2013 This only removes files from the current branch, not from the
2013 This only removes files from the current branch, not from the
2014 entire project history. If the files still exist in the working
2014 entire project history. If the files still exist in the working
2015 directory, they will be deleted from it. If invoked with --after,
2015 directory, they will be deleted from it. If invoked with --after,
2016 files that have been manually deleted are marked as removed.
2016 files that have been manually deleted are marked as removed.
2017
2017
2018 This command schedules the files to be removed at the next commit.
2018 This command schedules the files to be removed at the next commit.
2019 To undo a remove before that, see hg revert.
2019 To undo a remove before that, see hg revert.
2020
2020
2021 Modified files and added files are not removed by default. To
2021 Modified files and added files are not removed by default. To
2022 remove them, use the -f/--force option.
2022 remove them, use the -f/--force option.
2023 """
2023 """
2024 names = []
2024 names = []
2025 if not opts['after'] and not pats:
2025 if not opts['after'] and not pats:
2026 raise util.Abort(_('no files specified'))
2026 raise util.Abort(_('no files specified'))
2027 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2027 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2028 exact = dict.fromkeys(files)
2028 exact = dict.fromkeys(files)
2029 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2029 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2030 modified, added, removed, deleted, unknown = mardu
2030 modified, added, removed, deleted, unknown = mardu
2031 remove, forget = [], []
2031 remove, forget = [], []
2032 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2032 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2033 reason = None
2033 reason = None
2034 if abs not in deleted and opts['after']:
2034 if abs not in deleted and opts['after']:
2035 reason = _('is still present')
2035 reason = _('is still present')
2036 elif abs in modified and not opts['force']:
2036 elif abs in modified and not opts['force']:
2037 reason = _('is modified (use -f to force removal)')
2037 reason = _('is modified (use -f to force removal)')
2038 elif abs in added:
2038 elif abs in added:
2039 if opts['force']:
2039 if opts['force']:
2040 forget.append(abs)
2040 forget.append(abs)
2041 continue
2041 continue
2042 reason = _('has been marked for add (use -f to force removal)')
2042 reason = _('has been marked for add (use -f to force removal)')
2043 elif abs in unknown:
2043 elif abs in unknown:
2044 reason = _('is not managed')
2044 reason = _('is not managed')
2045 elif abs in removed:
2045 elif abs in removed:
2046 continue
2046 continue
2047 if reason:
2047 if reason:
2048 if exact:
2048 if exact:
2049 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2049 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2050 else:
2050 else:
2051 if ui.verbose or not exact:
2051 if ui.verbose or not exact:
2052 ui.status(_('removing %s\n') % rel)
2052 ui.status(_('removing %s\n') % rel)
2053 remove.append(abs)
2053 remove.append(abs)
2054 repo.forget(forget)
2054 repo.forget(forget)
2055 repo.remove(remove, unlink=not opts['after'])
2055 repo.remove(remove, unlink=not opts['after'])
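Summarizing the dispatch above: with --after, only files already missing from the working directory qualify; modified files are refused unless --force is given; files added but not yet committed are forgotten (rather than removed) under --force and refused otherwise; untracked files are never removed; files already scheduled for removal are skipped; everything left is queued for repo.remove(), which also unlinks the working copy unless --after was used.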
2056
2056
2057 def rename(ui, repo, *pats, **opts):
2057 def rename(ui, repo, *pats, **opts):
2058 """rename files; equivalent of copy + remove
2058 """rename files; equivalent of copy + remove
2059
2059
2060 Mark dest as copies of sources; mark sources for deletion. If
2060 Mark dest as copies of sources; mark sources for deletion. If
2061 dest is a directory, copies are put in that directory. If dest is
2061 dest is a directory, copies are put in that directory. If dest is
2062 a file, there can only be one source.
2062 a file, there can only be one source.
2063
2063
2064 By default, this command copies the contents of files as they
2064 By default, this command copies the contents of files as they
2065 stand in the working directory. If invoked with --after, the
2065 stand in the working directory. If invoked with --after, the
2066 operation is recorded, but no copying is performed.
2066 operation is recorded, but no copying is performed.
2067
2067
2068 This command takes effect in the next commit. To undo a rename
2068 This command takes effect in the next commit. To undo a rename
2069 before that, see hg revert.
2069 before that, see hg revert.
2070 """
2070 """
2071 wlock = repo.wlock(0)
2071 wlock = repo.wlock(0)
2072 errs, copied = docopy(ui, repo, pats, opts, wlock)
2072 errs, copied = docopy(ui, repo, pats, opts, wlock)
2073 names = []
2073 names = []
2074 for abs, rel, exact in copied:
2074 for abs, rel, exact in copied:
2075 if ui.verbose or not exact:
2075 if ui.verbose or not exact:
2076 ui.status(_('removing %s\n') % rel)
2076 ui.status(_('removing %s\n') % rel)
2077 names.append(abs)
2077 names.append(abs)
2078 if not opts.get('dry_run'):
2078 if not opts.get('dry_run'):
2079 repo.remove(names, True, wlock)
2079 repo.remove(names, True, wlock)
2080 return errs
2080 return errs
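As the docstring promises, rename really is copy plus remove: docopy records the copy relation (and, without --after, copies the contents), and the loop above then schedules every successfully copied source for removal, so the next commit records a rename with its copy metadata intact. With --dry-run the removal step is skipped along with everything else.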
2081
2081
2082 def revert(ui, repo, *pats, **opts):
2082 def revert(ui, repo, *pats, **opts):
2083 """revert files or dirs to their states as of some revision
2083 """revert files or dirs to their states as of some revision
2084
2084
2085 With no revision specified, revert the named files or directories
2085 With no revision specified, revert the named files or directories
2086 to the contents they had in the parent of the working directory.
2086 to the contents they had in the parent of the working directory.
2087 This restores the contents of the affected files to an unmodified
2087 This restores the contents of the affected files to an unmodified
2088 state and unschedules adds, removes, copies, and renames. If the
2088 state and unschedules adds, removes, copies, and renames. If the
2089 working directory has two parents, you must explicitly specify the
2089 working directory has two parents, you must explicitly specify the
2090 revision to revert to.
2090 revision to revert to.
2091
2091
2092 Modified files are saved with a .orig suffix before reverting.
2092 Modified files are saved with a .orig suffix before reverting.
2093 To disable these backups, use --no-backup.
2093 To disable these backups, use --no-backup.
2094
2094
2095 Using the -r option, revert the given files or directories to their
2095 Using the -r option, revert the given files or directories to their
2096 contents as of a specific revision. This can be helpful to "roll
2096 contents as of a specific revision. This can be helpful to "roll
2097 back" some or all of a change that should not have been committed.
2097 back" some or all of a change that should not have been committed.
2098
2098
2099 Revert modifies the working directory. It does not commit any
2099 Revert modifies the working directory. It does not commit any
2100 changes, or change the parent of the working directory. If you
2100 changes, or change the parent of the working directory. If you
2101 revert to a revision other than the parent of the working
2101 revert to a revision other than the parent of the working
2102 directory, the reverted files will thus appear modified
2102 directory, the reverted files will thus appear modified
2103 afterwards.
2103 afterwards.
2104
2104
2105 If a file has been deleted, it is recreated. If the executable
2105 If a file has been deleted, it is recreated. If the executable
2106 mode of a file was changed, it is reset.
2106 mode of a file was changed, it is reset.
2107
2107
2108 If names are given, all files matching the names are reverted.
2108 If names are given, all files matching the names are reverted.
2109
2109
2110 If no arguments are given, no files are reverted.
2110 If no arguments are given, no files are reverted.
2111 """
2111 """
2112
2112
2113 if opts["date"]:
2113 if opts["date"]:
2114 if opts["rev"]:
2114 if opts["rev"]:
2115 raise util.Abort(_("you can't specify a revision and a date"))
2115 raise util.Abort(_("you can't specify a revision and a date"))
2116 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2116 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2117
2117
2118 if not pats and not opts['all']:
2118 if not pats and not opts['all']:
2119 raise util.Abort(_('no files or directories specified; '
2119 raise util.Abort(_('no files or directories specified; '
2120 'use --all to revert the whole repo'))
2120 'use --all to revert the whole repo'))
2121
2121
2122 parent, p2 = repo.dirstate.parents()
2122 parent, p2 = repo.dirstate.parents()
2123 if not opts['rev'] and p2 != nullid:
2123 if not opts['rev'] and p2 != nullid:
2124 raise util.Abort(_('uncommitted merge - please provide a '
2124 raise util.Abort(_('uncommitted merge - please provide a '
2125 'specific revision'))
2125 'specific revision'))
2126 node = repo.changectx(opts['rev']).node()
2126 node = repo.changectx(opts['rev']).node()
2127 mf = repo.manifest.read(repo.changelog.read(node)[0])
2127 mf = repo.manifest.read(repo.changelog.read(node)[0])
2128 if node == parent:
2128 if node == parent:
2129 pmf = mf
2129 pmf = mf
2130 else:
2130 else:
2131 pmf = None
2131 pmf = None
2132
2132
2133 wlock = repo.wlock()
2133 wlock = repo.wlock()
2134
2134
2135 # need all matching names in dirstate and manifest of target rev,
2135 # need all matching names in dirstate and manifest of target rev,
2136 # so have to walk both. do not print errors if files exist in one
2136 # so have to walk both. do not print errors if files exist in one
2137 # but not the other.
2137 # but not the other.
2138
2138
2139 names = {}
2139 names = {}
2140 target_only = {}
2140 target_only = {}
2141
2141
2142 # walk dirstate.
2142 # walk dirstate.
2143
2143
2144 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2144 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2145 badmatch=mf.has_key):
2145 badmatch=mf.has_key):
2146 names[abs] = (rel, exact)
2146 names[abs] = (rel, exact)
2147 if src == 'b':
2147 if src == 'b':
2148 target_only[abs] = True
2148 target_only[abs] = True
2149
2149
2150 # walk target manifest.
2150 # walk target manifest.
2151
2151
2152 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2152 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2153 badmatch=names.has_key):
2153 badmatch=names.has_key):
2154 if abs in names: continue
2154 if abs in names: continue
2155 names[abs] = (rel, exact)
2155 names[abs] = (rel, exact)
2156 target_only[abs] = True
2156 target_only[abs] = True
2157
2157
2158 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2158 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2159 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2159 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2160
2160
2161 revert = ([], _('reverting %s\n'))
2161 revert = ([], _('reverting %s\n'))
2162 add = ([], _('adding %s\n'))
2162 add = ([], _('adding %s\n'))
2163 remove = ([], _('removing %s\n'))
2163 remove = ([], _('removing %s\n'))
2164 forget = ([], _('forgetting %s\n'))
2164 forget = ([], _('forgetting %s\n'))
2165 undelete = ([], _('undeleting %s\n'))
2165 undelete = ([], _('undeleting %s\n'))
2166 update = {}
2166 update = {}
2167
2167
2168 disptable = (
2168 disptable = (
2169 # dispatch table:
2169 # dispatch table:
2170 # file state
2170 # file state
2171 # action if in target manifest
2171 # action if in target manifest
2172 # action if not in target manifest
2172 # action if not in target manifest
2173 # make backup if in target manifest
2173 # make backup if in target manifest
2174 # make backup if not in target manifest
2174 # make backup if not in target manifest
2175 (modified, revert, remove, True, True),
2175 (modified, revert, remove, True, True),
2176 (added, revert, forget, True, False),
2176 (added, revert, forget, True, False),
2177 (removed, undelete, None, False, False),
2177 (removed, undelete, None, False, False),
2178 (deleted, revert, remove, False, False),
2178 (deleted, revert, remove, False, False),
2179 (unknown, add, None, True, False),
2179 (unknown, add, None, True, False),
2180 (target_only, add, None, False, False),
2180 (target_only, add, None, False, False),
2181 )
2181 )
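Each row of the table above pairs a dirstate category with two possible actions and two backup flags: the first action (and first flag) applies when the file still exists in the target manifest, the second when it does not. A small sketch of that lookup (the helper name is invented):

    def classify(abs, in_target_manifest, disptable):
        # returns (action_list, make_backup), or (None, False) if untouched
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table:
                continue
            if in_target_manifest:
                return hitlist, backuphit
            return misslist, backupmiss
        return None, False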
2182
2182
2183 entries = names.items()
2183 entries = names.items()
2184 entries.sort()
2184 entries.sort()
2185
2185
2186 for abs, (rel, exact) in entries:
2186 for abs, (rel, exact) in entries:
2187 mfentry = mf.get(abs)
2187 mfentry = mf.get(abs)
2188 def handle(xlist, dobackup):
2188 def handle(xlist, dobackup):
2189 xlist[0].append(abs)
2189 xlist[0].append(abs)
2190 update[abs] = 1
2190 update[abs] = 1
2191 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2191 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2192 bakname = "%s.orig" % rel
2192 bakname = "%s.orig" % rel
2193 ui.note(_('saving current version of %s as %s\n') %
2193 ui.note(_('saving current version of %s as %s\n') %
2194 (rel, bakname))
2194 (rel, bakname))
2195 if not opts.get('dry_run'):
2195 if not opts.get('dry_run'):
2196 util.copyfile(rel, bakname)
2196 util.copyfile(rel, bakname)
2197 if ui.verbose or not exact:
2197 if ui.verbose or not exact:
2198 ui.status(xlist[1] % rel)
2198 ui.status(xlist[1] % rel)
2199 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2199 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2200 if abs not in table: continue
2200 if abs not in table: continue
2201 # file has changed in dirstate
2201 # file has changed in dirstate
2202 if mfentry:
2202 if mfentry:
2203 handle(hitlist, backuphit)
2203 handle(hitlist, backuphit)
2204 elif misslist is not None:
2204 elif misslist is not None:
2205 handle(misslist, backupmiss)
2205 handle(misslist, backupmiss)
2206 else:
2206 else:
2207 if exact: ui.warn(_('file not managed: %s\n') % rel)
2207 if exact: ui.warn(_('file not managed: %s\n') % rel)
2208 break
2208 break
2209 else:
2209 else:
2210 # file has not changed in dirstate
2210 # file has not changed in dirstate
2211 if node == parent:
2211 if node == parent:
2212 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2212 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2213 continue
2213 continue
2214 if pmf is None:
2214 if pmf is None:
2215 # only need parent manifest in this unlikely case,
2215 # only need parent manifest in this unlikely case,
2216 # so do not read by default
2216 # so do not read by default
2217 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2217 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2218 if abs in pmf:
2218 if abs in pmf:
2219 if mfentry:
2219 if mfentry:
2220 # if version of file is same in parent and target
2220 # if version of file is same in parent and target
2221 # manifests, do nothing
2221 # manifests, do nothing
2222 if pmf[abs] != mfentry:
2222 if pmf[abs] != mfentry:
2223 handle(revert, False)
2223 handle(revert, False)
2224 else:
2224 else:
2225 handle(remove, False)
2225 handle(remove, False)
2226
2226
2227 if not opts.get('dry_run'):
2227 if not opts.get('dry_run'):
2228 repo.dirstate.forget(forget[0])
2228 repo.dirstate.forget(forget[0])
2229 r = hg.revert(repo, node, update.has_key, wlock)
2229 r = hg.revert(repo, node, update.has_key, wlock)
2230 repo.dirstate.update(add[0], 'a')
2230 repo.dirstate.update(add[0], 'a')
2231 repo.dirstate.update(undelete[0], 'n')
2231 repo.dirstate.update(undelete[0], 'n')
2232 repo.dirstate.update(remove[0], 'r')
2232 repo.dirstate.update(remove[0], 'r')
2233 return r
2233 return r
2234
2234
2235 def rollback(ui, repo):
2235 def rollback(ui, repo):
2236 """roll back the last transaction in this repository
2236 """roll back the last transaction in this repository
2237
2237
2238 Roll back the last transaction in this repository, restoring the
2238 Roll back the last transaction in this repository, restoring the
2239 project to its state prior to the transaction.
2239 project to its state prior to the transaction.
2240
2240
2241 Transactions are used to encapsulate the effects of all commands
2241 Transactions are used to encapsulate the effects of all commands
2242 that create new changesets or propagate existing changesets into a
2242 that create new changesets or propagate existing changesets into a
2243 repository. For example, the following commands are transactional,
2243 repository. For example, the following commands are transactional,
2244 and their effects can be rolled back:
2244 and their effects can be rolled back:
2245
2245
2246 commit
2246 commit
2247 import
2247 import
2248 pull
2248 pull
2249 push (with this repository as destination)
2249 push (with this repository as destination)
2250 unbundle
2250 unbundle
2251
2251
2252 This command should be used with care. There is only one level of
2252 This command should be used with care. There is only one level of
2253 rollback, and there is no way to undo a rollback.
2253 rollback, and there is no way to undo a rollback.
2254
2254
2255 This command is not intended for use on public repositories. Once
2255 This command is not intended for use on public repositories. Once
2256 changes are visible for pull by other users, rolling a transaction
2256 changes are visible for pull by other users, rolling a transaction
2257 back locally is ineffective (someone else may already have pulled
2257 back locally is ineffective (someone else may already have pulled
2258 the changes). Furthermore, a race is possible with readers of the
2258 the changes). Furthermore, a race is possible with readers of the
2259 repository; for example an in-progress pull from the repository
2259 repository; for example an in-progress pull from the repository
2260 may fail if a rollback is performed.
2260 may fail if a rollback is performed.
2261 """
2261 """
2262 repo.rollback()
2262 repo.rollback()
2263
2263
2264 def root(ui, repo):
2264 def root(ui, repo):
2265 """print the root (top) of the current working dir
2265 """print the root (top) of the current working dir
2266
2266
2267 Print the root directory of the current repository.
2267 Print the root directory of the current repository.
2268 """
2268 """
2269 ui.write(repo.root + "\n")
2269 ui.write(repo.root + "\n")
2270
2270
2271 def serve(ui, repo, **opts):
2271 def serve(ui, repo, **opts):
2272 """export the repository via HTTP
2272 """export the repository via HTTP
2273
2273
2274 Start a local HTTP repository browser and pull server.
2274 Start a local HTTP repository browser and pull server.
2275
2275
2276 By default, the server logs accesses to stdout and errors to
2276 By default, the server logs accesses to stdout and errors to
2277 stderr. Use the "-A" and "-E" options to log to files.
2277 stderr. Use the "-A" and "-E" options to log to files.
2278 """
2278 """
2279
2279
2280 if opts["stdio"]:
2280 if opts["stdio"]:
2281 if repo is None:
2281 if repo is None:
2282 raise hg.RepoError(_("There is no Mercurial repository here"
2282 raise hg.RepoError(_("There is no Mercurial repository here"
2283 " (.hg not found)"))
2283 " (.hg not found)"))
2284 s = sshserver.sshserver(ui, repo)
2284 s = sshserver.sshserver(ui, repo)
2285 s.serve_forever()
2285 s.serve_forever()
2286
2286
2287 optlist = ("name templates style address port ipv6"
2287 optlist = ("name templates style address port ipv6"
2288 " accesslog errorlog webdir_conf")
2288 " accesslog errorlog webdir_conf")
2289 for o in optlist.split():
2289 for o in optlist.split():
2290 if opts[o]:
2290 if opts[o]:
2291 ui.setconfig("web", o, str(opts[o]))
2291 ui.setconfig("web", o, str(opts[o]))
2292
2292
2293 if repo is None and not ui.config("web", "webdir_conf"):
2293 if repo is None and not ui.config("web", "webdir_conf"):
2294 raise hg.RepoError(_("There is no Mercurial repository here"
2294 raise hg.RepoError(_("There is no Mercurial repository here"
2295 " (.hg not found)"))
2295 " (.hg not found)"))
2296
2296
2297 if opts['daemon'] and not opts['daemon_pipefds']:
2297 if opts['daemon'] and not opts['daemon_pipefds']:
2298 rfd, wfd = os.pipe()
2298 rfd, wfd = os.pipe()
2299 args = sys.argv[:]
2299 args = sys.argv[:]
2300 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2300 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2301 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2301 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2302 args[0], args)
2302 args[0], args)
2303 os.close(wfd)
2303 os.close(wfd)
2304 os.read(rfd, 1)
2304 os.read(rfd, 1)
2305 os._exit(0)
2305 os._exit(0)
2306
2306
2307 httpd = hgweb.server.create_server(ui, repo)
2307 httpd = hgweb.server.create_server(ui, repo)
2308
2308
2309 if ui.verbose:
2309 if ui.verbose:
2310 if httpd.port != 80:
2310 if httpd.port != 80:
2311 ui.status(_('listening at http://%s:%d/\n') %
2311 ui.status(_('listening at http://%s:%d/\n') %
2312 (httpd.addr, httpd.port))
2312 (httpd.addr, httpd.port))
2313 else:
2313 else:
2314 ui.status(_('listening at http://%s/\n') % httpd.addr)
2314 ui.status(_('listening at http://%s/\n') % httpd.addr)
2315
2315
2316 if opts['pid_file']:
2316 if opts['pid_file']:
2317 fp = open(opts['pid_file'], 'w')
2317 fp = open(opts['pid_file'], 'w')
2318 fp.write(str(os.getpid()) + '\n')
2318 fp.write(str(os.getpid()) + '\n')
2319 fp.close()
2319 fp.close()
2320
2320
2321 if opts['daemon_pipefds']:
2321 if opts['daemon_pipefds']:
2322 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2322 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2323 os.close(rfd)
2323 os.close(rfd)
2324 os.write(wfd, 'y')
2324 os.write(wfd, 'y')
2325 os.close(wfd)
2325 os.close(wfd)
2326 sys.stdout.flush()
2326 sys.stdout.flush()
2327 sys.stderr.flush()
2327 sys.stderr.flush()
2328 fd = os.open(util.nulldev, os.O_RDWR)
2328 fd = os.open(util.nulldev, os.O_RDWR)
2329 if fd != 0: os.dup2(fd, 0)
2329 if fd != 0: os.dup2(fd, 0)
2330 if fd != 1: os.dup2(fd, 1)
2330 if fd != 1: os.dup2(fd, 1)
2331 if fd != 2: os.dup2(fd, 2)
2331 if fd != 2: os.dup2(fd, 2)
2332 if fd not in (0, 1, 2): os.close(fd)
2332 if fd not in (0, 1, 2): os.close(fd)
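The --daemon plumbing is a two-process handshake: the parent creates a pipe, re-executes itself with --daemon-pipefds carrying the two descriptor numbers, and blocks on a one-byte read until the server is ready; the child (the code path above) writes 'y' to the pipe after the listening socket exists, closes it, and redirects stdin/stdout/stderr to the null device before entering serve_forever. The foreground command therefore only returns once the server is actually accepting connections.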
2333
2333
2334 httpd.serve_forever()
2334 httpd.serve_forever()
2335
2335
2336 def status(ui, repo, *pats, **opts):
2336 def status(ui, repo, *pats, **opts):
2337 """show changed files in the working directory
2337 """show changed files in the working directory
2338
2338
2339 Show status of files in the repository. If names are given, only
2339 Show status of files in the repository. If names are given, only
2340 files that match are shown. Files that are clean or ignored are
2340 files that match are shown. Files that are clean or ignored are
2341 not listed unless -c (clean), -i (ignored) or -A is given.
2341 not listed unless -c (clean), -i (ignored) or -A is given.
2342
2342
2343 NOTE: status may appear to disagree with diff if permissions have
2343 NOTE: status may appear to disagree with diff if permissions have
2344 changed or a merge has occurred. The standard diff format does not
2344 changed or a merge has occurred. The standard diff format does not
2345 report permission changes and diff only reports changes relative
2346 to one merge parent.
2347
2348 If one revision is given, it is used as the base revision.
2349 If two revisions are given, the difference between them is shown.
2350
2351 The codes used to show the status of files are:
2352 M = modified
2353 A = added
2354 R = removed
2355 C = clean
2356 ! = deleted, but still tracked
2357 ? = not tracked
2358 I = ignored (not shown by default)
2359 = the previous added file was copied from here
2360 """
2361
2362 all = opts['all']
2363 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2364
2365 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2366 cwd = (pats and repo.getcwd()) or ''
2367 modified, added, removed, deleted, unknown, ignored, clean = [
2368 [util.pathto(cwd, x) for x in n]
2369 for n in repo.status(node1=node1, node2=node2, files=files,
2370 match=matchfn,
2371 list_ignored=all or opts['ignored'],
2372 list_clean=all or opts['clean'])]
2373
2374 changetypes = (('modified', 'M', modified),
2375 ('added', 'A', added),
2376 ('removed', 'R', removed),
2377 ('deleted', '!', deleted),
2378 ('unknown', '?', unknown),
2379 ('ignored', 'I', ignored))
2380
2381 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2382
2383 end = opts['print0'] and '\0' or '\n'
2384
2385 for opt, char, changes in ([ct for ct in explicit_changetypes
2386 if all or opts[ct[0]]]
2387 or changetypes):
2388 if opts['no_status']:
2389 format = "%%s%s" % end
2390 else:
2391 format = "%s %%s%s" % (char, end)
2392
2393 for f in changes:
2394 ui.write(format % f)
2395 if ((all or opts.get('copies')) and not opts.get('no_status')):
2396 copied = repo.dirstate.copied(f)
2397 if copied:
2398 ui.write(' %s%s' % (copied, end))
2399
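# Output of status() above, illustrated with a hypothetical file foo.c: a
# modified file is printed as "M foo.c", or just "foo.c" with --no-status;
# with --print0 the newline separator becomes a NUL byte so the list can be
# piped to "xargs -0".  With -C/--copies (or --all) a copied file is followed
# by an extra line naming its copy source.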
2400 def tag(ui, repo, name, rev_=None, **opts):
2401 """add a tag for the current tip or a given revision
2402
2403 Name a particular revision using <name>.
2404
2405 Tags are used to name particular revisions of the repository and are
2406 very useful to compare different revisions, to go back to significant
2407 earlier versions or to mark branch points as releases, etc.
2408
2409 If no revision is given, the parent of the working directory is used.
2410
2411 To facilitate version control, distribution, and merging of tags,
2412 they are stored as a file named ".hgtags" which is managed
2413 similarly to other project files and can be hand-edited if
2414 necessary. The file '.hg/localtags' is used for local tags (not
2415 shared among repositories).
2416 """
2417 if name in ['tip', '.', 'null']:
2418 raise util.Abort(_("the name '%s' is reserved") % name)
2419 if rev_ is not None:
2420 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2421 "please use 'hg tag [-r REV] NAME' instead\n"))
2422 if opts['rev']:
2423 raise util.Abort(_("use only one form to specify the revision"))
2424 if opts['rev']:
2425 rev_ = opts['rev']
2426 if not rev_ and repo.dirstate.parents()[1] != nullid:
2427 raise util.Abort(_('uncommitted merge - please provide a '
2428 'specific revision'))
2429 r = repo.changectx(rev_).node()
2430
2431 message = opts['message']
2432 if not message:
2433 message = _('Added tag %s for changeset %s') % (name, short(r))
2434
2435 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2436
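# tag() above, illustrated: "hg tag -r 3 v1.0" (hypothetical revision and
# name) records revision 3 under the name "v1.0" with the default message
# "Added tag v1.0 for changeset <short hash>", going into .hgtags unless
# -l/--local is given.  The old "hg tag v1.0 3" form still works but emits
# the deprecation warning seen above.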
2437 def tags(ui, repo):
2438 """list repository tags
2439
2440 List the repository tags.
2441
2442 This lists both regular and local tags.
2443 """
2444
2445 l = repo.tagslist()
2446 l.reverse()
2447 hexfunc = ui.debugflag and hex or short
2448 for t, n in l:
2449 try:
2450 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2451 except KeyError:
2452 r = " ?:?"
2453 if ui.quiet:
2454 ui.write("%s\n" % t)
2455 else:
2456 t = util.localsub(t, 30)
2457 t += " " * (30 - util.locallen(t))
2458 ui.write("%s %s\n" % (t, r))
2459
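# tags() above prints one line per tag: the name padded to 30 display
# columns, then "<rev>:<short hash>" with the revision right-aligned to
# width 5.  A node missing from the changelog shows as " ?:?", and with
# --quiet only the tag names are printed.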
2460 def tip(ui, repo, **opts):
2461 """show the tip revision
2462
2463 Show the tip revision.
2464 """
2465 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2466
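# In tip() above, nullrev is assumed to be -1 (as elsewhere in Mercurial),
# so nullrev + repo.changelog.count() is simply the index of the last
# revision, i.e. the tip.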
2467 def unbundle(ui, repo, fname, **opts):
2468 """apply a changegroup file
2469
2470 Apply a compressed changegroup file generated by the bundle
2471 command.
2472 """
2473 gen = changegroup.readbundle(urllib.urlopen(fname), fname)
2474 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2475 return postincoming(ui, repo, modheads, opts['update'])
2476
2477 def update(ui, repo, node=None, clean=False, date=None):
2478 """update or merge working directory
2479
2480 Update the working directory to the specified revision.
2481
2482 If there are no outstanding changes in the working directory and
2483 there is a linear relationship between the current version and the
2484 requested version, the result is the requested version.
2485
2486 To merge the working directory with another revision, use the
2487 merge command.
2488
2489 By default, update will refuse to run if doing so would require
2490 merging or discarding local changes.
2491 """
2492 if date:
2493 if node:
2494 raise util.Abort(_("you can't specify a revision and a date"))
2495 node = cmdutil.finddate(ui, repo, date)
2496
2497 if clean:
2498 return hg.clean(repo, node)
2499 else:
2500 return hg.update(repo, node)
2501
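# update() above, illustrated: "hg update REV" goes through hg.update(),
# which refuses to discard local changes; "hg update -C REV" uses hg.clean()
# and overwrites locally modified files.  "hg update -d DATE" resolves the
# revision with cmdutil.finddate() first and aborts if a revision is also
# given explicitly.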
2502 def verify(ui, repo):
2503 """verify the integrity of the repository
2504
2505 Verify the integrity of the current repository.
2506
2507 This will perform an extensive check of the repository's
2508 integrity, validating the hashes and checksums of each entry in
2509 the changelog, manifest, and tracked files, as well as the
2510 integrity of their crosslinks and indices.
2511 """
2512 return hg.verify(repo)
2513
2514 def version_(ui):
2515 """output version and copyright information"""
2516 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2517 % version.get_version())
2518 ui.status(_(
2519 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2520 "This is free software; see the source for copying conditions. "
2521 "There is NO\nwarranty; "
2522 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2523 ))
2524
2525 # Command options and aliases are listed here, alphabetically
2526
2527 globalopts = [
2528 ('R', 'repository', '',
2529 _('repository root directory or symbolic path name')),
2530 ('', 'cwd', '', _('change working directory')),
2531 ('y', 'noninteractive', None,
2532 _('do not prompt, assume \'yes\' for any required answers')),
2533 ('q', 'quiet', None, _('suppress output')),
2534 ('v', 'verbose', None, _('enable additional output')),
2535 ('', 'config', [], _('set/override config option')),
2536 ('', 'debug', None, _('enable debugging output')),
2537 ('', 'debugger', None, _('start debugger')),
2538 ('', 'encoding', util._encoding, _('set the charset encoding')),
2539 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2540 ('', 'lsprof', None, _('print improved command execution profile')),
2541 ('', 'traceback', None, _('print traceback on exception')),
2542 ('', 'time', None, _('time how long the command takes')),
2543 ('', 'profile', None, _('print command execution profile')),
2544 ('', 'version', None, _('output version information and exit')),
2545 ('h', 'help', None, _('display help and exit')),
2546 ]
2547
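# Each option above is a 4-tuple handed to fancyopts.fancyopts() in parse()
# below: (short flag, long name, default value, help text).  Judging by the
# defaults used here, None marks a plain on/off switch, '' a string-valued
# option and [] an option that may be repeated (e.g. --config).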
2548 dryrunopts = [('n', 'dry-run', None,
2549 _('do not perform actions, just print output'))]
2550
2551 remoteopts = [
2552 ('e', 'ssh', '', _('specify ssh command to use')),
2553 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2554 ]
2555
2556 walkopts = [
2557 ('I', 'include', [], _('include names matching the given patterns')),
2558 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2559 ]
2560
2561 commitopts = [
2562 ('m', 'message', '', _('use <text> as commit message')),
2563 ('l', 'logfile', '', _('read commit message from <file>')),
2564 ]
2565
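# The shared lists above (dryrunopts, remoteopts, walkopts, commitopts) are
# spliced into individual command definitions below, e.g. "walkopts +
# dryrunopts" for add, so common flags such as -I/-X or -n/--dry-run are
# declared only once.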
2566 table = {
2567 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2568 "addremove":
2569 (addremove,
2570 [('s', 'similarity', '',
2571 _('guess renamed files by similarity (0<=s<=100)')),
2572 ] + walkopts + dryrunopts,
2573 _('hg addremove [OPTION]... [FILE]...')),
2574 "^annotate":
2575 (annotate,
2576 [('r', 'rev', '', _('annotate the specified revision')),
2577 ('f', 'follow', None, _('follow file copies and renames')),
2578 ('a', 'text', None, _('treat all files as text')),
2579 ('u', 'user', None, _('list the author')),
2580 ('d', 'date', None, _('list the date')),
2581 ('n', 'number', None, _('list the revision number (default)')),
2582 ('c', 'changeset', None, _('list the changeset')),
2583 ] + walkopts,
2584 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2585 "archive":
2586 (archive,
2587 [('', 'no-decode', None, _('do not pass files through decoders')),
2588 ('p', 'prefix', '', _('directory prefix for files in archive')),
2589 ('r', 'rev', '', _('revision to distribute')),
2590 ('t', 'type', '', _('type of distribution to create')),
2591 ] + walkopts,
2592 _('hg archive [OPTION]... DEST')),
2593 "backout":
2594 (backout,
2595 [('', 'merge', None,
2596 _('merge with old dirstate parent after backout')),
2597 ('d', 'date', '', _('record datecode as commit date')),
2598 ('', 'parent', '', _('parent to choose when backing out merge')),
2599 ('u', 'user', '', _('record user as committer')),
2600 ] + walkopts + commitopts,
2601 _('hg backout [OPTION]... REV')),
2602 "branch": (branch, [], _('hg branch [NAME]')),
2603 "branches": (branches, [], _('hg branches')),
2604 "bundle":
2605 (bundle,
2606 [('f', 'force', None,
2607 _('run even when remote repository is unrelated')),
2608 ('r', 'rev', [],
2609 _('a changeset you would like to bundle')),
2610 ('', 'base', [],
2611 _('a base changeset to specify instead of a destination')),
2612 ] + remoteopts,
2613 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2614 "cat":
2615 (cat,
2616 [('o', 'output', '', _('print output to file with formatted name')),
2617 ('r', 'rev', '', _('print the given revision')),
2618 ] + walkopts,
2619 _('hg cat [OPTION]... FILE...')),
2620 "^clone":
2621 (clone,
2622 [('U', 'noupdate', None, _('do not update the new working directory')),
2623 ('r', 'rev', [],
2624 _('a changeset you would like to have after cloning')),
2625 ('', 'pull', None, _('use pull protocol to copy metadata')),
2626 ('', 'uncompressed', None,
2627 _('use uncompressed transfer (fast over LAN)')),
2628 ] + remoteopts,
2629 _('hg clone [OPTION]... SOURCE [DEST]')),
2630 "^commit|ci":
2631 (commit,
2632 [('A', 'addremove', None,
2633 _('mark new/missing files as added/removed before committing')),
2634 ('d', 'date', '', _('record datecode as commit date')),
2635 ('u', 'user', '', _('record user as committer')),
2636 ] + walkopts + commitopts,
2637 _('hg commit [OPTION]... [FILE]...')),
2638 "copy|cp":
2639 (copy,
2640 [('A', 'after', None, _('record a copy that has already occurred')),
2641 ('f', 'force', None,
2642 _('forcibly copy over an existing managed file')),
2643 ] + walkopts + dryrunopts,
2644 _('hg copy [OPTION]... [SOURCE]... DEST')),
2645 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2646 "debugcomplete":
2647 (debugcomplete,
2648 [('o', 'options', None, _('show the command options'))],
2649 _('debugcomplete [-o] CMD')),
2650 "debuginstall": (debuginstall, [], _('debuginstall')),
2651 "debugrebuildstate":
2652 (debugrebuildstate,
2653 [('r', 'rev', '', _('revision to rebuild to'))],
2654 _('debugrebuildstate [-r REV] [REV]')),
2655 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2656 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2657 "debugstate": (debugstate, [], _('debugstate')),
2658 "debugdate":
2659 (debugdate,
2660 [('e', 'extended', None, _('try extended date formats'))],
2661 _('debugdate [-e] DATE [RANGE]')),
2662 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2663 "debugindex": (debugindex, [], _('debugindex FILE')),
2664 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2665 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2666 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2667 "^diff":
2668 (diff,
2669 [('r', 'rev', [], _('revision')),
2670 ('a', 'text', None, _('treat all files as text')),
2671 ('p', 'show-function', None,
2672 _('show which function each change is in')),
2673 ('g', 'git', None, _('use git extended diff format')),
2674 ('', 'nodates', None, _("don't include dates in diff headers")),
2675 ('w', 'ignore-all-space', None,
2676 _('ignore white space when comparing lines')),
2677 ('b', 'ignore-space-change', None,
2678 _('ignore changes in the amount of white space')),
2679 ('B', 'ignore-blank-lines', None,
2680 _('ignore changes whose lines are all blank')),
2681 ] + walkopts,
2682 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2683 "^export":
2683 "^export":
2684 (export,
2684 (export,
2685 [('o', 'output', '', _('print output to file with formatted name')),
2685 [('o', 'output', '', _('print output to file with formatted name')),
2686 ('a', 'text', None, _('treat all files as text')),
2686 ('a', 'text', None, _('treat all files as text')),
2687 ('g', 'git', None, _('use git extended diff format')),
2687 ('g', 'git', None, _('use git extended diff format')),
2688 ('', 'nodates', None, _("don't include dates in diff headers")),
2688 ('', 'nodates', None, _("don't include dates in diff headers")),
2689 ('', 'switch-parent', None, _('diff against the second parent'))],
2689 ('', 'switch-parent', None, _('diff against the second parent'))],
2690 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2690 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2691 "grep":
2691 "grep":
2692 (grep,
2692 (grep,
2693 [('0', 'print0', None, _('end fields with NUL')),
2693 [('0', 'print0', None, _('end fields with NUL')),
2694 ('', 'all', None, _('print all revisions that match')),
2694 ('', 'all', None, _('print all revisions that match')),
2695 ('f', 'follow', None,
2695 ('f', 'follow', None,
2696 _('follow changeset history, or file history across copies and renames')),
2696 _('follow changeset history, or file history across copies and renames')),
2697 ('i', 'ignore-case', None, _('ignore case when matching')),
2697 ('i', 'ignore-case', None, _('ignore case when matching')),
2698 ('l', 'files-with-matches', None,
2698 ('l', 'files-with-matches', None,
2699 _('print only filenames and revs that match')),
2699 _('print only filenames and revs that match')),
2700 ('n', 'line-number', None, _('print matching line numbers')),
2700 ('n', 'line-number', None, _('print matching line numbers')),
2701 ('r', 'rev', [], _('search in given revision range')),
2701 ('r', 'rev', [], _('search in given revision range')),
2702 ('u', 'user', None, _('print user who committed change')),
2702 ('u', 'user', None, _('print user who committed change')),
2703 ] + walkopts,
2703 ] + walkopts,
2704 _('hg grep [OPTION]... PATTERN [FILE]...')),
2704 _('hg grep [OPTION]... PATTERN [FILE]...')),
2705 "heads":
2705 "heads":
2706 (heads,
2706 (heads,
2707 [('', 'style', '', _('display using template map file')),
2707 [('', 'style', '', _('display using template map file')),
2708 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2708 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2709 ('', 'template', '', _('display with template'))],
2709 ('', 'template', '', _('display with template'))],
2710 _('hg heads [-r REV]')),
2710 _('hg heads [-r REV]')),
2711 "help": (help_, [], _('hg help [COMMAND]')),
2711 "help": (help_, [], _('hg help [COMMAND]')),
2712 "identify|id": (identify, [], _('hg identify')),
2712 "identify|id": (identify, [], _('hg identify')),
2713 "import|patch":
2713 "import|patch":
2714 (import_,
2714 (import_,
2715 [('p', 'strip', 1,
2715 [('p', 'strip', 1,
2716 _('directory strip option for patch. This has the same\n'
2716 _('directory strip option for patch. This has the same\n'
2717 'meaning as the corresponding patch option')),
2717 'meaning as the corresponding patch option')),
2718 ('b', 'base', '', _('base path')),
2718 ('b', 'base', '', _('base path')),
2719 ('f', 'force', None,
2719 ('f', 'force', None,
2720 _('skip check for outstanding uncommitted changes'))] + commitopts,
2720 _('skip check for outstanding uncommitted changes'))] + commitopts,
2721 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2721 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2722 "incoming|in": (incoming,
2722 "incoming|in": (incoming,
2723 [('M', 'no-merges', None, _('do not show merges')),
2723 [('M', 'no-merges', None, _('do not show merges')),
2724 ('f', 'force', None,
2724 ('f', 'force', None,
2725 _('run even when remote repository is unrelated')),
2725 _('run even when remote repository is unrelated')),
2726 ('', 'style', '', _('display using template map file')),
2726 ('', 'style', '', _('display using template map file')),
2727 ('n', 'newest-first', None, _('show newest record first')),
2727 ('n', 'newest-first', None, _('show newest record first')),
2728 ('', 'bundle', '', _('file to store the bundles into')),
2728 ('', 'bundle', '', _('file to store the bundles into')),
2729 ('p', 'patch', None, _('show patch')),
2729 ('p', 'patch', None, _('show patch')),
2730 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2730 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2731 ('', 'template', '', _('display with template')),
2731 ('', 'template', '', _('display with template')),
2732 ] + remoteopts,
2732 ] + remoteopts,
2733 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2733 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2734 ' [--bundle FILENAME] [SOURCE]')),
2734 ' [--bundle FILENAME] [SOURCE]')),
2735 "^init":
2735 "^init":
2736 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2736 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2737 "locate":
2737 "locate":
2738 (locate,
2738 (locate,
2739 [('r', 'rev', '', _('search the repository as it stood at rev')),
2739 [('r', 'rev', '', _('search the repository as it stood at rev')),
2740 ('0', 'print0', None,
2740 ('0', 'print0', None,
2741 _('end filenames with NUL, for use with xargs')),
2741 _('end filenames with NUL, for use with xargs')),
2742 ('f', 'fullpath', None,
2742 ('f', 'fullpath', None,
2743 _('print complete paths from the filesystem root')),
2743 _('print complete paths from the filesystem root')),
2744 ] + walkopts,
2744 ] + walkopts,
2745 _('hg locate [OPTION]... [PATTERN]...')),
2745 _('hg locate [OPTION]... [PATTERN]...')),
2746 "^log|history":
2746 "^log|history":
2747 (log,
2747 (log,
2748 [('f', 'follow', None,
2748 [('f', 'follow', None,
2749 _('follow changeset history, or file history across copies and renames')),
2749 _('follow changeset history, or file history across copies and renames')),
2750 ('', 'follow-first', None,
2750 ('', 'follow-first', None,
2751 _('only follow the first parent of merge changesets')),
2751 _('only follow the first parent of merge changesets')),
2752 ('d', 'date', '', _('show revs matching date spec')),
2752 ('d', 'date', '', _('show revs matching date spec')),
2753 ('C', 'copies', None, _('show copied files')),
2753 ('C', 'copies', None, _('show copied files')),
2754 ('k', 'keyword', [], _('search for a keyword')),
2754 ('k', 'keyword', [], _('search for a keyword')),
2755 ('l', 'limit', '', _('limit number of changes displayed')),
2755 ('l', 'limit', '', _('limit number of changes displayed')),
2756 ('r', 'rev', [], _('show the specified revision or range')),
2756 ('r', 'rev', [], _('show the specified revision or range')),
2757 ('', 'removed', None, _('include revs where files were removed')),
2757 ('', 'removed', None, _('include revs where files were removed')),
2758 ('M', 'no-merges', None, _('do not show merges')),
2758 ('M', 'no-merges', None, _('do not show merges')),
2759 ('', 'style', '', _('display using template map file')),
2759 ('', 'style', '', _('display using template map file')),
2760 ('m', 'only-merges', None, _('show only merges')),
2760 ('m', 'only-merges', None, _('show only merges')),
2761 ('p', 'patch', None, _('show patch')),
2761 ('p', 'patch', None, _('show patch')),
2762 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2762 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2763 ('', 'template', '', _('display with template')),
2763 ('', 'template', '', _('display with template')),
2764 ] + walkopts,
2764 ] + walkopts,
2765 _('hg log [OPTION]... [FILE]')),
2765 _('hg log [OPTION]... [FILE]')),
2766 "manifest": (manifest, [], _('hg manifest [REV]')),
2766 "manifest": (manifest, [], _('hg manifest [REV]')),
2767 "merge":
2767 "merge":
2768 (merge,
2768 (merge,
2769 [('f', 'force', None, _('force a merge with outstanding changes'))],
2769 [('f', 'force', None, _('force a merge with outstanding changes'))],
2770 _('hg merge [-f] [REV]')),
2770 _('hg merge [-f] [REV]')),
2771 "outgoing|out": (outgoing,
2771 "outgoing|out": (outgoing,
2772 [('M', 'no-merges', None, _('do not show merges')),
2772 [('M', 'no-merges', None, _('do not show merges')),
2773 ('f', 'force', None,
2773 ('f', 'force', None,
2774 _('run even when remote repository is unrelated')),
2774 _('run even when remote repository is unrelated')),
2775 ('p', 'patch', None, _('show patch')),
2775 ('p', 'patch', None, _('show patch')),
2776 ('', 'style', '', _('display using template map file')),
2776 ('', 'style', '', _('display using template map file')),
2777 ('r', 'rev', [], _('a specific revision you would like to push')),
2777 ('r', 'rev', [], _('a specific revision you would like to push')),
2778 ('n', 'newest-first', None, _('show newest record first')),
2778 ('n', 'newest-first', None, _('show newest record first')),
2779 ('', 'template', '', _('display with template')),
2779 ('', 'template', '', _('display with template')),
2780 ] + remoteopts,
2780 ] + remoteopts,
2781 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2781 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2782 "^parents":
2782 "^parents":
2783 (parents,
2783 (parents,
2784 [('r', 'rev', '', _('show parents from the specified rev')),
2784 [('r', 'rev', '', _('show parents from the specified rev')),
2785 ('', 'style', '', _('display using template map file')),
2785 ('', 'style', '', _('display using template map file')),
2786 ('', 'template', '', _('display with template'))],
2786 ('', 'template', '', _('display with template'))],
2787 _('hg parents [-r REV] [FILE]')),
2787 _('hg parents [-r REV] [FILE]')),
2788 "paths": (paths, [], _('hg paths [NAME]')),
2788 "paths": (paths, [], _('hg paths [NAME]')),
2789 "^pull":
2789 "^pull":
2790 (pull,
2790 (pull,
2791 [('u', 'update', None,
2791 [('u', 'update', None,
2792 _('update to new tip if changesets were pulled')),
2792 _('update to new tip if changesets were pulled')),
2793 ('f', 'force', None,
2793 ('f', 'force', None,
2794 _('run even when remote repository is unrelated')),
2794 _('run even when remote repository is unrelated')),
2795 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2795 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2796 ] + remoteopts,
2796 ] + remoteopts,
2797 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2797 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2798 "^push":
2798 "^push":
2799 (push,
2799 (push,
2800 [('f', 'force', None, _('force push')),
2800 [('f', 'force', None, _('force push')),
2801 ('r', 'rev', [], _('a specific revision you would like to push')),
2801 ('r', 'rev', [], _('a specific revision you would like to push')),
2802 ] + remoteopts,
2802 ] + remoteopts,
2803 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2803 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2804 "debugrawcommit|rawcommit":
2804 "debugrawcommit|rawcommit":
2805 (rawcommit,
2805 (rawcommit,
2806 [('p', 'parent', [], _('parent')),
2806 [('p', 'parent', [], _('parent')),
2807 ('d', 'date', '', _('date code')),
2807 ('d', 'date', '', _('date code')),
2808 ('u', 'user', '', _('user')),
2808 ('u', 'user', '', _('user')),
2809 ('F', 'files', '', _('file list'))
2809 ('F', 'files', '', _('file list'))
2810 ] + commitopts,
2810 ] + commitopts,
2811 _('hg debugrawcommit [OPTION]... [FILE]...')),
2811 _('hg debugrawcommit [OPTION]... [FILE]...')),
2812 "recover": (recover, [], _('hg recover')),
2812 "recover": (recover, [], _('hg recover')),
2813 "^remove|rm":
2813 "^remove|rm":
2814 (remove,
2814 (remove,
2815 [('A', 'after', None, _('record remove that has already occurred')),
2815 [('A', 'after', None, _('record remove that has already occurred')),
2816 ('f', 'force', None, _('remove file even if modified')),
2816 ('f', 'force', None, _('remove file even if modified')),
2817 ] + walkopts,
2817 ] + walkopts,
2818 _('hg remove [OPTION]... FILE...')),
2818 _('hg remove [OPTION]... FILE...')),
2819 "rename|mv":
2819 "rename|mv":
2820 (rename,
2820 (rename,
2821 [('A', 'after', None, _('record a rename that has already occurred')),
2821 [('A', 'after', None, _('record a rename that has already occurred')),
2822 ('f', 'force', None,
2822 ('f', 'force', None,
2823 _('forcibly copy over an existing managed file')),
2823 _('forcibly copy over an existing managed file')),
2824 ] + walkopts + dryrunopts,
2824 ] + walkopts + dryrunopts,
2825 _('hg rename [OPTION]... SOURCE... DEST')),
2825 _('hg rename [OPTION]... SOURCE... DEST')),
2826 "^revert":
2826 "^revert":
2827 (revert,
2827 (revert,
2828 [('a', 'all', None, _('revert all changes when no arguments given')),
2828 [('a', 'all', None, _('revert all changes when no arguments given')),
2829 ('d', 'date', '', _('tipmost revision matching date')),
2829 ('d', 'date', '', _('tipmost revision matching date')),
2830 ('r', 'rev', '', _('revision to revert to')),
2830 ('r', 'rev', '', _('revision to revert to')),
2831 ('', 'no-backup', None, _('do not save backup copies of files')),
2831 ('', 'no-backup', None, _('do not save backup copies of files')),
2832 ] + walkopts + dryrunopts,
2832 ] + walkopts + dryrunopts,
2833 _('hg revert [-r REV] [NAME]...')),
2833 _('hg revert [-r REV] [NAME]...')),
2834 "rollback": (rollback, [], _('hg rollback')),
2834 "rollback": (rollback, [], _('hg rollback')),
2835 "root": (root, [], _('hg root')),
2835 "root": (root, [], _('hg root')),
2836 "showconfig|debugconfig":
2836 "showconfig|debugconfig":
2837 (showconfig,
2837 (showconfig,
2838 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2838 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2839 _('showconfig [-u] [NAME]...')),
2839 _('showconfig [-u] [NAME]...')),
2840 "^serve":
2840 "^serve":
2841 (serve,
2841 (serve,
2842 [('A', 'accesslog', '', _('name of access log file to write to')),
2842 [('A', 'accesslog', '', _('name of access log file to write to')),
2843 ('d', 'daemon', None, _('run server in background')),
2843 ('d', 'daemon', None, _('run server in background')),
2844 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2844 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2845 ('E', 'errorlog', '', _('name of error log file to write to')),
2845 ('E', 'errorlog', '', _('name of error log file to write to')),
2846 ('p', 'port', 0, _('port to use (default: 8000)')),
2846 ('p', 'port', 0, _('port to use (default: 8000)')),
2847 ('a', 'address', '', _('address to use')),
2847 ('a', 'address', '', _('address to use')),
2848 ('n', 'name', '',
2848 ('n', 'name', '',
2849 _('name to show in web pages (default: working dir)')),
2849 _('name to show in web pages (default: working dir)')),
2850 ('', 'webdir-conf', '', _('name of the webdir config file'
2850 ('', 'webdir-conf', '', _('name of the webdir config file'
2851 ' (serve more than one repo)')),
2851 ' (serve more than one repo)')),
2852 ('', 'pid-file', '', _('name of file to write process ID to')),
2852 ('', 'pid-file', '', _('name of file to write process ID to')),
2853 ('', 'stdio', None, _('for remote clients')),
2853 ('', 'stdio', None, _('for remote clients')),
2854 ('t', 'templates', '', _('web templates to use')),
2854 ('t', 'templates', '', _('web templates to use')),
2855 ('', 'style', '', _('template style to use')),
2855 ('', 'style', '', _('template style to use')),
2856 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2856 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2857 _('hg serve [OPTION]...')),
2857 _('hg serve [OPTION]...')),
2858 "^status|st":
2858 "^status|st":
2859 (status,
2859 (status,
2860 [('A', 'all', None, _('show status of all files')),
2860 [('A', 'all', None, _('show status of all files')),
2861 ('m', 'modified', None, _('show only modified files')),
2861 ('m', 'modified', None, _('show only modified files')),
2862 ('a', 'added', None, _('show only added files')),
2862 ('a', 'added', None, _('show only added files')),
2863 ('r', 'removed', None, _('show only removed files')),
2863 ('r', 'removed', None, _('show only removed files')),
2864 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2864 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2865 ('c', 'clean', None, _('show only files without changes')),
2865 ('c', 'clean', None, _('show only files without changes')),
2866 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2866 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2867 ('i', 'ignored', None, _('show ignored files')),
2867 ('i', 'ignored', None, _('show ignored files')),
2868 ('n', 'no-status', None, _('hide status prefix')),
2868 ('n', 'no-status', None, _('hide status prefix')),
2869 ('C', 'copies', None, _('show source of copied files')),
2869 ('C', 'copies', None, _('show source of copied files')),
2870 ('0', 'print0', None,
2870 ('0', 'print0', None,
2871 _('end filenames with NUL, for use with xargs')),
2871 _('end filenames with NUL, for use with xargs')),
2872 ('', 'rev', [], _('show difference from revision')),
2872 ('', 'rev', [], _('show difference from revision')),
2873 ] + walkopts,
2873 ] + walkopts,
2874 _('hg status [OPTION]... [FILE]...')),
2874 _('hg status [OPTION]... [FILE]...')),
2875 "tag":
2875 "tag":
2876 (tag,
2876 (tag,
2877 [('l', 'local', None, _('make the tag local')),
2877 [('l', 'local', None, _('make the tag local')),
2878 ('m', 'message', '', _('message for tag commit log entry')),
2878 ('m', 'message', '', _('message for tag commit log entry')),
2879 ('d', 'date', '', _('record datecode as commit date')),
2879 ('d', 'date', '', _('record datecode as commit date')),
2880 ('u', 'user', '', _('record user as committer')),
2881 ('r', 'rev', '', _('revision to tag'))],
2882 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2883 "tags": (tags, [], _('hg tags')),
2884 "tip":
2885 (tip,
2886 [('', 'style', '', _('display using template map file')),
2887 ('p', 'patch', None, _('show patch')),
2888 ('', 'template', '', _('display with template'))],
2889 _('hg tip [-p]')),
2890 "unbundle":
2891 (unbundle,
2892 [('u', 'update', None,
2893 _('update to new tip if changesets were unbundled'))],
2894 _('hg unbundle [-u] FILE')),
2895 "^update|up|checkout|co":
2896 (update,
2897 [('C', 'clean', None, _('overwrite locally modified files')),
2898 ('d', 'date', '', _('tipmost revision matching date'))],
2899 _('hg update [-C] [REV]')),
2900 "verify": (verify, [], _('hg verify')),
2901 "version": (version_, [], _('hg version')),
2902 }
2903
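# Each table entry maps a key of the form "name|alias1|alias2" to a tuple
# (function, option list, usage synopsis).  findpossible() below strips a
# leading '^' and splits the key on '|'; the '^' prefix appears to mark the
# commands shown in the abbreviated 'shortlist' help used by dispatch().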
2904 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2905 " debugindex debugindexdot debugdate debuginstall")
2906 optionalrepo = ("paths serve showconfig")
2907
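# norepo and optionalrepo above are space-separated strings split() in
# dispatch() below: commands named in norepo are called without a repository
# object, and those in optionalrepo tolerate hg.RepoError when no repository
# can be found.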
2908 def findpossible(ui, cmd):
2909 """
2910 Return cmd -> (aliases, command table entry)
2911 for each matching command.
2912 Return debug commands (or their aliases) only if no normal command matches.
2913 """
2914 choice = {}
2915 debugchoice = {}
2916 for e in table.keys():
2917 aliases = e.lstrip("^").split("|")
2918 found = None
2919 if cmd in aliases:
2920 found = cmd
2921 elif not ui.config("ui", "strict"):
2922 for a in aliases:
2923 if a.startswith(cmd):
2924 found = a
2925 break
2926 if found is not None:
2927 if aliases[0].startswith("debug") or found.startswith("debug"):
2928 debugchoice[found] = (aliases, table[e])
2929 else:
2930 choice[found] = (aliases, table[e])
2931
2932 if not choice and debugchoice:
2933 choice = debugchoice
2934
2935 return choice
2936
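# findpossible() above, illustrated: with ui.strict unset, "hg stat"
# prefix-matches "status", while exact aliases such as "st" or "co" match
# directly; a prefix like "de" matches only debug commands, so findcmd()
# below reports it as ambiguous.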
2937 def findcmd(ui, cmd):
2938 """Return (aliases, command table entry) for command string."""
2939 choice = findpossible(ui, cmd)
2940
2941 if choice.has_key(cmd):
2942 return choice[cmd]
2943
2944 if len(choice) > 1:
2945 clist = choice.keys()
2946 clist.sort()
2947 raise AmbiguousCommand(cmd, clist)
2948
2949 if choice:
2950 return choice.values()[0]
2951
2952 raise UnknownCommand(cmd)
2953
2954 def catchterm(*args):
2955 raise util.SignalInterrupt
2956
2957 def run():
2958 sys.exit(dispatch(sys.argv[1:]))
2959
2960 class ParseError(Exception):
2961 """Exception raised on errors in parsing the command line."""
2962
2963 def parse(ui, args):
2964 options = {}
2965 cmdoptions = {}
2966
2967 try:
2968 args = fancyopts.fancyopts(args, globalopts, options)
2969 except fancyopts.getopt.GetoptError, inst:
2970 raise ParseError(None, inst)
2971
2972 if args:
2973 cmd, args = args[0], args[1:]
2974 aliases, i = findcmd(ui, cmd)
2975 cmd = aliases[0]
2976 defaults = ui.config("defaults", cmd)
2977 if defaults:
2978 args = shlex.split(defaults) + args
2979 c = list(i[1])
2980 else:
2981 cmd = None
2982 c = []
2983
2984 # combine global options into local
2985 for o in globalopts:
2986 c.append((o[0], o[1], options[o[1]], o[3]))
2987
2988 try:
2989 args = fancyopts.fancyopts(args, c, cmdoptions)
2990 except fancyopts.getopt.GetoptError, inst:
2991 raise ParseError(cmd, inst)
2992
2993 # separate global options back out
2994 for o in globalopts:
2995 n = o[1]
2996 options[n] = cmdoptions[n]
2997 del cmdoptions[n]
2998
2999 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3000
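# parse() above, illustrated on the hypothetical command line "hg -q log -l 5":
# the global -q is consumed first, "log" is resolved via findcmd(), any
# "[defaults] log = ..." value from the configuration is shlex-split and
# prepended to the arguments, and -l 5 is then parsed against log's own
# option list; global and per-command options are returned in separate
# dictionaries.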
3001 external = {}
3002
3003 def findext(name):
3004 '''return module with given extension name'''
3005 try:
3006 return sys.modules[external[name]]
3007 except KeyError:
3008 for k, v in external.iteritems():
3009 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3010 return sys.modules[v]
3011 raise KeyError(name)
3012
3013 def load_extensions(ui):
3014 added = []
3015 for ext_name, load_from_name in ui.extensions():
3016 if ext_name in external:
3017 continue
3018 try:
3019 if load_from_name:
3020 # the module will be loaded in sys.modules
3021 # choose a unique name so that it doesn't
3022 # conflict with other modules
3023 module_name = "hgext_%s" % ext_name.replace('.', '_')
3024 mod = imp.load_source(module_name, load_from_name)
3025 else:
3026 def importh(name):
3027 mod = __import__(name)
3028 components = name.split('.')
3029 for comp in components[1:]:
3030 mod = getattr(mod, comp)
3031 return mod
3032 try:
3033 mod = importh("hgext.%s" % ext_name)
3034 except ImportError:
3035 mod = importh(ext_name)
3036 external[ext_name] = mod.__name__
3037 added.append((mod, ext_name))
3038 except (util.SignalInterrupt, KeyboardInterrupt):
3039 raise
3040 except Exception, inst:
3041 ui.warn(_("*** failed to import extension %s: %s\n") %
3042 (ext_name, inst))
3043 if ui.print_exc():
3044 return 1
3045
3046 for mod, name in added:
3047 uisetup = getattr(mod, 'uisetup', None)
3048 if uisetup:
3049 uisetup(ui)
3050 cmdtable = getattr(mod, 'cmdtable', {})
3051 for t in cmdtable:
3052 if t in table:
3053 ui.warn(_("module %s overrides %s\n") % (name, t))
3054 table.update(cmdtable)
3055
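# load_extensions() above, sketched: an "[extensions]" entry that gives a
# path (e.g. "myext = /path/to/myext.py", hypothetical) is loaded with
# imp.load_source() under the name "hgext_myext"; an entry without a path is
# imported as "hgext.<name>" and, failing that, as "<name>".  After import,
# each extension's optional uisetup() hook runs and its cmdtable is merged
# into the global command table, warning on overrides.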
3056 def parseconfig(config):
3057 """parse the --config options from the command line"""
3058 parsed = []
3059 for cfg in config:
3060 try:
3061 name, value = cfg.split('=', 1)
3062 section, name = name.split('.', 1)
3063 if not section or not name:
3064 raise IndexError
3065 parsed.append((section, name, value))
3066 except (IndexError, ValueError):
3067 raise util.Abort(_('malformed --config option: %s') % cfg)
3068 return parsed
3069
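# parseconfig() above, illustrated: --config ui.username=alice (hypothetical
# value) yields the tuple ('ui', 'username', 'alice'); an argument without
# both a section and a name, such as --config username=alice, aborts as
# malformed.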
3070 def dispatch(args):
3071 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3072 num = getattr(signal, name, None)
3073 if num: signal.signal(num, catchterm)
3074
3075 try:
3076 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3077 except util.Abort, inst:
3078 sys.stderr.write(_("abort: %s\n") % inst)
3079 return -1
3080
3081 load_extensions(u)
3082 u.addreadhook(load_extensions)
3083
3084 try:
3085 cmd, func, args, options, cmdoptions = parse(u, args)
3086 if options["encoding"]:
3087 util._encoding = options["encoding"]
3088 if options["encodingmode"]:
3089 util._encodingmode = options["encodingmode"]
3090 if options["time"]:
3091 def get_times():
3092 t = os.times()
3093 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3094 t = (t[0], t[1], t[2], t[3], time.clock())
3095 return t
3096 s = get_times()
3097 def print_time():
3098 t = get_times()
3099 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3100 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3101 atexit.register(print_time)
3102
3103 # enter the debugger before command execution
3104 if options['debugger']:
3105 pdb.set_trace()
3106
3107 try:
3108 if options['cwd']:
3109 os.chdir(options['cwd'])
3110
3111 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3112 not options["noninteractive"], options["traceback"],
3113 parseconfig(options["config"]))
3114
3115 path = u.expandpath(options["repository"]) or ""
3116 repo = path and hg.repository(u, path=path) or None
3117 if repo and not repo.local():
3118 raise util.Abort(_("repository '%s' is not local") % path)
3119
3120 if options['help']:
3121 return help_(u, cmd, options['version'])
3122 elif options['version']:
3123 return version_(u)
3124 elif not cmd:
3125 return help_(u, 'shortlist')
3126
3127 if cmd not in norepo.split():
3128 try:
3129 if not repo:
3130 repo = hg.repository(u, path=path)
3131 u = repo.ui
3132 for name in external.itervalues():
3133 mod = sys.modules[name]
3134 if hasattr(mod, 'reposetup'):
3135 mod.reposetup(u, repo)
3136 hg.repo_setup_hooks.append(mod.reposetup)
3137 except hg.RepoError:
3138 if cmd not in optionalrepo.split():
3139 raise
3140 d = lambda: func(u, repo, *args, **cmdoptions)
3141 else:
3142 d = lambda: func(u, *args, **cmdoptions)
3143
3144 try:
3145 if options['profile']:
3146 import hotshot, hotshot.stats
3147 prof = hotshot.Profile("hg.prof")
3148 try:
3149 try:
3150 return prof.runcall(d)
3151 except:
3152 try:
3153 u.warn(_('exception raised - generating '
3154 'profile anyway\n'))
3155 except:
3156 pass
3157 raise
3158 finally:
3159 prof.close()
3160 stats = hotshot.stats.load("hg.prof")
3161 stats.strip_dirs()
3162 stats.sort_stats('time', 'calls')
3163 stats.print_stats(40)
3164 elif options['lsprof']:
3165 try:
3166 from mercurial import lsprof
3167 except ImportError:
3168 raise util.Abort(_(
3169 'lsprof not available - install from '
3170 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3171 p = lsprof.Profiler()
3172 p.enable(subcalls=True)
3173 try:
3174 return d()
3175 finally:
3176 p.disable()
3177 stats = lsprof.Stats(p.getstats())
3178 stats.sort()
3179 stats.pprint(top=10, file=sys.stderr, climit=5)
3180 else:
3181 return d()
3182 finally:
3183 u.flush()
3184 except:
3184 except:
3185 # enter the debugger when we hit an exception
3185 # enter the debugger when we hit an exception
3186 if options['debugger']:
3186 if options['debugger']:
3187 pdb.post_mortem(sys.exc_info()[2])
3187 pdb.post_mortem(sys.exc_info()[2])
3188 u.print_exc()
3188 u.print_exc()
3189 raise
3189 raise
3190 except ParseError, inst:
3190 except ParseError, inst:
3191 if inst.args[0]:
3191 if inst.args[0]:
3192 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3192 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3193 help_(u, inst.args[0])
3193 help_(u, inst.args[0])
3194 else:
3194 else:
3195 u.warn(_("hg: %s\n") % inst.args[1])
3195 u.warn(_("hg: %s\n") % inst.args[1])
3196 help_(u, 'shortlist')
3196 help_(u, 'shortlist')
3197 except AmbiguousCommand, inst:
3197 except AmbiguousCommand, inst:
3198 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3198 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3199 (inst.args[0], " ".join(inst.args[1])))
3199 (inst.args[0], " ".join(inst.args[1])))
3200 except UnknownCommand, inst:
3200 except UnknownCommand, inst:
3201 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3201 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3202 help_(u, 'shortlist')
3202 help_(u, 'shortlist')
3203 except hg.RepoError, inst:
3203 except hg.RepoError, inst:
3204 u.warn(_("abort: %s!\n") % inst)
3204 u.warn(_("abort: %s!\n") % inst)
3205 except lock.LockHeld, inst:
3205 except lock.LockHeld, inst:
3206 if inst.errno == errno.ETIMEDOUT:
3206 if inst.errno == errno.ETIMEDOUT:
3207 reason = _('timed out waiting for lock held by %s') % inst.locker
3207 reason = _('timed out waiting for lock held by %s') % inst.locker
3208 else:
3208 else:
3209 reason = _('lock held by %s') % inst.locker
3209 reason = _('lock held by %s') % inst.locker
3210 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3210 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3211 except lock.LockUnavailable, inst:
3211 except lock.LockUnavailable, inst:
3212 u.warn(_("abort: could not lock %s: %s\n") %
3212 u.warn(_("abort: could not lock %s: %s\n") %
3213 (inst.desc or inst.filename, inst.strerror))
3213 (inst.desc or inst.filename, inst.strerror))
3214 except revlog.RevlogError, inst:
3214 except revlog.RevlogError, inst:
3215 u.warn(_("abort: %s!\n") % inst)
3215 u.warn(_("abort: %s!\n") % inst)
3216 except util.SignalInterrupt:
3216 except util.SignalInterrupt:
3217 u.warn(_("killed!\n"))
3217 u.warn(_("killed!\n"))
3218 except KeyboardInterrupt:
3218 except KeyboardInterrupt:
3219 try:
3219 try:
3220 u.warn(_("interrupted!\n"))
3220 u.warn(_("interrupted!\n"))
3221 except IOError, inst:
3221 except IOError, inst:
3222 if inst.errno == errno.EPIPE:
3222 if inst.errno == errno.EPIPE:
3223 if u.debugflag:
3223 if u.debugflag:
3224 u.warn(_("\nbroken pipe\n"))
3224 u.warn(_("\nbroken pipe\n"))
3225 else:
3225 else:
3226 raise
3226 raise
3227 except IOError, inst:
3227 except IOError, inst:
3228 if hasattr(inst, "code"):
3228 if hasattr(inst, "code"):
3229 u.warn(_("abort: %s\n") % inst)
3229 u.warn(_("abort: %s\n") % inst)
3230 elif hasattr(inst, "reason"):
3230 elif hasattr(inst, "reason"):
3231 u.warn(_("abort: error: %s\n") % inst.reason[1])
3231 u.warn(_("abort: error: %s\n") % inst.reason[1])
3232 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3232 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3233 if u.debugflag:
3233 if u.debugflag:
3234 u.warn(_("broken pipe\n"))
3234 u.warn(_("broken pipe\n"))
3235 elif getattr(inst, "strerror", None):
3235 elif getattr(inst, "strerror", None):
3236 if getattr(inst, "filename", None):
3236 if getattr(inst, "filename", None):
3237 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3237 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3238 else:
3238 else:
3239 u.warn(_("abort: %s\n") % inst.strerror)
3239 u.warn(_("abort: %s\n") % inst.strerror)
3240 else:
3240 else:
3241 raise
3241 raise
3242 except OSError, inst:
3242 except OSError, inst:
3243 if getattr(inst, "filename", None):
3243 if getattr(inst, "filename", None):
3244 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3244 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3245 else:
3245 else:
3246 u.warn(_("abort: %s\n") % inst.strerror)
3246 u.warn(_("abort: %s\n") % inst.strerror)
3247 except util.UnexpectedOutput, inst:
3247 except util.UnexpectedOutput, inst:
3248 u.warn(_("abort: %s") % inst[0])
3248 u.warn(_("abort: %s") % inst[0])
3249 if not isinstance(inst[1], basestring):
3249 if not isinstance(inst[1], basestring):
3250 u.warn(" %r\n" % (inst[1],))
3250 u.warn(" %r\n" % (inst[1],))
3251 elif not inst[1]:
3251 elif not inst[1]:
3252 u.warn(_(" empty string\n"))
3252 u.warn(_(" empty string\n"))
3253 else:
3253 else:
3254 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3254 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3255 except util.Abort, inst:
3255 except util.Abort, inst:
3256 u.warn(_("abort: %s\n") % inst)
3256 u.warn(_("abort: %s\n") % inst)
3257 except TypeError, inst:
3257 except TypeError, inst:
3258 # was this an argument error?
3258 # was this an argument error?
3259 tb = traceback.extract_tb(sys.exc_info()[2])
3259 tb = traceback.extract_tb(sys.exc_info()[2])
3260 if len(tb) > 2: # no
3260 if len(tb) > 2: # no
3261 raise
3261 raise
3262 u.debug(inst, "\n")
3262 u.debug(inst, "\n")
3263 u.warn(_("%s: invalid arguments\n") % cmd)
3263 u.warn(_("%s: invalid arguments\n") % cmd)
3264 help_(u, cmd)
3264 help_(u, cmd)
3265 except SystemExit, inst:
3265 except SystemExit, inst:
3266 # Commands shouldn't sys.exit directly, but give a return code.
3266 # Commands shouldn't sys.exit directly, but give a return code.
3267 # Just in case, catch this and pass the exit code to the caller.
3267 # Just in case, catch this and pass the exit code to the caller.
3268 return inst.code
3268 return inst.code
3269 except:
3269 except:
3270 u.warn(_("** unknown exception encountered, details follow\n"))
3270 u.warn(_("** unknown exception encountered, details follow\n"))
3271 u.warn(_("** report bug details to "
3271 u.warn(_("** report bug details to "
3272 "http://www.selenic.com/mercurial/bts\n"))
3272 "http://www.selenic.com/mercurial/bts\n"))
3273 u.warn(_("** or mercurial@selenic.com\n"))
3273 u.warn(_("** or mercurial@selenic.com\n"))
3274 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3274 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3275 % version.get_version())
3275 % version.get_version())
3276 raise
3276 raise
3277
3277
3278 return -1
3278 return -1
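The --profile branch of the dispatcher above wraps the command callable in the standard-library hotshot profiler and then prints aggregated statistics. A minimal standalone sketch of the same pattern, run against a stand-in callable rather than a real hg command (the callable is illustrative):

import hotshot, hotshot.stats

def d():
    # stand-in for the command callable built by the dispatcher
    return sum(xrange(100000))

prof = hotshot.Profile("hg.prof")
try:
    result = prof.runcall(d)
finally:
    prof.close()

stats = hotshot.stats.load("hg.prof")
stats.strip_dirs()
stats.sort_stats('time', 'calls')
stats.print_stats(40)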
@@ -1,509 +1,508 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import demandload
10 import ancestor, bdiff, repo, revlog, util, os
11 demandload(globals(), "ancestor bdiff repo revlog util os")
12
11
13 class changectx(object):
12 class changectx(object):
14 """A changecontext object makes access to data related to a particular
13 """A changecontext object makes access to data related to a particular
15 changeset convenient."""
14 changeset convenient."""
16 def __init__(self, repo, changeid=None):
15 def __init__(self, repo, changeid=None):
17 """changeid is a revision number, node, or tag"""
16 """changeid is a revision number, node, or tag"""
18 self._repo = repo
17 self._repo = repo
19
18
20 if not changeid and changeid != 0:
19 if not changeid and changeid != 0:
21 p1, p2 = self._repo.dirstate.parents()
20 p1, p2 = self._repo.dirstate.parents()
22 self._rev = self._repo.changelog.rev(p1)
21 self._rev = self._repo.changelog.rev(p1)
23 if self._rev == -1:
22 if self._rev == -1:
24 changeid = 'tip'
23 changeid = 'tip'
25 else:
24 else:
26 self._node = p1
25 self._node = p1
27 return
26 return
28
27
29 self._node = self._repo.lookup(changeid)
28 self._node = self._repo.lookup(changeid)
30 self._rev = self._repo.changelog.rev(self._node)
29 self._rev = self._repo.changelog.rev(self._node)
31
30
32 def __str__(self):
31 def __str__(self):
33 return short(self.node())
32 return short(self.node())
34
33
35 def __repr__(self):
34 def __repr__(self):
36 return "<changectx %s>" % str(self)
35 return "<changectx %s>" % str(self)
37
36
38 def __eq__(self, other):
37 def __eq__(self, other):
39 try:
38 try:
40 return self._rev == other._rev
39 return self._rev == other._rev
41 except AttributeError:
40 except AttributeError:
42 return False
41 return False
43
42
44 def __nonzero__(self):
43 def __nonzero__(self):
45 return self._rev != nullrev
44 return self._rev != nullrev
46
45
47 def __getattr__(self, name):
46 def __getattr__(self, name):
48 if name == '_changeset':
47 if name == '_changeset':
49 self._changeset = self._repo.changelog.read(self.node())
48 self._changeset = self._repo.changelog.read(self.node())
50 return self._changeset
49 return self._changeset
51 elif name == '_manifest':
50 elif name == '_manifest':
52 self._manifest = self._repo.manifest.read(self._changeset[0])
51 self._manifest = self._repo.manifest.read(self._changeset[0])
53 return self._manifest
52 return self._manifest
54 elif name == '_manifestdelta':
53 elif name == '_manifestdelta':
55 md = self._repo.manifest.readdelta(self._changeset[0])
54 md = self._repo.manifest.readdelta(self._changeset[0])
56 self._manifestdelta = md
55 self._manifestdelta = md
57 return self._manifestdelta
56 return self._manifestdelta
58 else:
57 else:
59 raise AttributeError, name
58 raise AttributeError, name
60
59
61 def changeset(self): return self._changeset
60 def changeset(self): return self._changeset
62 def manifest(self): return self._manifest
61 def manifest(self): return self._manifest
63
62
64 def rev(self): return self._rev
63 def rev(self): return self._rev
65 def node(self): return self._node
64 def node(self): return self._node
66 def user(self): return self._changeset[1]
65 def user(self): return self._changeset[1]
67 def date(self): return self._changeset[2]
66 def date(self): return self._changeset[2]
68 def files(self): return self._changeset[3]
67 def files(self): return self._changeset[3]
69 def description(self): return self._changeset[4]
68 def description(self): return self._changeset[4]
70 def branch(self): return self._changeset[5].get("branch", "")
69 def branch(self): return self._changeset[5].get("branch", "")
71
70
72 def parents(self):
71 def parents(self):
73 """return contexts for each parent changeset"""
72 """return contexts for each parent changeset"""
74 p = self._repo.changelog.parents(self._node)
73 p = self._repo.changelog.parents(self._node)
75 return [changectx(self._repo, x) for x in p]
74 return [changectx(self._repo, x) for x in p]
76
75
77 def children(self):
76 def children(self):
78 """return contexts for each child changeset"""
77 """return contexts for each child changeset"""
79 c = self._repo.changelog.children(self._node)
78 c = self._repo.changelog.children(self._node)
80 return [changectx(self._repo, x) for x in c]
79 return [changectx(self._repo, x) for x in c]
81
80
82 def filenode(self, path):
81 def filenode(self, path):
83 if '_manifest' in self.__dict__:
82 if '_manifest' in self.__dict__:
84 try:
83 try:
85 return self._manifest[path]
84 return self._manifest[path]
86 except KeyError:
85 except KeyError:
87 raise repo.LookupError(_("'%s' not found in manifest") % path)
86 raise repo.LookupError(_("'%s' not found in manifest") % path)
88 if '_manifestdelta' in self.__dict__ or path in self.files():
87 if '_manifestdelta' in self.__dict__ or path in self.files():
89 if path in self._manifestdelta:
88 if path in self._manifestdelta:
90 return self._manifestdelta[path]
89 return self._manifestdelta[path]
91 node, flag = self._repo.manifest.find(self._changeset[0], path)
90 node, flag = self._repo.manifest.find(self._changeset[0], path)
92 if not node:
91 if not node:
93 raise repo.LookupError(_("'%s' not found in manifest") % path)
92 raise repo.LookupError(_("'%s' not found in manifest") % path)
94
93
95 return node
94 return node
96
95
97 def filectx(self, path, fileid=None):
96 def filectx(self, path, fileid=None):
98 """get a file context from this changeset"""
97 """get a file context from this changeset"""
99 if fileid is None:
98 if fileid is None:
100 fileid = self.filenode(path)
99 fileid = self.filenode(path)
101 return filectx(self._repo, path, fileid=fileid, changectx=self)
100 return filectx(self._repo, path, fileid=fileid, changectx=self)
102
101
103 def filectxs(self):
102 def filectxs(self):
104 """generate a file context for each file in this changeset's
103 """generate a file context for each file in this changeset's
105 manifest"""
104 manifest"""
106 mf = self.manifest()
105 mf = self.manifest()
107 m = mf.keys()
106 m = mf.keys()
108 m.sort()
107 m.sort()
109 for f in m:
108 for f in m:
110 yield self.filectx(f, fileid=mf[f])
109 yield self.filectx(f, fileid=mf[f])
111
110
112 def ancestor(self, c2):
111 def ancestor(self, c2):
113 """
112 """
114 return the ancestor context of self and c2
113 return the ancestor context of self and c2
115 """
114 """
116 n = self._repo.changelog.ancestor(self._node, c2._node)
115 n = self._repo.changelog.ancestor(self._node, c2._node)
117 return changectx(self._repo, n)
116 return changectx(self._repo, n)
118
117
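Both changectx and filectx above defer their expensive lookups (changelog, manifest, and filelog reads) by computing attributes on first access inside __getattr__, which Python only invokes for attributes not yet present on the instance. A stripped-down sketch of that caching idiom, with a hypothetical loader standing in for the real reads:

def expensive_load():
    # stand-in for a changelog or manifest read
    return {'answer': 42}

class lazyattrs(object):
    def __getattr__(self, name):
        if name == '_data':
            # computed once, then found as a plain instance attribute
            self._data = expensive_load()
            return self._data
        raise AttributeError, name

obj = lazyattrs()
print obj._data   # triggers __getattr__ and the load
print obj._data   # ordinary attribute lookup, no reload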
119 class filectx(object):
118 class filectx(object):
120 """A filecontext object makes access to data related to a particular
119 """A filecontext object makes access to data related to a particular
121 filerevision convenient."""
120 filerevision convenient."""
122 def __init__(self, repo, path, changeid=None, fileid=None,
121 def __init__(self, repo, path, changeid=None, fileid=None,
123 filelog=None, changectx=None):
122 filelog=None, changectx=None):
124 """changeid can be a changeset revision, node, or tag.
123 """changeid can be a changeset revision, node, or tag.
125 fileid can be a file revision or node."""
124 fileid can be a file revision or node."""
126 self._repo = repo
125 self._repo = repo
127 self._path = path
126 self._path = path
128
127
129 assert changeid is not None or fileid is not None
128 assert changeid is not None or fileid is not None
130
129
131 if filelog:
130 if filelog:
132 self._filelog = filelog
131 self._filelog = filelog
133 if changectx:
132 if changectx:
134 self._changectx = changectx
133 self._changectx = changectx
135 self._changeid = changectx.node()
134 self._changeid = changectx.node()
136
135
137 if fileid is None:
136 if fileid is None:
138 self._changeid = changeid
137 self._changeid = changeid
139 else:
138 else:
140 self._fileid = fileid
139 self._fileid = fileid
141
140
142 def __getattr__(self, name):
141 def __getattr__(self, name):
143 if name == '_changectx':
142 if name == '_changectx':
144 self._changectx = changectx(self._repo, self._changeid)
143 self._changectx = changectx(self._repo, self._changeid)
145 return self._changectx
144 return self._changectx
146 elif name == '_filelog':
145 elif name == '_filelog':
147 self._filelog = self._repo.file(self._path)
146 self._filelog = self._repo.file(self._path)
148 return self._filelog
147 return self._filelog
149 elif name == '_changeid':
148 elif name == '_changeid':
150 self._changeid = self._filelog.linkrev(self._filenode)
149 self._changeid = self._filelog.linkrev(self._filenode)
151 return self._changeid
150 return self._changeid
152 elif name == '_filenode':
151 elif name == '_filenode':
153 try:
152 try:
154 if '_fileid' in self.__dict__:
153 if '_fileid' in self.__dict__:
155 self._filenode = self._filelog.lookup(self._fileid)
154 self._filenode = self._filelog.lookup(self._fileid)
156 else:
155 else:
157 self._filenode = self._changectx.filenode(self._path)
156 self._filenode = self._changectx.filenode(self._path)
158 except revlog.RevlogError, inst:
157 except revlog.RevlogError, inst:
159 raise repo.LookupError(str(inst))
158 raise repo.LookupError(str(inst))
160 return self._filenode
159 return self._filenode
161 elif name == '_filerev':
160 elif name == '_filerev':
162 self._filerev = self._filelog.rev(self._filenode)
161 self._filerev = self._filelog.rev(self._filenode)
163 return self._filerev
162 return self._filerev
164 else:
163 else:
165 raise AttributeError, name
164 raise AttributeError, name
166
165
167 def __nonzero__(self):
166 def __nonzero__(self):
168 try:
167 try:
169 n = self._filenode
168 n = self._filenode
170 return True
169 return True
171 except repo.LookupError:
170 except repo.LookupError:
172 # file is missing
171 # file is missing
173 return False
172 return False
174
173
175 def __str__(self):
174 def __str__(self):
176 return "%s@%s" % (self.path(), short(self.node()))
175 return "%s@%s" % (self.path(), short(self.node()))
177
176
178 def __repr__(self):
177 def __repr__(self):
179 return "<filectx %s>" % str(self)
178 return "<filectx %s>" % str(self)
180
179
181 def __eq__(self, other):
180 def __eq__(self, other):
182 try:
181 try:
183 return (self._path == other._path
182 return (self._path == other._path
184 and self._changeid == other._changeid)
183 and self._changeid == other._changeid)
185 except AttributeError:
184 except AttributeError:
186 return False
185 return False
187
186
188 def filectx(self, fileid):
187 def filectx(self, fileid):
189 '''opens an arbitrary revision of the file without
188 '''opens an arbitrary revision of the file without
190 opening a new filelog'''
189 opening a new filelog'''
191 return filectx(self._repo, self._path, fileid=fileid,
190 return filectx(self._repo, self._path, fileid=fileid,
192 filelog=self._filelog)
191 filelog=self._filelog)
193
192
194 def filerev(self): return self._filerev
193 def filerev(self): return self._filerev
195 def filenode(self): return self._filenode
194 def filenode(self): return self._filenode
196 def filelog(self): return self._filelog
195 def filelog(self): return self._filelog
197
196
198 def rev(self):
197 def rev(self):
199 if '_changectx' in self.__dict__:
198 if '_changectx' in self.__dict__:
200 return self._changectx.rev()
199 return self._changectx.rev()
201 return self._filelog.linkrev(self._filenode)
200 return self._filelog.linkrev(self._filenode)
202
201
203 def node(self): return self._changectx.node()
202 def node(self): return self._changectx.node()
204 def user(self): return self._changectx.user()
203 def user(self): return self._changectx.user()
205 def date(self): return self._changectx.date()
204 def date(self): return self._changectx.date()
206 def files(self): return self._changectx.files()
205 def files(self): return self._changectx.files()
207 def description(self): return self._changectx.description()
206 def description(self): return self._changectx.description()
208 def branch(self): return self._changectx.branch()
207 def branch(self): return self._changectx.branch()
209 def manifest(self): return self._changectx.manifest()
208 def manifest(self): return self._changectx.manifest()
210 def changectx(self): return self._changectx
209 def changectx(self): return self._changectx
211
210
212 def data(self): return self._filelog.read(self._filenode)
211 def data(self): return self._filelog.read(self._filenode)
213 def renamed(self): return self._filelog.renamed(self._filenode)
212 def renamed(self): return self._filelog.renamed(self._filenode)
214 def path(self): return self._path
213 def path(self): return self._path
215 def size(self): return self._filelog.size(self._filerev)
214 def size(self): return self._filelog.size(self._filerev)
216
215
217 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
216 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
218
217
219 def parents(self):
218 def parents(self):
220 p = self._path
219 p = self._path
221 fl = self._filelog
220 fl = self._filelog
222 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
221 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
223
222
224 r = self.renamed()
223 r = self.renamed()
225 if r:
224 if r:
226 pl[0] = (r[0], r[1], None)
225 pl[0] = (r[0], r[1], None)
227
226
228 return [filectx(self._repo, p, fileid=n, filelog=l)
227 return [filectx(self._repo, p, fileid=n, filelog=l)
229 for p,n,l in pl if n != nullid]
228 for p,n,l in pl if n != nullid]
230
229
231 def children(self):
230 def children(self):
232 # hard for renames
231 # hard for renames
233 c = self._filelog.children(self._filenode)
232 c = self._filelog.children(self._filenode)
234 return [filectx(self._repo, self._path, fileid=x,
233 return [filectx(self._repo, self._path, fileid=x,
235 filelog=self._filelog) for x in c]
234 filelog=self._filelog) for x in c]
236
235
237 def annotate(self, follow=False):
236 def annotate(self, follow=False):
238 '''returns a list of tuples of (ctx, line) for each line
237 '''returns a list of tuples of (ctx, line) for each line
239 in the file, where ctx is the filectx of the node where
238 in the file, where ctx is the filectx of the node where
240 that line was last changed'''
239 that line was last changed'''
241
240
242 def decorate(text, rev):
241 def decorate(text, rev):
243 return ([rev] * len(text.splitlines()), text)
242 return ([rev] * len(text.splitlines()), text)
244
243
245 def pair(parent, child):
244 def pair(parent, child):
246 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
245 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
247 child[0][b1:b2] = parent[0][a1:a2]
246 child[0][b1:b2] = parent[0][a1:a2]
248 return child
247 return child
249
248
250 getlog = util.cachefunc(lambda x: self._repo.file(x))
249 getlog = util.cachefunc(lambda x: self._repo.file(x))
251 def getctx(path, fileid):
250 def getctx(path, fileid):
252 log = path == self._path and self._filelog or getlog(path)
251 log = path == self._path and self._filelog or getlog(path)
253 return filectx(self._repo, path, fileid=fileid, filelog=log)
252 return filectx(self._repo, path, fileid=fileid, filelog=log)
254 getctx = util.cachefunc(getctx)
253 getctx = util.cachefunc(getctx)
255
254
256 def parents(f):
255 def parents(f):
257 # we want to reuse filectx objects as much as possible
256 # we want to reuse filectx objects as much as possible
258 p = f._path
257 p = f._path
259 if f._filerev is None: # working dir
258 if f._filerev is None: # working dir
260 pl = [(n.path(), n.filerev()) for n in f.parents()]
259 pl = [(n.path(), n.filerev()) for n in f.parents()]
261 else:
260 else:
262 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
261 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
263
262
264 if follow:
263 if follow:
265 r = f.renamed()
264 r = f.renamed()
266 if r:
265 if r:
267 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
266 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
268
267
269 return [getctx(p, n) for p, n in pl if n != nullrev]
268 return [getctx(p, n) for p, n in pl if n != nullrev]
270
269
271 # use linkrev to find the first changeset where self appeared
270 # use linkrev to find the first changeset where self appeared
272 if self.rev() != self._filelog.linkrev(self._filenode):
271 if self.rev() != self._filelog.linkrev(self._filenode):
273 base = self.filectx(self.filerev())
272 base = self.filectx(self.filerev())
274 else:
273 else:
275 base = self
274 base = self
276
275
277 # find all ancestors
276 # find all ancestors
278 needed = {base: 1}
277 needed = {base: 1}
279 visit = [base]
278 visit = [base]
280 files = [base._path]
279 files = [base._path]
281 while visit:
280 while visit:
282 f = visit.pop(0)
281 f = visit.pop(0)
283 for p in parents(f):
282 for p in parents(f):
284 if p not in needed:
283 if p not in needed:
285 needed[p] = 1
284 needed[p] = 1
286 visit.append(p)
285 visit.append(p)
287 if p._path not in files:
286 if p._path not in files:
288 files.append(p._path)
287 files.append(p._path)
289 else:
288 else:
290 # count how many times we'll use this
289 # count how many times we'll use this
291 needed[p] += 1
290 needed[p] += 1
292
291
293 # sort by revision (per file) which is a topological order
292 # sort by revision (per file) which is a topological order
294 visit = []
293 visit = []
295 files.reverse()
294 files.reverse()
296 for f in files:
295 for f in files:
297 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
296 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
298 fn.sort()
297 fn.sort()
299 visit.extend(fn)
298 visit.extend(fn)
300 hist = {}
299 hist = {}
301
300
302 for r, f in visit:
301 for r, f in visit:
303 curr = decorate(f.data(), f)
302 curr = decorate(f.data(), f)
304 for p in parents(f):
303 for p in parents(f):
305 if p != nullid:
304 if p != nullid:
306 curr = pair(hist[p], curr)
305 curr = pair(hist[p], curr)
307 # trim the history of unneeded revs
306 # trim the history of unneeded revs
308 needed[p] -= 1
307 needed[p] -= 1
309 if not needed[p]:
308 if not needed[p]:
310 del hist[p]
309 del hist[p]
311 hist[f] = curr
310 hist[f] = curr
312
311
313 return zip(hist[f][0], hist[f][1].splitlines(1))
312 return zip(hist[f][0], hist[f][1].splitlines(1))
314
313
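annotate() above yields one (ctx, line) pair per line of the file, where ctx is the filectx that last changed that line. A minimal usage sketch, assuming a local repository in the current directory and a hypothetical tracked path:

import sys
from mercurial import hg, ui, context

u = ui.ui()
repo = hg.repository(u, ".")                  # assumes a local repository here
ctx = context.changectx(repo, 'tip')
fctx = ctx.filectx('mercurial/context.py')    # hypothetical tracked file
for c, line in fctx.annotate():
    # c.rev() is the revision that last touched this line; line keeps its newline
    sys.stdout.write("%4d: %s" % (c.rev(), line))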
315 def ancestor(self, fc2):
314 def ancestor(self, fc2):
316 """
315 """
317 find the common ancestor file context, if any, of self and fc2
316 find the common ancestor file context, if any, of self and fc2
318 """
317 """
319
318
320 acache = {}
319 acache = {}
321
320
322 # prime the ancestor cache for the working directory
321 # prime the ancestor cache for the working directory
323 for c in (self, fc2):
322 for c in (self, fc2):
324 if c._filerev == None:
323 if c._filerev == None:
325 pl = [(n.path(), n.filenode()) for n in c.parents()]
324 pl = [(n.path(), n.filenode()) for n in c.parents()]
326 acache[(c._path, None)] = pl
325 acache[(c._path, None)] = pl
327
326
328 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
327 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
329 def parents(vertex):
328 def parents(vertex):
330 if vertex in acache:
329 if vertex in acache:
331 return acache[vertex]
330 return acache[vertex]
332 f, n = vertex
331 f, n = vertex
333 if f not in flcache:
332 if f not in flcache:
334 flcache[f] = self._repo.file(f)
333 flcache[f] = self._repo.file(f)
335 fl = flcache[f]
334 fl = flcache[f]
336 pl = [(f, p) for p in fl.parents(n) if p != nullid]
335 pl = [(f, p) for p in fl.parents(n) if p != nullid]
337 re = fl.renamed(n)
336 re = fl.renamed(n)
338 if re:
337 if re:
339 pl.append(re)
338 pl.append(re)
340 acache[vertex] = pl
339 acache[vertex] = pl
341 return pl
340 return pl
342
341
343 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
342 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
344 v = ancestor.ancestor(a, b, parents)
343 v = ancestor.ancestor(a, b, parents)
345 if v:
344 if v:
346 f, n = v
345 f, n = v
347 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
346 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
348
347
349 return None
348 return None
350
349
351 class workingctx(changectx):
350 class workingctx(changectx):
352 """A workingctx object makes access to data related to
351 """A workingctx object makes access to data related to
353 the current working directory convenient."""
352 the current working directory convenient."""
354 def __init__(self, repo):
353 def __init__(self, repo):
355 self._repo = repo
354 self._repo = repo
356 self._rev = None
355 self._rev = None
357 self._node = None
356 self._node = None
358
357
359 def __str__(self):
358 def __str__(self):
360 return str(self._parents[0]) + "+"
359 return str(self._parents[0]) + "+"
361
360
362 def __nonzero__(self):
361 def __nonzero__(self):
363 return True
362 return True
364
363
365 def __getattr__(self, name):
364 def __getattr__(self, name):
366 if name == '_parents':
365 if name == '_parents':
367 self._parents = self._repo.parents()
366 self._parents = self._repo.parents()
368 return self._parents
367 return self._parents
369 if name == '_status':
368 if name == '_status':
370 self._status = self._repo.status()
369 self._status = self._repo.status()
371 return self._status
370 return self._status
372 if name == '_manifest':
371 if name == '_manifest':
373 self._buildmanifest()
372 self._buildmanifest()
374 return self._manifest
373 return self._manifest
375 else:
374 else:
376 raise AttributeError, name
375 raise AttributeError, name
377
376
378 def _buildmanifest(self):
377 def _buildmanifest(self):
379 """generate a manifest corresponding to the working directory"""
378 """generate a manifest corresponding to the working directory"""
380
379
381 man = self._parents[0].manifest().copy()
380 man = self._parents[0].manifest().copy()
382 copied = self._repo.dirstate.copies()
381 copied = self._repo.dirstate.copies()
383 modified, added, removed, deleted, unknown = self._status[:5]
382 modified, added, removed, deleted, unknown = self._status[:5]
384 for i, l in (("a", added), ("m", modified), ("u", unknown)):
383 for i, l in (("a", added), ("m", modified), ("u", unknown)):
385 for f in l:
384 for f in l:
386 man[f] = man.get(copied.get(f, f), nullid) + i
385 man[f] = man.get(copied.get(f, f), nullid) + i
387 try:
386 try:
388 man.set(f, util.is_exec(self._repo.wjoin(f), man.execf(f)))
387 man.set(f, util.is_exec(self._repo.wjoin(f), man.execf(f)))
389 except OSError:
388 except OSError:
390 pass
389 pass
391
390
392 for f in deleted + removed:
391 for f in deleted + removed:
393 if f in man:
392 if f in man:
394 del man[f]
393 del man[f]
395
394
396 self._manifest = man
395 self._manifest = man
397
396
398 def manifest(self): return self._manifest
397 def manifest(self): return self._manifest
399
398
400 def user(self): return self._repo.ui.username()
399 def user(self): return self._repo.ui.username()
401 def date(self): return util.makedate()
400 def date(self): return util.makedate()
402 def description(self): return ""
401 def description(self): return ""
403 def files(self):
402 def files(self):
404 f = self.modified() + self.added() + self.removed()
403 f = self.modified() + self.added() + self.removed()
405 f.sort()
404 f.sort()
406 return f
405 return f
407
406
408 def modified(self): return self._status[0]
407 def modified(self): return self._status[0]
409 def added(self): return self._status[1]
408 def added(self): return self._status[1]
410 def removed(self): return self._status[2]
409 def removed(self): return self._status[2]
411 def deleted(self): return self._status[3]
410 def deleted(self): return self._status[3]
412 def unknown(self): return self._status[4]
411 def unknown(self): return self._status[4]
413 def clean(self): return self._status[5]
412 def clean(self): return self._status[5]
414 def branch(self):
413 def branch(self):
415 try:
414 try:
416 return self._repo.opener("branch").read().strip()
415 return self._repo.opener("branch").read().strip()
417 except IOError:
416 except IOError:
418 return ""
417 return ""
419
418
420 def parents(self):
419 def parents(self):
421 """return contexts for each parent changeset"""
420 """return contexts for each parent changeset"""
422 return self._parents
421 return self._parents
423
422
424 def children(self):
423 def children(self):
425 return []
424 return []
426
425
427 def filectx(self, path):
426 def filectx(self, path):
428 """get a file context from the working directory"""
427 """get a file context from the working directory"""
429 return workingfilectx(self._repo, path, workingctx=self)
428 return workingfilectx(self._repo, path, workingctx=self)
430
429
431 def ancestor(self, c2):
430 def ancestor(self, c2):
432 """return the ancestor context of self and c2"""
431 """return the ancestor context of self and c2"""
433 return self._parents[0].ancestor(c2) # punt on two parents for now
432 return self._parents[0].ancestor(c2) # punt on two parents for now
434
433
435 class workingfilectx(filectx):
434 class workingfilectx(filectx):
436 """A workingfilectx object makes access to data related to a particular
435 """A workingfilectx object makes access to data related to a particular
437 file in the working directory convenient."""
436 file in the working directory convenient."""
438 def __init__(self, repo, path, filelog=None, workingctx=None):
437 def __init__(self, repo, path, filelog=None, workingctx=None):
439 """changeid can be a changeset revision, node, or tag.
438 """changeid can be a changeset revision, node, or tag.
440 fileid can be a file revision or node."""
439 fileid can be a file revision or node."""
441 self._repo = repo
440 self._repo = repo
442 self._path = path
441 self._path = path
443 self._changeid = None
442 self._changeid = None
444 self._filerev = self._filenode = None
443 self._filerev = self._filenode = None
445
444
446 if filelog:
445 if filelog:
447 self._filelog = filelog
446 self._filelog = filelog
448 if workingctx:
447 if workingctx:
449 self._changectx = workingctx
448 self._changectx = workingctx
450
449
451 def __getattr__(self, name):
450 def __getattr__(self, name):
452 if name == '_changectx':
451 if name == '_changectx':
453 self._changectx = workingctx(repo)
452 self._changectx = workingctx(repo)
454 return self._changectx
453 return self._changectx
455 elif name == '_repopath':
454 elif name == '_repopath':
456 self._repopath = (self._repo.dirstate.copied(self._path)
455 self._repopath = (self._repo.dirstate.copied(self._path)
457 or self._path)
456 or self._path)
458 return self._repopath
457 return self._repopath
459 elif name == '_filelog':
458 elif name == '_filelog':
460 self._filelog = self._repo.file(self._repopath)
459 self._filelog = self._repo.file(self._repopath)
461 return self._filelog
460 return self._filelog
462 else:
461 else:
463 raise AttributeError, name
462 raise AttributeError, name
464
463
465 def __nonzero__(self):
464 def __nonzero__(self):
466 return True
465 return True
467
466
468 def __str__(self):
467 def __str__(self):
469 return "%s@%s" % (self.path(), self._changectx)
468 return "%s@%s" % (self.path(), self._changectx)
470
469
471 def filectx(self, fileid):
470 def filectx(self, fileid):
472 '''opens an arbitrary revision of the file without
471 '''opens an arbitrary revision of the file without
473 opening a new filelog'''
472 opening a new filelog'''
474 return filectx(self._repo, self._repopath, fileid=fileid,
473 return filectx(self._repo, self._repopath, fileid=fileid,
475 filelog=self._filelog)
474 filelog=self._filelog)
476
475
477 def rev(self):
476 def rev(self):
478 if '_changectx' in self.__dict__:
477 if '_changectx' in self.__dict__:
479 return self._changectx.rev()
478 return self._changectx.rev()
480 return self._filelog.linkrev(self._filenode)
479 return self._filelog.linkrev(self._filenode)
481
480
482 def data(self): return self._repo.wread(self._path)
481 def data(self): return self._repo.wread(self._path)
483 def renamed(self):
482 def renamed(self):
484 rp = self._repopath
483 rp = self._repopath
485 if rp == self._path:
484 if rp == self._path:
486 return None
485 return None
487 return rp, self._workingctx._parents._manifest.get(rp, nullid)
486 return rp, self._workingctx._parents._manifest.get(rp, nullid)
488
487
489 def parents(self):
488 def parents(self):
490 '''return parent filectxs, following copies if necessary'''
489 '''return parent filectxs, following copies if necessary'''
491 p = self._path
490 p = self._path
492 rp = self._repopath
491 rp = self._repopath
493 pcl = self._changectx._parents
492 pcl = self._changectx._parents
494 fl = self._filelog
493 fl = self._filelog
495 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
494 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
496 if len(pcl) > 1:
495 if len(pcl) > 1:
497 if rp != p:
496 if rp != p:
498 fl = None
497 fl = None
499 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
498 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
500
499
501 return [filectx(self._repo, p, fileid=n, filelog=l)
500 return [filectx(self._repo, p, fileid=n, filelog=l)
502 for p,n,l in pl if n != nullid]
501 for p,n,l in pl if n != nullid]
503
502
504 def children(self):
503 def children(self):
505 return []
504 return []
506
505
507 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
506 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
508
507
509 def cmp(self, text): return self._repo.wread(self._path) == text
508 def cmp(self, text): return self._repo.wread(self._path) == text
@@ -1,531 +1,530 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 import struct, os, time, bisect, stat, strutil, util, re, errno
13 demandload(globals(), "struct os time bisect stat strutil util re errno")
14
13
15 class dirstate(object):
14 class dirstate(object):
16 format = ">cllll"
15 format = ">cllll"
17
16
18 def __init__(self, opener, ui, root):
17 def __init__(self, opener, ui, root):
19 self.opener = opener
18 self.opener = opener
20 self.root = root
19 self.root = root
21 self.dirty = 0
20 self.dirty = 0
22 self.ui = ui
21 self.ui = ui
23 self.map = None
22 self.map = None
24 self.pl = None
23 self.pl = None
25 self.dirs = None
24 self.dirs = None
26 self.copymap = {}
25 self.copymap = {}
27 self.ignorefunc = None
26 self.ignorefunc = None
28
27
29 def wjoin(self, f):
28 def wjoin(self, f):
30 return os.path.join(self.root, f)
29 return os.path.join(self.root, f)
31
30
32 def getcwd(self):
31 def getcwd(self):
33 cwd = os.getcwd()
32 cwd = os.getcwd()
34 if cwd == self.root: return ''
33 if cwd == self.root: return ''
35 # self.root ends with a path separator if self.root is '/' or 'C:\'
34 # self.root ends with a path separator if self.root is '/' or 'C:\'
36 common_prefix_len = len(self.root)
35 common_prefix_len = len(self.root)
37 if not self.root.endswith(os.sep):
36 if not self.root.endswith(os.sep):
38 common_prefix_len += 1
37 common_prefix_len += 1
39 return cwd[common_prefix_len:]
38 return cwd[common_prefix_len:]
40
39
41 def hgignore(self):
40 def hgignore(self):
42 '''return the contents of .hgignore files as a list of patterns.
41 '''return the contents of .hgignore files as a list of patterns.
43
42
44 the files parsed for patterns include:
43 the files parsed for patterns include:
45 .hgignore in the repository root
44 .hgignore in the repository root
46 any additional files specified in the [ui] section of ~/.hgrc
45 any additional files specified in the [ui] section of ~/.hgrc
47
46
48 trailing white space is dropped.
47 trailing white space is dropped.
49 the escape character is backslash.
48 the escape character is backslash.
50 comments start with #.
49 comments start with #.
51 empty lines are skipped.
50 empty lines are skipped.
52
51
53 lines can be of the following formats:
52 lines can be of the following formats:
54
53
55 syntax: regexp # defaults following lines to non-rooted regexps
54 syntax: regexp # defaults following lines to non-rooted regexps
56 syntax: glob # defaults following lines to non-rooted globs
55 syntax: glob # defaults following lines to non-rooted globs
57 re:pattern # non-rooted regular expression
56 re:pattern # non-rooted regular expression
58 glob:pattern # non-rooted glob
57 glob:pattern # non-rooted glob
59 pattern # pattern of the current default type'''
58 pattern # pattern of the current default type'''
60 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
59 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
61 def parselines(fp):
60 def parselines(fp):
62 for line in fp:
61 for line in fp:
63 escape = False
62 escape = False
64 for i in xrange(len(line)):
63 for i in xrange(len(line)):
65 if escape: escape = False
64 if escape: escape = False
66 elif line[i] == '\\': escape = True
65 elif line[i] == '\\': escape = True
67 elif line[i] == '#': break
66 elif line[i] == '#': break
68 line = line[:i].rstrip()
67 line = line[:i].rstrip()
69 if line: yield line
68 if line: yield line
70 repoignore = self.wjoin('.hgignore')
69 repoignore = self.wjoin('.hgignore')
71 files = [repoignore]
70 files = [repoignore]
72 files.extend(self.ui.hgignorefiles())
71 files.extend(self.ui.hgignorefiles())
73 pats = {}
72 pats = {}
74 for f in files:
73 for f in files:
75 try:
74 try:
76 pats[f] = []
75 pats[f] = []
77 fp = open(f)
76 fp = open(f)
78 syntax = 'relre:'
77 syntax = 'relre:'
79 for line in parselines(fp):
78 for line in parselines(fp):
80 if line.startswith('syntax:'):
79 if line.startswith('syntax:'):
81 s = line[7:].strip()
80 s = line[7:].strip()
82 try:
81 try:
83 syntax = syntaxes[s]
82 syntax = syntaxes[s]
84 except KeyError:
83 except KeyError:
85 self.ui.warn(_("%s: ignoring invalid "
84 self.ui.warn(_("%s: ignoring invalid "
86 "syntax '%s'\n") % (f, s))
85 "syntax '%s'\n") % (f, s))
87 continue
86 continue
88 pat = syntax + line
87 pat = syntax + line
89 for s in syntaxes.values():
88 for s in syntaxes.values():
90 if line.startswith(s):
89 if line.startswith(s):
91 pat = line
90 pat = line
92 break
91 break
93 pats[f].append(pat)
92 pats[f].append(pat)
94 except IOError, inst:
93 except IOError, inst:
95 if f != repoignore:
94 if f != repoignore:
96 self.ui.warn(_("skipping unreadable ignore file"
95 self.ui.warn(_("skipping unreadable ignore file"
97 " '%s': %s\n") % (f, inst.strerror))
96 " '%s': %s\n") % (f, inst.strerror))
98 return pats
97 return pats
99
98
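The hgignore() docstring above lists the accepted pattern forms. A small illustrative .hgignore exercising them (the patterns themselves are made up):

# lines starting with '#' are comments
syntax: glob
*.pyc
*.orig
build/

syntax: regexp
\.rej$

glob:*.swp          # an explicit prefix overrides the current default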
100 def ignore(self, fn):
99 def ignore(self, fn):
101 '''default match function used by dirstate and
100 '''default match function used by dirstate and
102 localrepository. this honours the repository .hgignore file
101 localrepository. this honours the repository .hgignore file
103 and any other files specified in the [ui] section of .hgrc.'''
102 and any other files specified in the [ui] section of .hgrc.'''
104 if not self.ignorefunc:
103 if not self.ignorefunc:
105 ignore = self.hgignore()
104 ignore = self.hgignore()
106 allpats = []
105 allpats = []
107 [allpats.extend(patlist) for patlist in ignore.values()]
106 [allpats.extend(patlist) for patlist in ignore.values()]
108 if allpats:
107 if allpats:
109 try:
108 try:
110 files, self.ignorefunc, anypats = (
109 files, self.ignorefunc, anypats = (
111 util.matcher(self.root, inc=allpats, src='.hgignore'))
110 util.matcher(self.root, inc=allpats, src='.hgignore'))
112 except util.Abort:
111 except util.Abort:
113 # Re-raise an exception where the src is the right file
112 # Re-raise an exception where the src is the right file
114 for f, patlist in ignore.items():
113 for f, patlist in ignore.items():
115 files, self.ignorefunc, anypats = (
114 files, self.ignorefunc, anypats = (
116 util.matcher(self.root, inc=patlist, src=f))
115 util.matcher(self.root, inc=patlist, src=f))
117 else:
116 else:
118 self.ignorefunc = util.never
117 self.ignorefunc = util.never
119 return self.ignorefunc(fn)
118 return self.ignorefunc(fn)
120
119
121 def __del__(self):
120 def __del__(self):
122 if self.dirty:
121 if self.dirty:
123 self.write()
122 self.write()
124
123
125 def __getitem__(self, key):
124 def __getitem__(self, key):
126 try:
125 try:
127 return self.map[key]
126 return self.map[key]
128 except TypeError:
127 except TypeError:
129 self.lazyread()
128 self.lazyread()
130 return self[key]
129 return self[key]
131
130
132 def __contains__(self, key):
131 def __contains__(self, key):
133 self.lazyread()
132 self.lazyread()
134 return key in self.map
133 return key in self.map
135
134
136 def parents(self):
135 def parents(self):
137 self.lazyread()
136 self.lazyread()
138 return self.pl
137 return self.pl
139
138
140 def markdirty(self):
139 def markdirty(self):
141 if not self.dirty:
140 if not self.dirty:
142 self.dirty = 1
141 self.dirty = 1
143
142
144 def setparents(self, p1, p2=nullid):
143 def setparents(self, p1, p2=nullid):
145 self.lazyread()
144 self.lazyread()
146 self.markdirty()
145 self.markdirty()
147 self.pl = p1, p2
146 self.pl = p1, p2
148
147
149 def state(self, key):
148 def state(self, key):
150 try:
149 try:
151 return self[key][0]
150 return self[key][0]
152 except KeyError:
151 except KeyError:
153 return "?"
152 return "?"
154
153
155 def lazyread(self):
154 def lazyread(self):
156 if self.map is None:
155 if self.map is None:
157 self.read()
156 self.read()
158
157
159 def parse(self, st):
158 def parse(self, st):
160 self.pl = [st[:20], st[20: 40]]
159 self.pl = [st[:20], st[20: 40]]
161
160
162 # deref fields so they will be local in loop
161 # deref fields so they will be local in loop
163 map = self.map
162 map = self.map
164 copymap = self.copymap
163 copymap = self.copymap
165 format = self.format
164 format = self.format
166 unpack = struct.unpack
165 unpack = struct.unpack
167
166
168 pos = 40
167 pos = 40
169 e_size = struct.calcsize(format)
168 e_size = struct.calcsize(format)
170
169
171 while pos < len(st):
170 while pos < len(st):
172 newpos = pos + e_size
171 newpos = pos + e_size
173 e = unpack(format, st[pos:newpos])
172 e = unpack(format, st[pos:newpos])
174 l = e[4]
173 l = e[4]
175 pos = newpos
174 pos = newpos
176 newpos = pos + l
175 newpos = pos + l
177 f = st[pos:newpos]
176 f = st[pos:newpos]
178 if '\0' in f:
177 if '\0' in f:
179 f, c = f.split('\0')
178 f, c = f.split('\0')
180 copymap[f] = c
179 copymap[f] = c
181 map[f] = e[:4]
180 map[f] = e[:4]
182 pos = newpos
181 pos = newpos
183
182
184 def read(self):
183 def read(self):
185 self.map = {}
184 self.map = {}
186 self.pl = [nullid, nullid]
185 self.pl = [nullid, nullid]
187 try:
186 try:
188 st = self.opener("dirstate").read()
187 st = self.opener("dirstate").read()
189 if st:
188 if st:
190 self.parse(st)
189 self.parse(st)
191 except IOError, err:
190 except IOError, err:
192 if err.errno != errno.ENOENT: raise
191 if err.errno != errno.ENOENT: raise
193
192
194 def copy(self, source, dest):
193 def copy(self, source, dest):
195 self.lazyread()
194 self.lazyread()
196 self.markdirty()
195 self.markdirty()
197 self.copymap[dest] = source
196 self.copymap[dest] = source
198
197
199 def copied(self, file):
198 def copied(self, file):
200 return self.copymap.get(file, None)
199 return self.copymap.get(file, None)
201
200
202 def copies(self):
201 def copies(self):
203 return self.copymap
202 return self.copymap
204
203
205 def initdirs(self):
204 def initdirs(self):
206 if self.dirs is None:
205 if self.dirs is None:
207 self.dirs = {}
206 self.dirs = {}
208 for f in self.map:
207 for f in self.map:
209 self.updatedirs(f, 1)
208 self.updatedirs(f, 1)
210
209
211 def updatedirs(self, path, delta):
210 def updatedirs(self, path, delta):
212 if self.dirs is not None:
211 if self.dirs is not None:
213 for c in strutil.findall(path, '/'):
212 for c in strutil.findall(path, '/'):
214 pc = path[:c]
213 pc = path[:c]
215 self.dirs.setdefault(pc, 0)
214 self.dirs.setdefault(pc, 0)
216 self.dirs[pc] += delta
215 self.dirs[pc] += delta
217
216
218 def checkinterfering(self, files):
217 def checkinterfering(self, files):
219 def prefixes(f):
218 def prefixes(f):
220 for c in strutil.rfindall(f, '/'):
219 for c in strutil.rfindall(f, '/'):
221 yield f[:c]
220 yield f[:c]
222 self.lazyread()
221 self.lazyread()
223 self.initdirs()
222 self.initdirs()
224 seendirs = {}
223 seendirs = {}
225 for f in files:
224 for f in files:
226 # shadows
225 # shadows
227 if self.dirs.get(f):
226 if self.dirs.get(f):
228 raise util.Abort(_('directory named %r already in dirstate') %
227 raise util.Abort(_('directory named %r already in dirstate') %
229 f)
228 f)
230 for d in prefixes(f):
229 for d in prefixes(f):
231 if d in seendirs:
230 if d in seendirs:
232 break
231 break
233 if d in self.map:
232 if d in self.map:
234 raise util.Abort(_('file named %r already in dirstate') %
233 raise util.Abort(_('file named %r already in dirstate') %
235 d)
234 d)
236 seendirs[d] = True
235 seendirs[d] = True
237 # disallowed
236 # disallowed
238 if '\r' in f or '\n' in f:
237 if '\r' in f or '\n' in f:
239 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
238 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
240
239
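checkinterfering() above walks every proper directory prefix of each filename (via strutil.rfindall) to catch file/directory name clashes. The same prefix enumeration in plain Python, without the strutil helper (the sample path is illustrative):

def prefixes(f):
    c = f.rfind('/')
    while c != -1:
        yield f[:c]
        c = f.rfind('/', 0, c)

assert list(prefixes('a/b/c/d.txt')) == ['a/b/c', 'a/b', 'a']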
241 def update(self, files, state, **kw):
240 def update(self, files, state, **kw):
242 ''' current states:
241 ''' current states:
243 n normal
242 n normal
244 m needs merging
243 m needs merging
245 r marked for removal
244 r marked for removal
246 a marked for addition'''
245 a marked for addition'''
247
246
248 if not files: return
247 if not files: return
249 self.lazyread()
248 self.lazyread()
250 self.markdirty()
249 self.markdirty()
251 if state == "a":
250 if state == "a":
252 self.initdirs()
251 self.initdirs()
253 self.checkinterfering(files)
252 self.checkinterfering(files)
254 for f in files:
253 for f in files:
255 if state == "r":
254 if state == "r":
256 self.map[f] = ('r', 0, 0, 0)
255 self.map[f] = ('r', 0, 0, 0)
257 self.updatedirs(f, -1)
256 self.updatedirs(f, -1)
258 else:
257 else:
259 if state == "a":
258 if state == "a":
260 self.updatedirs(f, 1)
259 self.updatedirs(f, 1)
261 s = os.lstat(self.wjoin(f))
260 s = os.lstat(self.wjoin(f))
262 st_size = kw.get('st_size', s.st_size)
261 st_size = kw.get('st_size', s.st_size)
263 st_mtime = kw.get('st_mtime', s.st_mtime)
262 st_mtime = kw.get('st_mtime', s.st_mtime)
264 self.map[f] = (state, s.st_mode, st_size, st_mtime)
263 self.map[f] = (state, s.st_mode, st_size, st_mtime)
265 if self.copymap.has_key(f):
264 if self.copymap.has_key(f):
266 del self.copymap[f]
265 del self.copymap[f]
267
266
268 def forget(self, files):
267 def forget(self, files):
269 if not files: return
268 if not files: return
270 self.lazyread()
269 self.lazyread()
271 self.markdirty()
270 self.markdirty()
272 self.initdirs()
271 self.initdirs()
273 for f in files:
272 for f in files:
274 try:
273 try:
275 del self.map[f]
274 del self.map[f]
276 self.updatedirs(f, -1)
275 self.updatedirs(f, -1)
277 except KeyError:
276 except KeyError:
278 self.ui.warn(_("not in dirstate: %s!\n") % f)
277 self.ui.warn(_("not in dirstate: %s!\n") % f)
279 pass
278 pass
280
279
281 def clear(self):
280 def clear(self):
282 self.map = {}
281 self.map = {}
283 self.copymap = {}
282 self.copymap = {}
284 self.dirs = None
283 self.dirs = None
285 self.markdirty()
284 self.markdirty()
286
285
287 def rebuild(self, parent, files):
286 def rebuild(self, parent, files):
288 self.clear()
287 self.clear()
289 for f in files:
288 for f in files:
290 if files.execf(f):
289 if files.execf(f):
291 self.map[f] = ('n', 0777, -1, 0)
290 self.map[f] = ('n', 0777, -1, 0)
292 else:
291 else:
293 self.map[f] = ('n', 0666, -1, 0)
292 self.map[f] = ('n', 0666, -1, 0)
294 self.pl = (parent, nullid)
293 self.pl = (parent, nullid)
295 self.markdirty()
294 self.markdirty()
296
295
297 def write(self):
296 def write(self):
298 if not self.dirty:
297 if not self.dirty:
299 return
298 return
300 st = self.opener("dirstate", "w", atomic=True)
299 st = self.opener("dirstate", "w", atomic=True)
301 st.write("".join(self.pl))
300 st.write("".join(self.pl))
302 for f, e in self.map.items():
301 for f, e in self.map.items():
303 c = self.copied(f)
302 c = self.copied(f)
304 if c:
303 if c:
305 f = f + "\0" + c
304 f = f + "\0" + c
306 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
305 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
307 st.write(e + f)
306 st.write(e + f)
308 self.dirty = 0
307 self.dirty = 0
309
308
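parse() and write() above serialize each dirstate entry as a '>cllll' header (state, mode, size, mtime, filename length) followed by the filename, with any copy source appended after a NUL byte. A standalone round trip of that layout, using made-up values:

import struct

format = ">cllll"
state, mode, size, mtime = 'n', 0644, 1234, 1167609600   # illustrative metadata
name = "mercurial/dirstate.py"

packed = struct.pack(format, state, mode, size, mtime, len(name)) + name

# unpacking mirrors dirstate.parse()
e_size = struct.calcsize(format)
e = struct.unpack(format, packed[:e_size])
assert packed[e_size:e_size + e[4]] == name
assert (e[0], e[1], e[2], e[3]) == (state, mode, size, mtime)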
310 def filterfiles(self, files):
309 def filterfiles(self, files):
311 ret = {}
310 ret = {}
312 unknown = []
311 unknown = []
313
312
314 for x in files:
313 for x in files:
315 if x == '.':
314 if x == '.':
316 return self.map.copy()
315 return self.map.copy()
317 if x not in self.map:
316 if x not in self.map:
318 unknown.append(x)
317 unknown.append(x)
319 else:
318 else:
320 ret[x] = self.map[x]
319 ret[x] = self.map[x]
321
320
322 if not unknown:
321 if not unknown:
323 return ret
322 return ret
324
323
325 b = self.map.keys()
324 b = self.map.keys()
326 b.sort()
325 b.sort()
327 blen = len(b)
326 blen = len(b)
328
327
329 for x in unknown:
328 for x in unknown:
330 bs = bisect.bisect(b, "%s%s" % (x, '/'))
329 bs = bisect.bisect(b, "%s%s" % (x, '/'))
331 while bs < blen:
330 while bs < blen:
332 s = b[bs]
331 s = b[bs]
333 if len(s) > len(x) and s.startswith(x):
332 if len(s) > len(x) and s.startswith(x):
334 ret[s] = self.map[s]
333 ret[s] = self.map[s]
335 else:
334 else:
336 break
335 break
337 bs += 1
336 bs += 1
338 return ret
337 return ret
339
338
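filterfiles() above uses bisect on the sorted dirstate keys to collect every tracked path under a requested directory without scanning the whole map. The same scan in isolation, on an illustrative path list:

import bisect

b = ['README', 'doc/hg.1.txt', 'mercurial/commands.py', 'mercurial/util.py', 'setup.py']
b.sort()
x = 'mercurial'                    # directory being looked up

matches = []
bs = bisect.bisect(b, x + '/')     # first entry sorting after 'mercurial/'
while bs < len(b):
    s = b[bs]
    if len(s) > len(x) and s.startswith(x):
        matches.append(s)
    else:
        break
    bs += 1

assert matches == ['mercurial/commands.py', 'mercurial/util.py']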
340 def supported_type(self, f, st, verbose=False):
339 def supported_type(self, f, st, verbose=False):
341 if stat.S_ISREG(st.st_mode):
340 if stat.S_ISREG(st.st_mode):
342 return True
341 return True
343 if verbose:
342 if verbose:
344 kind = 'unknown'
343 kind = 'unknown'
345 if stat.S_ISCHR(st.st_mode): kind = _('character device')
344 if stat.S_ISCHR(st.st_mode): kind = _('character device')
346 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
345 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
347 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
346 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
348 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
347 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
349 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
348 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
350 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
349 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
351 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
350 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
352 util.pathto(self.getcwd(), f),
351 util.pathto(self.getcwd(), f),
353 kind))
352 kind))
354 return False
353 return False
355
354
356 def walk(self, files=None, match=util.always, badmatch=None):
355 def walk(self, files=None, match=util.always, badmatch=None):
357 # filter out the stat
356 # filter out the stat
358 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
357 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
359 yield src, f
358 yield src, f
360
359
361 def statwalk(self, files=None, match=util.always, ignored=False,
360 def statwalk(self, files=None, match=util.always, ignored=False,
362 badmatch=None):
361 badmatch=None):
363 '''
362 '''
364 walk recursively through the directory tree, finding all files
363 walk recursively through the directory tree, finding all files
365 matched by the match function
364 matched by the match function
366
365
367 results are yielded in a tuple (src, filename, st), where src
366 results are yielded in a tuple (src, filename, st), where src
368 is one of:
367 is one of:
369 'f' the file was found in the directory tree
368 'f' the file was found in the directory tree
370 'm' the file was only in the dirstate and not in the tree
369 'm' the file was only in the dirstate and not in the tree
371 'b' file was not found and matched badmatch
370 'b' file was not found and matched badmatch
372
371
373 and st is the stat result if the file was found in the directory.
372 and st is the stat result if the file was found in the directory.
374 '''
373 '''
375 self.lazyread()
374 self.lazyread()
376
375
377 # walk all files by default
376 # walk all files by default
378 if not files:
377 if not files:
379 files = [self.root]
378 files = [self.root]
380 dc = self.map.copy()
379 dc = self.map.copy()
381 else:
380 else:
382 files = util.unique(files)
381 files = util.unique(files)
383 dc = self.filterfiles(files)
382 dc = self.filterfiles(files)
384
383
385 def imatch(file_):
384 def imatch(file_):
386 if file_ not in dc and self.ignore(file_):
385 if file_ not in dc and self.ignore(file_):
387 return False
386 return False
388 return match(file_)
387 return match(file_)
389
388
390 if ignored: imatch = match
389 if ignored: imatch = match
391
390
392 # self.root may end with a path separator when self.root == '/'
391 # self.root may end with a path separator when self.root == '/'
393 common_prefix_len = len(self.root)
392 common_prefix_len = len(self.root)
394 if not self.root.endswith('/'):
393 if not self.root.endswith('/'):
395 common_prefix_len += 1
394 common_prefix_len += 1
396 # recursion free walker, faster than os.walk.
395 # recursion free walker, faster than os.walk.
397 def findfiles(s):
396 def findfiles(s):
398 work = [s]
397 work = [s]
399 while work:
398 while work:
400 top = work.pop()
399 top = work.pop()
401 names = os.listdir(top)
400 names = os.listdir(top)
402 names.sort()
401 names.sort()
403 # nd is the top of the repository dir tree
402 # nd is the top of the repository dir tree
404 nd = util.normpath(top[common_prefix_len:])
403 nd = util.normpath(top[common_prefix_len:])
405 if nd == '.':
404 if nd == '.':
406 nd = ''
405 nd = ''
407 else:
406 else:
408 # do not recurse into a repo contained in this
407 # do not recurse into a repo contained in this
409 # one. use bisect to find the .hg directory so the
408 # one. use bisect to find the .hg directory so the
410 # lookup stays fast in big directories.
409 # lookup stays fast in big directories.
411 hg = bisect.bisect_left(names, '.hg')
410 hg = bisect.bisect_left(names, '.hg')
412 if hg < len(names) and names[hg] == '.hg':
411 if hg < len(names) and names[hg] == '.hg':
413 if os.path.isdir(os.path.join(top, '.hg')):
412 if os.path.isdir(os.path.join(top, '.hg')):
414 continue
413 continue
415 for f in names:
414 for f in names:
416 np = util.pconvert(os.path.join(nd, f))
415 np = util.pconvert(os.path.join(nd, f))
417 if seen(np):
416 if seen(np):
418 continue
417 continue
419 p = os.path.join(top, f)
418 p = os.path.join(top, f)
420 # don't trip over symlinks
419 # don't trip over symlinks
421 st = os.lstat(p)
420 st = os.lstat(p)
422 if stat.S_ISDIR(st.st_mode):
421 if stat.S_ISDIR(st.st_mode):
423 ds = util.pconvert(os.path.join(nd, f +'/'))
422 ds = util.pconvert(os.path.join(nd, f +'/'))
424 if imatch(ds):
423 if imatch(ds):
425 work.append(p)
424 work.append(p)
426 if imatch(np) and np in dc:
425 if imatch(np) and np in dc:
427 yield 'm', np, st
426 yield 'm', np, st
428 elif imatch(np):
427 elif imatch(np):
429 if self.supported_type(np, st):
428 if self.supported_type(np, st):
430 yield 'f', np, st
429 yield 'f', np, st
431 elif np in dc:
430 elif np in dc:
432 yield 'm', np, st
431 yield 'm', np, st
433
432
434 known = {'.hg': 1}
433 known = {'.hg': 1}
435 def seen(fn):
434 def seen(fn):
436 if fn in known: return True
435 if fn in known: return True
437 known[fn] = 1
436 known[fn] = 1
438
437
439 # step one, find all files that match our criteria
438 # step one, find all files that match our criteria
440 files.sort()
439 files.sort()
441 for ff in files:
440 for ff in files:
442 nf = util.normpath(ff)
441 nf = util.normpath(ff)
443 f = self.wjoin(ff)
442 f = self.wjoin(ff)
444 try:
443 try:
445 st = os.lstat(f)
444 st = os.lstat(f)
446 except OSError, inst:
445 except OSError, inst:
447 found = False
446 found = False
448 for fn in dc:
447 for fn in dc:
449 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
448 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
450 found = True
449 found = True
451 break
450 break
452 if not found:
451 if not found:
453 if inst.errno != errno.ENOENT or not badmatch:
452 if inst.errno != errno.ENOENT or not badmatch:
454 self.ui.warn('%s: %s\n' % (
453 self.ui.warn('%s: %s\n' % (
455 util.pathto(self.getcwd(), ff),
454 util.pathto(self.getcwd(), ff),
456 inst.strerror))
455 inst.strerror))
457 elif badmatch and badmatch(ff) and imatch(nf):
456 elif badmatch and badmatch(ff) and imatch(nf):
458 yield 'b', ff, None
457 yield 'b', ff, None
459 continue
458 continue
460 if stat.S_ISDIR(st.st_mode):
459 if stat.S_ISDIR(st.st_mode):
461 cmp1 = (lambda x, y: cmp(x[1], y[1]))
460 cmp1 = (lambda x, y: cmp(x[1], y[1]))
462 sorted_ = [ x for x in findfiles(f) ]
461 sorted_ = [ x for x in findfiles(f) ]
463 sorted_.sort(cmp1)
462 sorted_.sort(cmp1)
464 for e in sorted_:
463 for e in sorted_:
465 yield e
464 yield e
466 else:
465 else:
467 if not seen(nf) and match(nf):
466 if not seen(nf) and match(nf):
468 if self.supported_type(ff, st, verbose=True):
467 if self.supported_type(ff, st, verbose=True):
469 yield 'f', nf, st
468 yield 'f', nf, st
470 elif ff in dc:
469 elif ff in dc:
471 yield 'm', nf, st
470 yield 'm', nf, st
472
471
473 # step two run through anything left in the dc hash and yield
472 # step two run through anything left in the dc hash and yield
474 # if we haven't already seen it
473 # if we haven't already seen it
475 ks = dc.keys()
474 ks = dc.keys()
476 ks.sort()
475 ks.sort()
477 for k in ks:
476 for k in ks:
478 if not seen(k) and imatch(k):
477 if not seen(k) and imatch(k):
479 yield 'm', k, None
478 yield 'm', k, None
480
479
481 def status(self, files=None, match=util.always, list_ignored=False,
480 def status(self, files=None, match=util.always, list_ignored=False,
482 list_clean=False):
481 list_clean=False):
483 lookup, modified, added, unknown, ignored = [], [], [], [], []
482 lookup, modified, added, unknown, ignored = [], [], [], [], []
484 removed, deleted, clean = [], [], []
483 removed, deleted, clean = [], [], []
485
484
486 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
485 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
487 try:
486 try:
488 type_, mode, size, time = self[fn]
487 type_, mode, size, time = self[fn]
489 except KeyError:
488 except KeyError:
490 if list_ignored and self.ignore(fn):
489 if list_ignored and self.ignore(fn):
491 ignored.append(fn)
490 ignored.append(fn)
492 else:
491 else:
493 unknown.append(fn)
492 unknown.append(fn)
494 continue
493 continue
495 if src == 'm':
494 if src == 'm':
496 nonexistent = True
495 nonexistent = True
497 if not st:
496 if not st:
498 try:
497 try:
499 st = os.lstat(self.wjoin(fn))
498 st = os.lstat(self.wjoin(fn))
500 except OSError, inst:
499 except OSError, inst:
501 if inst.errno != errno.ENOENT:
500 if inst.errno != errno.ENOENT:
502 raise
501 raise
503 st = None
502 st = None
504 # We need to re-check that it is a valid file
503 # We need to re-check that it is a valid file
505 if st and self.supported_type(fn, st):
504 if st and self.supported_type(fn, st):
506 nonexistent = False
505 nonexistent = False
507 # XXX: what to do with files no longer present in the fs
506 # XXX: what to do with files no longer present in the fs
508 # that are not marked removed in the dirstate?
507 # that are not marked removed in the dirstate?
509 if nonexistent and type_ in "nm":
508 if nonexistent and type_ in "nm":
510 deleted.append(fn)
509 deleted.append(fn)
511 continue
510 continue
512 # check the common case first
511 # check the common case first
513 if type_ == 'n':
512 if type_ == 'n':
514 if not st:
513 if not st:
515 st = os.lstat(self.wjoin(fn))
514 st = os.lstat(self.wjoin(fn))
516 if size >= 0 and (size != st.st_size
515 if size >= 0 and (size != st.st_size
517 or (mode ^ st.st_mode) & 0100):
516 or (mode ^ st.st_mode) & 0100):
518 modified.append(fn)
517 modified.append(fn)
519 elif time != int(st.st_mtime):
518 elif time != int(st.st_mtime):
520 lookup.append(fn)
519 lookup.append(fn)
521 elif list_clean:
520 elif list_clean:
522 clean.append(fn)
521 clean.append(fn)
523 elif type_ == 'm':
522 elif type_ == 'm':
524 modified.append(fn)
523 modified.append(fn)
525 elif type_ == 'a':
524 elif type_ == 'a':
526 added.append(fn)
525 added.append(fn)
527 elif type_ == 'r':
526 elif type_ == 'r':
528 removed.append(fn)
527 removed.append(fn)
529
528
530 return (lookup, modified, added, removed, deleted, unknown, ignored,
529 return (lookup, modified, added, removed, deleted, unknown, ignored,
531 clean)
530 clean)
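The 'n' (normal) branch above is the cheap check at the heart of status(): a size or executable-bit change is enough to call a file modified, while an mtime mismatch alone only flags it for a content comparison. A small standalone restatement of that rule; the helper name is invented for illustration, and st is assumed to be an os.lstat() result (anything with st_size, st_mode and st_mtime):

def classify_normal(size, mode, mtime, st):
    # a negative recorded size skips the size/mode comparison; 0o100 is
    # the owner-execute bit (spelled 0100 in the Python 2 code above)
    if size >= 0 and (size != st.st_size or (mode ^ st.st_mode) & 0o100):
        return 'modified'
    elif mtime != int(st.st_mtime):
        return 'lookup'
    return 'clean'

The 0o100 spelling is used so the sketch also runs on Python 3; the behaviour is the same as the branch above.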
@@ -1,86 +1,85 b''
1 # filelog.py - file history class for mercurial
1 # filelog.py - file history class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from demandload import *
9 import os
10 demandload(globals(), "os")
11
10
12 class filelog(revlog):
11 class filelog(revlog):
13 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
12 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
14 revlog.__init__(self, opener,
13 revlog.__init__(self, opener,
15 "/".join(("data", self.encodedir(path + ".i"))),
14 "/".join(("data", self.encodedir(path + ".i"))),
16 "/".join(("data", self.encodedir(path + ".d"))),
15 "/".join(("data", self.encodedir(path + ".d"))),
17 defversion)
16 defversion)
18
17
19 # This avoids a collision between a file named foo and a dir named
18 # This avoids a collision between a file named foo and a dir named
20 # foo.i or foo.d
19 # foo.i or foo.d
21 def encodedir(self, path):
20 def encodedir(self, path):
22 return (path
21 return (path
23 .replace(".hg/", ".hg.hg/")
22 .replace(".hg/", ".hg.hg/")
24 .replace(".i/", ".i.hg/")
23 .replace(".i/", ".i.hg/")
25 .replace(".d/", ".d.hg/"))
24 .replace(".d/", ".d.hg/"))
26
25
27 def decodedir(self, path):
26 def decodedir(self, path):
28 return (path
27 return (path
29 .replace(".d.hg/", ".d/")
28 .replace(".d.hg/", ".d/")
30 .replace(".i.hg/", ".i/")
29 .replace(".i.hg/", ".i/")
31 .replace(".hg.hg/", ".hg/"))
30 .replace(".hg.hg/", ".hg/"))
32
31
33 def read(self, node):
32 def read(self, node):
34 t = self.revision(node)
33 t = self.revision(node)
35 if not t.startswith('\1\n'):
34 if not t.startswith('\1\n'):
36 return t
35 return t
37 s = t.index('\1\n', 2)
36 s = t.index('\1\n', 2)
38 return t[s+2:]
37 return t[s+2:]
39
38
40 def _readmeta(self, node):
39 def _readmeta(self, node):
41 t = self.revision(node)
40 t = self.revision(node)
42 if not t.startswith('\1\n'):
41 if not t.startswith('\1\n'):
43 return {}
42 return {}
44 s = t.index('\1\n', 2)
43 s = t.index('\1\n', 2)
45 mt = t[2:s]
44 mt = t[2:s]
46 m = {}
45 m = {}
47 for l in mt.splitlines():
46 for l in mt.splitlines():
48 k, v = l.split(": ", 1)
47 k, v = l.split(": ", 1)
49 m[k] = v
48 m[k] = v
50 return m
49 return m
51
50
52 def add(self, text, meta, transaction, link, p1=None, p2=None):
51 def add(self, text, meta, transaction, link, p1=None, p2=None):
53 if meta or text.startswith('\1\n'):
52 if meta or text.startswith('\1\n'):
54 mt = ""
53 mt = ""
55 if meta:
54 if meta:
56 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
55 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
57 text = "\1\n%s\1\n%s" % ("".join(mt), text)
56 text = "\1\n%s\1\n%s" % ("".join(mt), text)
58 return self.addrevision(text, transaction, link, p1, p2)
57 return self.addrevision(text, transaction, link, p1, p2)
59
58
60 def renamed(self, node):
59 def renamed(self, node):
61 if self.parents(node)[0] != nullid:
60 if self.parents(node)[0] != nullid:
62 return False
61 return False
63 m = self._readmeta(node)
62 m = self._readmeta(node)
64 if m and m.has_key("copy"):
63 if m and m.has_key("copy"):
65 return (m["copy"], bin(m["copyrev"]))
64 return (m["copy"], bin(m["copyrev"]))
66 return False
65 return False
67
66
68 def size(self, rev):
67 def size(self, rev):
69 """return the size of a given revision"""
68 """return the size of a given revision"""
70
69
71 # for revisions with renames, we have to go the slow way
70 # for revisions with renames, we have to go the slow way
72 node = self.node(rev)
71 node = self.node(rev)
73 if self.renamed(node):
72 if self.renamed(node):
74 return len(self.read(node))
73 return len(self.read(node))
75
74
76 return revlog.size(self, rev)
75 return revlog.size(self, rev)
77
76
78 def cmp(self, node, text):
77 def cmp(self, node, text):
79 """compare text with a given file revision"""
78 """compare text with a given file revision"""
80
79
81 # for renames, we have to go the slow way
80 # for renames, we have to go the slow way
82 if self.renamed(node):
81 if self.renamed(node):
83 t2 = self.read(node)
82 t2 = self.read(node)
84 return t2 != text
83 return t2 != text
85
84
86 return revlog.cmp(self, node, text)
85 return revlog.cmp(self, node, text)
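filelog stores rename/copy metadata in-band: add() prefixes the text with "key: value" lines wrapped between two "\1\n" markers, and read()/_readmeta() strip them back off. A self-contained sketch of that framing, with invented helper names:

def packmeta(meta, text):
    # wrap "key: value" metadata between two "\1\n" markers, as add() does
    if not meta and not text.startswith('\1\n'):
        return text
    lines = ["%s: %s\n" % (k, v) for k, v in sorted(meta.items())]
    return "\1\n%s\1\n%s" % ("".join(lines), text)

def unpackmeta(blob):
    # inverse of packmeta, following read() and _readmeta() above
    if not blob.startswith('\1\n'):
        return {}, blob
    end = blob.index('\1\n', 2)
    meta = {}
    for line in blob[2:end].splitlines():
        k, v = line.split(": ", 1)
        meta[k] = v
    return meta, blob[end + 2:]

unpackmeta(packmeta({'copy': 'a', 'copyrev': '00' * 20}, 'data')) round-trips to the same metadata dict and text, which is why renamed() above can recover the copy source from the stored revision.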
@@ -1,279 +1,280 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from repo import *
10 from repo import *
11 from demandload import *
12 from i18n import gettext as _
11 from i18n import gettext as _
13 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
14 demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify")
13 import errno, lock, os, shutil, util
14 import merge as _merge
15 import verify as _verify
15
16
16 def _local(path):
17 def _local(path):
17 return (os.path.isfile(util.drop_scheme('file', path)) and
18 return (os.path.isfile(util.drop_scheme('file', path)) and
18 bundlerepo or localrepo)
19 bundlerepo or localrepo)
19
20
20 schemes = {
21 schemes = {
21 'bundle': bundlerepo,
22 'bundle': bundlerepo,
22 'file': _local,
23 'file': _local,
23 'hg': httprepo,
24 'hg': httprepo,
24 'http': httprepo,
25 'http': httprepo,
25 'https': httprepo,
26 'https': httprepo,
26 'old-http': statichttprepo,
27 'old-http': statichttprepo,
27 'ssh': sshrepo,
28 'ssh': sshrepo,
28 'static-http': statichttprepo,
29 'static-http': statichttprepo,
29 }
30 }
30
31
31 def _lookup(path):
32 def _lookup(path):
32 scheme = 'file'
33 scheme = 'file'
33 if path:
34 if path:
34 c = path.find(':')
35 c = path.find(':')
35 if c > 0:
36 if c > 0:
36 scheme = path[:c]
37 scheme = path[:c]
37 thing = schemes.get(scheme) or schemes['file']
38 thing = schemes.get(scheme) or schemes['file']
38 try:
39 try:
39 return thing(path)
40 return thing(path)
40 except TypeError:
41 except TypeError:
41 return thing
42 return thing
42
43
43 def islocal(repo):
44 def islocal(repo):
44 '''return true if repo or path is local'''
45 '''return true if repo or path is local'''
45 if isinstance(repo, str):
46 if isinstance(repo, str):
46 try:
47 try:
47 return _lookup(repo).islocal(repo)
48 return _lookup(repo).islocal(repo)
48 except AttributeError:
49 except AttributeError:
49 return False
50 return False
50 return repo.local()
51 return repo.local()
51
52
52 repo_setup_hooks = []
53 repo_setup_hooks = []
53
54
54 def repository(ui, path='', create=False):
55 def repository(ui, path='', create=False):
55 """return a repository object for the specified path"""
56 """return a repository object for the specified path"""
56 repo = _lookup(path).instance(ui, path, create)
57 repo = _lookup(path).instance(ui, path, create)
57 for hook in repo_setup_hooks:
58 for hook in repo_setup_hooks:
58 hook(ui, repo)
59 hook(ui, repo)
59 return repo
60 return repo
60
61
61 def defaultdest(source):
62 def defaultdest(source):
62 '''return default destination of clone if none is given'''
63 '''return default destination of clone if none is given'''
63 return os.path.basename(os.path.normpath(source))
64 return os.path.basename(os.path.normpath(source))
64
65
65 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
66 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
66 stream=False):
67 stream=False):
67 """Make a copy of an existing repository.
68 """Make a copy of an existing repository.
68
69
69 Create a copy of an existing repository in a new directory. The
70 Create a copy of an existing repository in a new directory. The
70 source and destination are URLs, as passed to the repository
71 source and destination are URLs, as passed to the repository
71 function. Returns a pair of repository objects, the source and
72 function. Returns a pair of repository objects, the source and
72 newly created destination.
73 newly created destination.
73
74
74 The location of the source is added to the new repository's
75 The location of the source is added to the new repository's
75 .hg/hgrc file, as the default to be used for future pulls and
76 .hg/hgrc file, as the default to be used for future pulls and
76 pushes.
77 pushes.
77
78
78 If an exception is raised, the partly cloned/updated destination
79 If an exception is raised, the partly cloned/updated destination
79 repository will be deleted.
80 repository will be deleted.
80
81
81 Arguments:
82 Arguments:
82
83
83 source: repository object or URL
84 source: repository object or URL
84
85
85 dest: URL of destination repository to create (defaults to base
86 dest: URL of destination repository to create (defaults to base
86 name of source repository)
87 name of source repository)
87
88
88 pull: always pull from source repository, even in local case
89 pull: always pull from source repository, even in local case
89
90
90 stream: stream raw data uncompressed from repository (fast over
91 stream: stream raw data uncompressed from repository (fast over
91 LAN, slow over WAN)
92 LAN, slow over WAN)
92
93
93 rev: revision to clone up to (implies pull=True)
94 rev: revision to clone up to (implies pull=True)
94
95
95 update: update working directory after clone completes, if
96 update: update working directory after clone completes, if
96 destination is local repository
97 destination is local repository
97 """
98 """
98 if isinstance(source, str):
99 if isinstance(source, str):
99 src_repo = repository(ui, source)
100 src_repo = repository(ui, source)
100 else:
101 else:
101 src_repo = source
102 src_repo = source
102 source = src_repo.url()
103 source = src_repo.url()
103
104
104 if dest is None:
105 if dest is None:
105 dest = defaultdest(source)
106 dest = defaultdest(source)
106 ui.status(_("destination directory: %s\n") % dest)
107 ui.status(_("destination directory: %s\n") % dest)
107
108
108 def localpath(path):
109 def localpath(path):
109 if path.startswith('file://'):
110 if path.startswith('file://'):
110 return path[7:]
111 return path[7:]
111 if path.startswith('file:'):
112 if path.startswith('file:'):
112 return path[5:]
113 return path[5:]
113 return path
114 return path
114
115
115 dest = localpath(dest)
116 dest = localpath(dest)
116 source = localpath(source)
117 source = localpath(source)
117
118
118 if os.path.exists(dest):
119 if os.path.exists(dest):
119 raise util.Abort(_("destination '%s' already exists") % dest)
120 raise util.Abort(_("destination '%s' already exists") % dest)
120
121
121 class DirCleanup(object):
122 class DirCleanup(object):
122 def __init__(self, dir_):
123 def __init__(self, dir_):
123 self.rmtree = shutil.rmtree
124 self.rmtree = shutil.rmtree
124 self.dir_ = dir_
125 self.dir_ = dir_
125 def close(self):
126 def close(self):
126 self.dir_ = None
127 self.dir_ = None
127 def __del__(self):
128 def __del__(self):
128 if self.dir_:
129 if self.dir_:
129 self.rmtree(self.dir_, True)
130 self.rmtree(self.dir_, True)
130
131
131 dir_cleanup = None
132 dir_cleanup = None
132 if islocal(dest):
133 if islocal(dest):
133 dir_cleanup = DirCleanup(dest)
134 dir_cleanup = DirCleanup(dest)
134
135
135 abspath = source
136 abspath = source
136 copy = False
137 copy = False
137 if src_repo.local() and islocal(dest):
138 if src_repo.local() and islocal(dest):
138 abspath = os.path.abspath(source)
139 abspath = os.path.abspath(source)
139 copy = not pull and not rev
140 copy = not pull and not rev
140
141
141 src_lock, dest_lock = None, None
142 src_lock, dest_lock = None, None
142 if copy:
143 if copy:
143 try:
144 try:
144 # we use a lock here because if we race with commit, we
145 # we use a lock here because if we race with commit, we
145 # can end up with extra data in the cloned revlogs that's
146 # can end up with extra data in the cloned revlogs that's
146 # not pointed to by changesets, thus causing verify to
147 # not pointed to by changesets, thus causing verify to
147 # fail
148 # fail
148 src_lock = src_repo.lock()
149 src_lock = src_repo.lock()
149 except lock.LockException:
150 except lock.LockException:
150 copy = False
151 copy = False
151
152
152 if copy:
153 if copy:
153 def force_copy(src, dst):
154 def force_copy(src, dst):
154 try:
155 try:
155 util.copyfiles(src, dst)
156 util.copyfiles(src, dst)
156 except OSError, inst:
157 except OSError, inst:
157 if inst.errno != errno.ENOENT:
158 if inst.errno != errno.ENOENT:
158 raise
159 raise
159
160
160 src_store = os.path.realpath(src_repo.spath)
161 src_store = os.path.realpath(src_repo.spath)
161 if not os.path.exists(dest):
162 if not os.path.exists(dest):
162 os.mkdir(dest)
163 os.mkdir(dest)
163 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
164 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
164 os.mkdir(dest_path)
165 os.mkdir(dest_path)
165 if src_repo.spath != src_repo.path:
166 if src_repo.spath != src_repo.path:
166 dest_store = os.path.join(dest_path, "store")
167 dest_store = os.path.join(dest_path, "store")
167 os.mkdir(dest_store)
168 os.mkdir(dest_store)
168 else:
169 else:
169 dest_store = dest_path
170 dest_store = dest_path
170 # copy the requires file
171 # copy the requires file
171 force_copy(src_repo.join("requires"),
172 force_copy(src_repo.join("requires"),
172 os.path.join(dest_path, "requires"))
173 os.path.join(dest_path, "requires"))
173 # we lock here to avoid premature writing to the target
174 # we lock here to avoid premature writing to the target
174 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
175 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
175
176
176 files = ("data",
177 files = ("data",
177 "00manifest.d", "00manifest.i",
178 "00manifest.d", "00manifest.i",
178 "00changelog.d", "00changelog.i")
179 "00changelog.d", "00changelog.i")
179 for f in files:
180 for f in files:
180 src = os.path.join(src_store, f)
181 src = os.path.join(src_store, f)
181 dst = os.path.join(dest_store, f)
182 dst = os.path.join(dest_store, f)
182 force_copy(src, dst)
183 force_copy(src, dst)
183
184
184 # we need to re-init the repo after manually copying the data
185 # we need to re-init the repo after manually copying the data
185 # into it
186 # into it
186 dest_repo = repository(ui, dest)
187 dest_repo = repository(ui, dest)
187
188
188 else:
189 else:
189 dest_repo = repository(ui, dest, create=True)
190 dest_repo = repository(ui, dest, create=True)
190
191
191 revs = None
192 revs = None
192 if rev:
193 if rev:
193 if 'lookup' not in src_repo.capabilities:
194 if 'lookup' not in src_repo.capabilities:
194 raise util.Abort(_("src repository does not support revision "
195 raise util.Abort(_("src repository does not support revision "
195 "lookup and so doesn't support clone by "
196 "lookup and so doesn't support clone by "
196 "revision"))
197 "revision"))
197 revs = [src_repo.lookup(r) for r in rev]
198 revs = [src_repo.lookup(r) for r in rev]
198
199
199 if dest_repo.local():
200 if dest_repo.local():
200 dest_repo.clone(src_repo, heads=revs, stream=stream)
201 dest_repo.clone(src_repo, heads=revs, stream=stream)
201 elif src_repo.local():
202 elif src_repo.local():
202 src_repo.push(dest_repo, revs=revs)
203 src_repo.push(dest_repo, revs=revs)
203 else:
204 else:
204 raise util.Abort(_("clone from remote to remote not supported"))
205 raise util.Abort(_("clone from remote to remote not supported"))
205
206
206 if src_lock:
207 if src_lock:
207 src_lock.release()
208 src_lock.release()
208
209
209 if dest_repo.local():
210 if dest_repo.local():
210 fp = dest_repo.opener("hgrc", "w", text=True)
211 fp = dest_repo.opener("hgrc", "w", text=True)
211 fp.write("[paths]\n")
212 fp.write("[paths]\n")
212 fp.write("default = %s\n" % abspath)
213 fp.write("default = %s\n" % abspath)
213 fp.close()
214 fp.close()
214
215
215 if dest_lock:
216 if dest_lock:
216 dest_lock.release()
217 dest_lock.release()
217
218
218 if update:
219 if update:
219 _update(dest_repo, dest_repo.changelog.tip())
220 _update(dest_repo, dest_repo.changelog.tip())
220 if dir_cleanup:
221 if dir_cleanup:
221 dir_cleanup.close()
222 dir_cleanup.close()
222
223
223 return src_repo, dest_repo
224 return src_repo, dest_repo
224
225
225 def _showstats(repo, stats):
226 def _showstats(repo, stats):
226 stats = ((stats[0], _("updated")),
227 stats = ((stats[0], _("updated")),
227 (stats[1], _("merged")),
228 (stats[1], _("merged")),
228 (stats[2], _("removed")),
229 (stats[2], _("removed")),
229 (stats[3], _("unresolved")))
230 (stats[3], _("unresolved")))
230 note = ", ".join([_("%d files %s") % s for s in stats])
231 note = ", ".join([_("%d files %s") % s for s in stats])
231 repo.ui.status("%s\n" % note)
232 repo.ui.status("%s\n" % note)
232
233
233 def _update(repo, node): return update(repo, node)
234 def _update(repo, node): return update(repo, node)
234
235
235 def update(repo, node):
236 def update(repo, node):
236 """update the working directory to node, merging linear changes"""
237 """update the working directory to node, merging linear changes"""
237 pl = repo.parents()
238 pl = repo.parents()
238 stats = _merge.update(repo, node, False, False, None, None)
239 stats = _merge.update(repo, node, False, False, None, None)
239 _showstats(repo, stats)
240 _showstats(repo, stats)
240 if stats[3]:
241 if stats[3]:
241 repo.ui.status(_("There are unresolved merges with"
242 repo.ui.status(_("There are unresolved merges with"
242 " locally modified files.\n"))
243 " locally modified files.\n"))
243 if stats[1]:
244 if stats[1]:
244 repo.ui.status(_("You can finish the partial merge using:\n"))
245 repo.ui.status(_("You can finish the partial merge using:\n"))
245 else:
246 else:
246 repo.ui.status(_("You can redo the full merge using:\n"))
247 repo.ui.status(_("You can redo the full merge using:\n"))
247 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
248 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
248 repo.ui.status(_(" hg update %s\n hg update %s\n")
249 repo.ui.status(_(" hg update %s\n hg update %s\n")
249 % (pl[0].rev(), repo.changectx(node).rev()))
250 % (pl[0].rev(), repo.changectx(node).rev()))
250 return stats[3]
251 return stats[3]
251
252
252 def clean(repo, node, wlock=None, show_stats=True):
253 def clean(repo, node, wlock=None, show_stats=True):
253 """forcibly switch the working directory to node, clobbering changes"""
254 """forcibly switch the working directory to node, clobbering changes"""
254 stats = _merge.update(repo, node, False, True, None, wlock)
255 stats = _merge.update(repo, node, False, True, None, wlock)
255 if show_stats: _showstats(repo, stats)
256 if show_stats: _showstats(repo, stats)
256 return stats[3]
257 return stats[3]
257
258
258 def merge(repo, node, force=None, remind=True, wlock=None):
259 def merge(repo, node, force=None, remind=True, wlock=None):
259 """branch merge with node, resolving changes"""
260 """branch merge with node, resolving changes"""
260 stats = _merge.update(repo, node, True, force, False, wlock)
261 stats = _merge.update(repo, node, True, force, False, wlock)
261 _showstats(repo, stats)
262 _showstats(repo, stats)
262 if stats[3]:
263 if stats[3]:
263 pl = repo.parents()
264 pl = repo.parents()
264 repo.ui.status(_("There are unresolved merges,"
265 repo.ui.status(_("There are unresolved merges,"
265 " you can redo the full merge using:\n"
266 " you can redo the full merge using:\n"
266 " hg update -C %s\n"
267 " hg update -C %s\n"
267 " hg merge %s\n")
268 " hg merge %s\n")
268 % (pl[0].rev(), pl[1].rev()))
269 % (pl[0].rev(), pl[1].rev()))
269 elif remind:
270 elif remind:
270 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
271 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
271 return stats[3]
272 return stats[3]
272
273
273 def revert(repo, node, choose, wlock):
274 def revert(repo, node, choose, wlock):
274 """revert changes to revision in node without updating dirstate"""
275 """revert changes to revision in node without updating dirstate"""
275 return _merge.update(repo, node, False, True, choose, wlock)[3]
276 return _merge.update(repo, node, False, True, choose, wlock)[3]
276
277
277 def verify(repo):
278 def verify(repo):
278 """verify the consistency of a repository"""
279 """verify the consistency of a repository"""
279 return _verify.verify(repo)
280 return _verify.verify(repo)
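Given the signature and docstring of clone() above, a typical call looks like the following hypothetical snippet; the URL and destination directory are made up, and it assumes a Mercurial of this era is importable:

from mercurial import ui as uimod, hg

u = uimod.ui()
# returns the (source, destination) repository pair described in the docstring
src_repo, dest_repo = hg.clone(u, 'http://hg.example.com/repo',
                               dest='repo-copy', pull=False, rev=None,
                               update=True, stream=False)

Because update defaults to True and the destination is local, the new working directory is checked out at tip via _update() above.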
@@ -1,11 +1,16 b''
1 # hgweb/__init__.py - web interface to a mercurial repository
1 # hgweb/__init__.py - web interface to a mercurial repository
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from mercurial.demandload import demandload
9 import hgweb_mod, hgwebdir_mod
10 demandload(globals(), "mercurial.hgweb.hgweb_mod:hgweb")
10
11 demandload(globals(), "mercurial.hgweb.hgwebdir_mod:hgwebdir")
11 def hgweb(*args, **kwargs):
12 return hgweb_mod.hgweb(*args, **kwargs)
13
14 def hgwebdir(*args, **kwargs):
15 return hgwebdir_mod.hgwebdir(*args, **kwargs)
16
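The new package __init__ keeps hgweb and hgwebdir as thin forwarding functions, so with demandimport enabled the heavy hgweb_mod and hgwebdir_mod modules are only loaded when one of them is first called. A hypothetical use, with a made-up repository path and name:

from mercurial.hgweb import hgweb

# constructing the handler forwards to hgweb_mod.hgweb; a plain string
# path is accepted, as the hgweb class further below shows
h = hgweb('/srv/hg/myrepo', name='myrepo')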
@@ -1,63 +1,62 b''
1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes
9 import os, mimetypes
10 import os.path
11
10
12 def get_mtime(repo_path):
11 def get_mtime(repo_path):
13 store_path = os.path.join(repo_path, ".hg")
12 store_path = os.path.join(repo_path, ".hg")
14 if not os.path.isdir(os.path.join(store_path, "data")):
13 if not os.path.isdir(os.path.join(store_path, "data")):
15 store_path = os.path.join(store_path, "store")
14 store_path = os.path.join(store_path, "store")
16 cl_path = os.path.join(store_path, "00changelog.i")
15 cl_path = os.path.join(store_path, "00changelog.i")
17 if os.path.exists(cl_path):
16 if os.path.exists(cl_path):
18 return os.stat(cl_path).st_mtime
17 return os.stat(cl_path).st_mtime
19 else:
18 else:
20 return os.stat(store_path).st_mtime
19 return os.stat(store_path).st_mtime
21
20
22 def staticfile(directory, fname, req):
21 def staticfile(directory, fname, req):
23 """return a file inside directory with guessed content-type header
22 """return a file inside directory with guessed content-type header
24
23
25 fname always uses '/' as directory separator and isn't allowed to
24 fname always uses '/' as directory separator and isn't allowed to
26 contain unusual path components.
25 contain unusual path components.
27 Content-type is guessed using the mimetypes module.
26 Content-type is guessed using the mimetypes module.
28 Return an empty string if fname is illegal or file not found.
27 Return an empty string if fname is illegal or file not found.
29
28
30 """
29 """
31 parts = fname.split('/')
30 parts = fname.split('/')
32 path = directory
31 path = directory
33 for part in parts:
32 for part in parts:
34 if (part in ('', os.curdir, os.pardir) or
33 if (part in ('', os.curdir, os.pardir) or
35 os.sep in part or os.altsep is not None and os.altsep in part):
34 os.sep in part or os.altsep is not None and os.altsep in part):
36 return ""
35 return ""
37 path = os.path.join(path, part)
36 path = os.path.join(path, part)
38 try:
37 try:
39 os.stat(path)
38 os.stat(path)
40 ct = mimetypes.guess_type(path)[0] or "text/plain"
39 ct = mimetypes.guess_type(path)[0] or "text/plain"
41 req.header([('Content-type', ct),
40 req.header([('Content-type', ct),
42 ('Content-length', os.path.getsize(path))])
41 ('Content-length', os.path.getsize(path))])
43 return file(path, 'rb').read()
42 return file(path, 'rb').read()
44 except (TypeError, OSError):
43 except (TypeError, OSError):
45 # illegal fname or unreadable file
44 # illegal fname or unreadable file
46 return ""
45 return ""
47
46
48 def style_map(templatepath, style):
47 def style_map(templatepath, style):
49 """Return path to mapfile for a given style.
48 """Return path to mapfile for a given style.
50
49
51 Searches mapfile in the following locations:
50 Searches mapfile in the following locations:
52 1. templatepath/style/map
51 1. templatepath/style/map
53 2. templatepath/map-style
52 2. templatepath/map-style
54 3. templatepath/map
53 3. templatepath/map
55 """
54 """
56 locations = style and [os.path.join(style, "map"), "map-"+style] or []
55 locations = style and [os.path.join(style, "map"), "map-"+style] or []
57 locations.append("map")
56 locations.append("map")
58 for location in locations:
57 for location in locations:
59 mapfile = os.path.join(templatepath, location)
58 mapfile = os.path.join(templatepath, location)
60 if os.path.isfile(mapfile):
59 if os.path.isfile(mapfile):
61 return mapfile
60 return mapfile
62 raise RuntimeError("No hgweb templates found in %r" % templatepath)
61 raise RuntimeError("No hgweb templates found in %r" % templatepath)
63
62
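style_map() resolves a style name to a template map file using the three-step search order listed in its docstring. A hypothetical call, where the template path and style name are examples only:

from mercurial.hgweb.common import style_map

# tries <templatepath>/gitweb/map, then map-gitweb, then the bare map
# file, and raises RuntimeError if none of them exists
mapfile = style_map('/usr/share/mercurial/templates', 'gitweb')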
@@ -1,1165 +1,1161 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os
9 import os, mimetypes, re, zlib, ConfigParser, mimetools, cStringIO, sys
10 import os.path
10 import tempfile, urllib, bz2
11 import mimetypes
12 from mercurial.demandload import demandload
13 demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
14 demandload(globals(), 'urllib bz2')
15 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone,patch")
16 demandload(globals(), "mercurial:revlog,templater")
17 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile,style_map")
18 from mercurial.node import *
11 from mercurial.node import *
19 from mercurial.i18n import gettext as _
12 from mercurial.i18n import gettext as _
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 from mercurial import revlog, templater
15 from common import get_mtime, staticfile, style_map
20
16
21 def _up(p):
17 def _up(p):
22 if p[0] != "/":
18 if p[0] != "/":
23 p = "/" + p
19 p = "/" + p
24 if p[-1] == "/":
20 if p[-1] == "/":
25 p = p[:-1]
21 p = p[:-1]
26 up = os.path.dirname(p)
22 up = os.path.dirname(p)
27 if up == "/":
23 if up == "/":
28 return "/"
24 return "/"
29 return up + "/"
25 return up + "/"
30
26
31 def revnavgen(pos, pagelen, limit, nodefunc):
27 def revnavgen(pos, pagelen, limit, nodefunc):
32 def seq(factor, limit=None):
28 def seq(factor, limit=None):
33 if limit:
29 if limit:
34 yield limit
30 yield limit
35 if limit >= 20 and limit <= 40:
31 if limit >= 20 and limit <= 40:
36 yield 50
32 yield 50
37 else:
33 else:
38 yield 1 * factor
34 yield 1 * factor
39 yield 3 * factor
35 yield 3 * factor
40 for f in seq(factor * 10):
36 for f in seq(factor * 10):
41 yield f
37 yield f
42
38
43 def nav(**map):
39 def nav(**map):
44 l = []
40 l = []
45 last = 0
41 last = 0
46 for f in seq(1, pagelen):
42 for f in seq(1, pagelen):
47 if f < pagelen or f <= last:
43 if f < pagelen or f <= last:
48 continue
44 continue
49 if f > limit:
45 if f > limit:
50 break
46 break
51 last = f
47 last = f
52 if pos + f < limit:
48 if pos + f < limit:
53 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
54 if pos - f >= 0:
50 if pos - f >= 0:
55 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
56
52
57 try:
53 try:
58 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
59
55
60 for label, node in l:
56 for label, node in l:
61 yield {"label": label, "node": node}
57 yield {"label": label, "node": node}
62
58
63 yield {"label": "tip", "node": "tip"}
59 yield {"label": "tip", "node": "tip"}
64 except hg.RepoError:
60 except hg.RepoError:
65 pass
61 pass
66
62
67 return nav
63 return nav
68
64
69 class hgweb(object):
65 class hgweb(object):
70 def __init__(self, repo, name=None):
66 def __init__(self, repo, name=None):
71 if type(repo) == type(""):
67 if type(repo) == type(""):
72 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
73 else:
69 else:
74 self.repo = repo
70 self.repo = repo
75
71
76 self.mtime = -1
72 self.mtime = -1
77 self.reponame = name
73 self.reponame = name
78 self.archives = 'zip', 'gz', 'bz2'
74 self.archives = 'zip', 'gz', 'bz2'
79 self.stripecount = 1
75 self.stripecount = 1
80 # a repo owner may set web.templates in .hg/hgrc to get any file
76 # a repo owner may set web.templates in .hg/hgrc to get any file
81 # readable by the user running the CGI script
77 # readable by the user running the CGI script
82 self.templatepath = self.config("web", "templates",
78 self.templatepath = self.config("web", "templates",
83 templater.templatepath(),
79 templater.templatepath(),
84 untrusted=False)
80 untrusted=False)
85
81
86 # The CGI scripts are often run by a user different from the repo owner.
82 # The CGI scripts are often run by a user different from the repo owner.
87 # Trust the settings from the .hg/hgrc files by default.
83 # Trust the settings from the .hg/hgrc files by default.
88 def config(self, section, name, default=None, untrusted=True):
84 def config(self, section, name, default=None, untrusted=True):
89 return self.repo.ui.config(section, name, default,
85 return self.repo.ui.config(section, name, default,
90 untrusted=untrusted)
86 untrusted=untrusted)
91
87
92 def configbool(self, section, name, default=False, untrusted=True):
88 def configbool(self, section, name, default=False, untrusted=True):
93 return self.repo.ui.configbool(section, name, default,
89 return self.repo.ui.configbool(section, name, default,
94 untrusted=untrusted)
90 untrusted=untrusted)
95
91
96 def configlist(self, section, name, default=None, untrusted=True):
92 def configlist(self, section, name, default=None, untrusted=True):
97 return self.repo.ui.configlist(section, name, default,
93 return self.repo.ui.configlist(section, name, default,
98 untrusted=untrusted)
94 untrusted=untrusted)
99
95
100 def refresh(self):
96 def refresh(self):
101 mtime = get_mtime(self.repo.root)
97 mtime = get_mtime(self.repo.root)
102 if mtime != self.mtime:
98 if mtime != self.mtime:
103 self.mtime = mtime
99 self.mtime = mtime
104 self.repo = hg.repository(self.repo.ui, self.repo.root)
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
105 self.maxchanges = int(self.config("web", "maxchanges", 10))
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
106 self.stripecount = int(self.config("web", "stripes", 1))
102 self.stripecount = int(self.config("web", "stripes", 1))
107 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
108 self.maxfiles = int(self.config("web", "maxfiles", 10))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
109 self.allowpull = self.configbool("web", "allowpull", True)
105 self.allowpull = self.configbool("web", "allowpull", True)
110
106
111 def archivelist(self, nodeid):
107 def archivelist(self, nodeid):
112 allowed = self.configlist("web", "allow_archive")
108 allowed = self.configlist("web", "allow_archive")
113 for i, spec in self.archive_specs.iteritems():
109 for i, spec in self.archive_specs.iteritems():
114 if i in allowed or self.configbool("web", "allow" + i):
110 if i in allowed or self.configbool("web", "allow" + i):
115 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
111 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
116
112
117 def listfilediffs(self, files, changeset):
113 def listfilediffs(self, files, changeset):
118 for f in files[:self.maxfiles]:
114 for f in files[:self.maxfiles]:
119 yield self.t("filedifflink", node=hex(changeset), file=f)
115 yield self.t("filedifflink", node=hex(changeset), file=f)
120 if len(files) > self.maxfiles:
116 if len(files) > self.maxfiles:
121 yield self.t("fileellipses")
117 yield self.t("fileellipses")
122
118
123 def siblings(self, siblings=[], hiderev=None, **args):
119 def siblings(self, siblings=[], hiderev=None, **args):
124 siblings = [s for s in siblings if s.node() != nullid]
120 siblings = [s for s in siblings if s.node() != nullid]
125 if len(siblings) == 1 and siblings[0].rev() == hiderev:
121 if len(siblings) == 1 and siblings[0].rev() == hiderev:
126 return
122 return
127 for s in siblings:
123 for s in siblings:
128 d = {'node': hex(s.node()), 'rev': s.rev()}
124 d = {'node': hex(s.node()), 'rev': s.rev()}
129 if hasattr(s, 'path'):
125 if hasattr(s, 'path'):
130 d['file'] = s.path()
126 d['file'] = s.path()
131 d.update(args)
127 d.update(args)
132 yield d
128 yield d
133
129
134 def renamelink(self, fl, node):
130 def renamelink(self, fl, node):
135 r = fl.renamed(node)
131 r = fl.renamed(node)
136 if r:
132 if r:
137 return [dict(file=r[0], node=hex(r[1]))]
133 return [dict(file=r[0], node=hex(r[1]))]
138 return []
134 return []
139
135
140 def showtag(self, t1, node=nullid, **args):
136 def showtag(self, t1, node=nullid, **args):
141 for t in self.repo.nodetags(node):
137 for t in self.repo.nodetags(node):
142 yield self.t(t1, tag=t, **args)
138 yield self.t(t1, tag=t, **args)
143
139
144 def diff(self, node1, node2, files):
140 def diff(self, node1, node2, files):
145 def filterfiles(filters, files):
141 def filterfiles(filters, files):
146 l = [x for x in files if x in filters]
142 l = [x for x in files if x in filters]
147
143
148 for t in filters:
144 for t in filters:
149 if t and t[-1] != os.sep:
145 if t and t[-1] != os.sep:
150 t += os.sep
146 t += os.sep
151 l += [x for x in files if x.startswith(t)]
147 l += [x for x in files if x.startswith(t)]
152 return l
148 return l
153
149
154 parity = [0]
150 parity = [0]
155 def diffblock(diff, f, fn):
151 def diffblock(diff, f, fn):
156 yield self.t("diffblock",
152 yield self.t("diffblock",
157 lines=prettyprintlines(diff),
153 lines=prettyprintlines(diff),
158 parity=parity[0],
154 parity=parity[0],
159 file=f,
155 file=f,
160 filenode=hex(fn or nullid))
156 filenode=hex(fn or nullid))
161 parity[0] = 1 - parity[0]
157 parity[0] = 1 - parity[0]
162
158
163 def prettyprintlines(diff):
159 def prettyprintlines(diff):
164 for l in diff.splitlines(1):
160 for l in diff.splitlines(1):
165 if l.startswith('+'):
161 if l.startswith('+'):
166 yield self.t("difflineplus", line=l)
162 yield self.t("difflineplus", line=l)
167 elif l.startswith('-'):
163 elif l.startswith('-'):
168 yield self.t("difflineminus", line=l)
164 yield self.t("difflineminus", line=l)
169 elif l.startswith('@'):
165 elif l.startswith('@'):
170 yield self.t("difflineat", line=l)
166 yield self.t("difflineat", line=l)
171 else:
167 else:
172 yield self.t("diffline", line=l)
168 yield self.t("diffline", line=l)
173
169
174 r = self.repo
170 r = self.repo
175 cl = r.changelog
171 cl = r.changelog
176 mf = r.manifest
172 mf = r.manifest
177 change1 = cl.read(node1)
173 change1 = cl.read(node1)
178 change2 = cl.read(node2)
174 change2 = cl.read(node2)
179 mmap1 = mf.read(change1[0])
175 mmap1 = mf.read(change1[0])
180 mmap2 = mf.read(change2[0])
176 mmap2 = mf.read(change2[0])
181 date1 = util.datestr(change1[2])
177 date1 = util.datestr(change1[2])
182 date2 = util.datestr(change2[2])
178 date2 = util.datestr(change2[2])
183
179
184 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
180 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
185 if files:
181 if files:
186 modified, added, removed = map(lambda x: filterfiles(files, x),
182 modified, added, removed = map(lambda x: filterfiles(files, x),
187 (modified, added, removed))
183 (modified, added, removed))
188
184
189 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
185 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
190 for f in modified:
186 for f in modified:
191 to = r.file(f).read(mmap1[f])
187 to = r.file(f).read(mmap1[f])
192 tn = r.file(f).read(mmap2[f])
188 tn = r.file(f).read(mmap2[f])
193 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
189 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
194 opts=diffopts), f, tn)
190 opts=diffopts), f, tn)
195 for f in added:
191 for f in added:
196 to = None
192 to = None
197 tn = r.file(f).read(mmap2[f])
193 tn = r.file(f).read(mmap2[f])
198 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
194 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
199 opts=diffopts), f, tn)
195 opts=diffopts), f, tn)
200 for f in removed:
196 for f in removed:
201 to = r.file(f).read(mmap1[f])
197 to = r.file(f).read(mmap1[f])
202 tn = None
198 tn = None
203 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
199 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
204 opts=diffopts), f, tn)
200 opts=diffopts), f, tn)
205
201
206 def changelog(self, ctx, shortlog=False):
202 def changelog(self, ctx, shortlog=False):
207 def changelist(**map):
203 def changelist(**map):
208 parity = (start - end) & 1
204 parity = (start - end) & 1
209 cl = self.repo.changelog
205 cl = self.repo.changelog
210 l = [] # build a list in forward order for efficiency
206 l = [] # build a list in forward order for efficiency
211 for i in xrange(start, end):
207 for i in xrange(start, end):
212 ctx = self.repo.changectx(i)
208 ctx = self.repo.changectx(i)
213 n = ctx.node()
209 n = ctx.node()
214
210
215 l.insert(0, {"parity": parity,
211 l.insert(0, {"parity": parity,
216 "author": ctx.user(),
212 "author": ctx.user(),
217 "parent": self.siblings(ctx.parents(), i - 1),
213 "parent": self.siblings(ctx.parents(), i - 1),
218 "child": self.siblings(ctx.children(), i + 1),
214 "child": self.siblings(ctx.children(), i + 1),
219 "changelogtag": self.showtag("changelogtag",n),
215 "changelogtag": self.showtag("changelogtag",n),
220 "desc": ctx.description(),
216 "desc": ctx.description(),
221 "date": ctx.date(),
217 "date": ctx.date(),
222 "files": self.listfilediffs(ctx.files(), n),
218 "files": self.listfilediffs(ctx.files(), n),
223 "rev": i,
219 "rev": i,
224 "node": hex(n)})
220 "node": hex(n)})
225 parity = 1 - parity
221 parity = 1 - parity
226
222
227 for e in l:
223 for e in l:
228 yield e
224 yield e
229
225
230 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
226 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
231 cl = self.repo.changelog
227 cl = self.repo.changelog
232 count = cl.count()
228 count = cl.count()
233 pos = ctx.rev()
229 pos = ctx.rev()
234 start = max(0, pos - maxchanges + 1)
230 start = max(0, pos - maxchanges + 1)
235 end = min(count, start + maxchanges)
231 end = min(count, start + maxchanges)
236 pos = end - 1
232 pos = end - 1
237
233
238 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
234 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
239
235
240 yield self.t(shortlog and 'shortlog' or 'changelog',
236 yield self.t(shortlog and 'shortlog' or 'changelog',
241 changenav=changenav,
237 changenav=changenav,
242 node=hex(cl.tip()),
238 node=hex(cl.tip()),
243 rev=pos, changesets=count, entries=changelist,
239 rev=pos, changesets=count, entries=changelist,
244 archives=self.archivelist("tip"))
240 archives=self.archivelist("tip"))
245
241
246 def search(self, query):
242 def search(self, query):
247
243
248 def changelist(**map):
244 def changelist(**map):
249 cl = self.repo.changelog
245 cl = self.repo.changelog
250 count = 0
246 count = 0
251 qw = query.lower().split()
247 qw = query.lower().split()
252
248
253 def revgen():
249 def revgen():
254 for i in xrange(cl.count() - 1, 0, -100):
250 for i in xrange(cl.count() - 1, 0, -100):
255 l = []
251 l = []
256 for j in xrange(max(0, i - 100), i):
252 for j in xrange(max(0, i - 100), i):
257 ctx = self.repo.changectx(j)
253 ctx = self.repo.changectx(j)
258 l.append(ctx)
254 l.append(ctx)
259 l.reverse()
255 l.reverse()
260 for e in l:
256 for e in l:
261 yield e
257 yield e
262
258
263 for ctx in revgen():
259 for ctx in revgen():
264 miss = 0
260 miss = 0
265 for q in qw:
261 for q in qw:
266 if not (q in ctx.user().lower() or
262 if not (q in ctx.user().lower() or
267 q in ctx.description().lower() or
263 q in ctx.description().lower() or
268 q in " ".join(ctx.files()[:20]).lower()):
264 q in " ".join(ctx.files()[:20]).lower()):
269 miss = 1
265 miss = 1
270 break
266 break
271 if miss:
267 if miss:
272 continue
268 continue
273
269
274 count += 1
270 count += 1
275 n = ctx.node()
271 n = ctx.node()
276
272
277 yield self.t('searchentry',
273 yield self.t('searchentry',
278 parity=self.stripes(count),
274 parity=self.stripes(count),
279 author=ctx.user(),
275 author=ctx.user(),
280 parent=self.siblings(ctx.parents()),
276 parent=self.siblings(ctx.parents()),
281 child=self.siblings(ctx.children()),
277 child=self.siblings(ctx.children()),
282 changelogtag=self.showtag("changelogtag",n),
278 changelogtag=self.showtag("changelogtag",n),
283 desc=ctx.description(),
279 desc=ctx.description(),
284 date=ctx.date(),
280 date=ctx.date(),
285 files=self.listfilediffs(ctx.files(), n),
281 files=self.listfilediffs(ctx.files(), n),
286 rev=ctx.rev(),
282 rev=ctx.rev(),
287 node=hex(n))
283 node=hex(n))
288
284
289 if count >= self.maxchanges:
285 if count >= self.maxchanges:
290 break
286 break
291
287
292 cl = self.repo.changelog
288 cl = self.repo.changelog
293
289
294 yield self.t('search',
290 yield self.t('search',
295 query=query,
291 query=query,
296 node=hex(cl.tip()),
292 node=hex(cl.tip()),
297 entries=changelist)
293 entries=changelist)
298
294
299 def changeset(self, ctx):
295 def changeset(self, ctx):
300 n = ctx.node()
296 n = ctx.node()
301 parents = ctx.parents()
297 parents = ctx.parents()
302 p1 = parents[0].node()
298 p1 = parents[0].node()
303
299
304 files = []
300 files = []
305 parity = 0
301 parity = 0
306 for f in ctx.files():
302 for f in ctx.files():
307 files.append(self.t("filenodelink",
303 files.append(self.t("filenodelink",
308 node=hex(n), file=f,
304 node=hex(n), file=f,
309 parity=parity))
305 parity=parity))
310 parity = 1 - parity
306 parity = 1 - parity
311
307
312 def diff(**map):
308 def diff(**map):
313 yield self.diff(p1, n, None)
309 yield self.diff(p1, n, None)
314
310
315 yield self.t('changeset',
311 yield self.t('changeset',
316 diff=diff,
312 diff=diff,
317 rev=ctx.rev(),
313 rev=ctx.rev(),
318 node=hex(n),
314 node=hex(n),
319 parent=self.siblings(parents),
315 parent=self.siblings(parents),
320 child=self.siblings(ctx.children()),
316 child=self.siblings(ctx.children()),
321 changesettag=self.showtag("changesettag",n),
317 changesettag=self.showtag("changesettag",n),
322 author=ctx.user(),
318 author=ctx.user(),
323 desc=ctx.description(),
319 desc=ctx.description(),
324 date=ctx.date(),
320 date=ctx.date(),
325 files=files,
321 files=files,
326 archives=self.archivelist(hex(n)))
322 archives=self.archivelist(hex(n)))
327
323
328 def filelog(self, fctx):
324 def filelog(self, fctx):
329 f = fctx.path()
325 f = fctx.path()
330 fl = fctx.filelog()
326 fl = fctx.filelog()
331 count = fl.count()
327 count = fl.count()
332 pagelen = self.maxshortchanges
328 pagelen = self.maxshortchanges
333 pos = fctx.filerev()
329 pos = fctx.filerev()
334 start = max(0, pos - pagelen + 1)
330 start = max(0, pos - pagelen + 1)
335 end = min(count, start + pagelen)
331 end = min(count, start + pagelen)
336 pos = end - 1
332 pos = end - 1
337
333
338 def entries(**map):
334 def entries(**map):
339 l = []
335 l = []
340 parity = (count - 1) & 1
336 parity = (count - 1) & 1
341
337
342 for i in xrange(start, end):
338 for i in xrange(start, end):
343 ctx = fctx.filectx(i)
339 ctx = fctx.filectx(i)
344 n = fl.node(i)
340 n = fl.node(i)
345
341
346 l.insert(0, {"parity": parity,
342 l.insert(0, {"parity": parity,
347 "filerev": i,
343 "filerev": i,
348 "file": f,
344 "file": f,
349 "node": hex(ctx.node()),
345 "node": hex(ctx.node()),
350 "author": ctx.user(),
346 "author": ctx.user(),
351 "date": ctx.date(),
347 "date": ctx.date(),
352 "rename": self.renamelink(fl, n),
348 "rename": self.renamelink(fl, n),
353 "parent": self.siblings(fctx.parents()),
349 "parent": self.siblings(fctx.parents()),
354 "child": self.siblings(fctx.children()),
350 "child": self.siblings(fctx.children()),
355 "desc": ctx.description()})
351 "desc": ctx.description()})
356 parity = 1 - parity
352 parity = 1 - parity
357
353
358 for e in l:
354 for e in l:
359 yield e
355 yield e
360
356
361 nodefunc = lambda x: fctx.filectx(fileid=x)
357 nodefunc = lambda x: fctx.filectx(fileid=x)
362 nav = revnavgen(pos, pagelen, count, nodefunc)
358 nav = revnavgen(pos, pagelen, count, nodefunc)
363 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
359 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
364 entries=entries)
360 entries=entries)
365
361
366 def filerevision(self, fctx):
362 def filerevision(self, fctx):
367 f = fctx.path()
363 f = fctx.path()
368 text = fctx.data()
364 text = fctx.data()
369 fl = fctx.filelog()
365 fl = fctx.filelog()
370 n = fctx.filenode()
366 n = fctx.filenode()
371
367
372 mt = mimetypes.guess_type(f)[0]
368 mt = mimetypes.guess_type(f)[0]
373 rawtext = text
369 rawtext = text
374 if util.binary(text):
370 if util.binary(text):
375 mt = mt or 'application/octet-stream'
371 mt = mt or 'application/octet-stream'
376 text = "(binary:%s)" % mt
372 text = "(binary:%s)" % mt
377 mt = mt or 'text/plain'
373 mt = mt or 'text/plain'
378
374
379 def lines():
375 def lines():
380 for l, t in enumerate(text.splitlines(1)):
376 for l, t in enumerate(text.splitlines(1)):
381 yield {"line": t,
377 yield {"line": t,
382 "linenumber": "% 6d" % (l + 1),
378 "linenumber": "% 6d" % (l + 1),
383 "parity": self.stripes(l)}
379 "parity": self.stripes(l)}
384
380
385 yield self.t("filerevision",
381 yield self.t("filerevision",
386 file=f,
382 file=f,
387 path=_up(f),
383 path=_up(f),
388 text=lines(),
384 text=lines(),
389 raw=rawtext,
385 raw=rawtext,
390 mimetype=mt,
386 mimetype=mt,
391 rev=fctx.rev(),
387 rev=fctx.rev(),
392 node=hex(fctx.node()),
388 node=hex(fctx.node()),
393 author=fctx.user(),
389 author=fctx.user(),
394 date=fctx.date(),
390 date=fctx.date(),
395 desc=fctx.description(),
391 desc=fctx.description(),
396 parent=self.siblings(fctx.parents()),
392 parent=self.siblings(fctx.parents()),
397 child=self.siblings(fctx.children()),
393 child=self.siblings(fctx.children()),
398 rename=self.renamelink(fl, n),
394 rename=self.renamelink(fl, n),
399 permissions=fctx.manifest().execf(f))
395 permissions=fctx.manifest().execf(f))
400
396
401 def fileannotate(self, fctx):
397 def fileannotate(self, fctx):
402 f = fctx.path()
398 f = fctx.path()
403 n = fctx.filenode()
399 n = fctx.filenode()
404 fl = fctx.filelog()
400 fl = fctx.filelog()
405
401
406 def annotate(**map):
402 def annotate(**map):
407 parity = 0
403 parity = 0
408 last = None
404 last = None
409 for f, l in fctx.annotate(follow=True):
405 for f, l in fctx.annotate(follow=True):
410 fnode = f.filenode()
406 fnode = f.filenode()
411 name = self.repo.ui.shortuser(f.user())
407 name = self.repo.ui.shortuser(f.user())
412
408
413 if last != fnode:
409 if last != fnode:
414 parity = 1 - parity
410 parity = 1 - parity
415 last = fnode
411 last = fnode
416
412
417 yield {"parity": parity,
413 yield {"parity": parity,
418 "node": hex(f.node()),
414 "node": hex(f.node()),
419 "rev": f.rev(),
415 "rev": f.rev(),
420 "author": name,
416 "author": name,
421 "file": f.path(),
417 "file": f.path(),
422 "line": l}
418 "line": l}
423
419
424 yield self.t("fileannotate",
420 yield self.t("fileannotate",
425 file=f,
421 file=f,
426 annotate=annotate,
422 annotate=annotate,
427 path=_up(f),
423 path=_up(f),
428 rev=fctx.rev(),
424 rev=fctx.rev(),
429 node=hex(fctx.node()),
425 node=hex(fctx.node()),
430 author=fctx.user(),
426 author=fctx.user(),
431 date=fctx.date(),
427 date=fctx.date(),
432 desc=fctx.description(),
428 desc=fctx.description(),
433 rename=self.renamelink(fl, n),
429 rename=self.renamelink(fl, n),
434 parent=self.siblings(fctx.parents()),
430 parent=self.siblings(fctx.parents()),
435 child=self.siblings(fctx.children()),
431 child=self.siblings(fctx.children()),
436 permissions=fctx.manifest().execf(f))
432 permissions=fctx.manifest().execf(f))
437
433
438 def manifest(self, ctx, path):
434 def manifest(self, ctx, path):
439 mf = ctx.manifest()
435 mf = ctx.manifest()
440 node = ctx.node()
436 node = ctx.node()
441
437
442 files = {}
438 files = {}
443
439
444 if path and path[-1] != "/":
440 if path and path[-1] != "/":
445 path += "/"
441 path += "/"
446 l = len(path)
442 l = len(path)
447 abspath = "/" + path
443 abspath = "/" + path
448
444
449 for f, n in mf.items():
445 for f, n in mf.items():
450 if f[:l] != path:
446 if f[:l] != path:
451 continue
447 continue
452 remain = f[l:]
448 remain = f[l:]
453 if "/" in remain:
449 if "/" in remain:
454 short = remain[:remain.index("/") + 1] # bleah
450 short = remain[:remain.index("/") + 1] # bleah
455 files[short] = (f, None)
451 files[short] = (f, None)
456 else:
452 else:
457 short = os.path.basename(remain)
453 short = os.path.basename(remain)
458 files[short] = (f, n)
454 files[short] = (f, n)
459
455
460 def filelist(**map):
456 def filelist(**map):
461 parity = 0
457 parity = 0
462 fl = files.keys()
458 fl = files.keys()
463 fl.sort()
459 fl.sort()
464 for f in fl:
460 for f in fl:
465 full, fnode = files[f]
461 full, fnode = files[f]
466 if not fnode:
462 if not fnode:
467 continue
463 continue
468
464
469 yield {"file": full,
465 yield {"file": full,
470 "parity": self.stripes(parity),
466 "parity": self.stripes(parity),
471 "basename": f,
467 "basename": f,
472 "size": ctx.filectx(full).size(),
468 "size": ctx.filectx(full).size(),
473 "permissions": mf.execf(full)}
469 "permissions": mf.execf(full)}
474 parity += 1
470 parity += 1
475
471
476 def dirlist(**map):
472 def dirlist(**map):
477 parity = 0
473 parity = 0
478 fl = files.keys()
474 fl = files.keys()
479 fl.sort()
475 fl.sort()
480 for f in fl:
476 for f in fl:
481 full, fnode = files[f]
477 full, fnode = files[f]
482 if fnode:
478 if fnode:
483 continue
479 continue
484
480
485 yield {"parity": self.stripes(parity),
481 yield {"parity": self.stripes(parity),
486 "path": os.path.join(abspath, f),
482 "path": os.path.join(abspath, f),
487 "basename": f[:-1]}
483 "basename": f[:-1]}
488 parity += 1
484 parity += 1
489
485
490 yield self.t("manifest",
486 yield self.t("manifest",
491 rev=ctx.rev(),
487 rev=ctx.rev(),
492 node=hex(node),
488 node=hex(node),
493 path=abspath,
489 path=abspath,
494 up=_up(abspath),
490 up=_up(abspath),
495 fentries=filelist,
491 fentries=filelist,
496 dentries=dirlist,
492 dentries=dirlist,
497 archives=self.archivelist(hex(node)))
493 archives=self.archivelist(hex(node)))
498
494
499 def tags(self):
495 def tags(self):
500 cl = self.repo.changelog
496 cl = self.repo.changelog
501
497
502 i = self.repo.tagslist()
498 i = self.repo.tagslist()
503 i.reverse()
499 i.reverse()
504
500
505 def entries(notip=False, **map):
501 def entries(notip=False, **map):
506 parity = 0
502 parity = 0
507 for k, n in i:
503 for k, n in i:
508 if notip and k == "tip":
504 if notip and k == "tip":
509 continue
505 continue
510 yield {"parity": self.stripes(parity),
506 yield {"parity": self.stripes(parity),
511 "tag": k,
507 "tag": k,
512 "date": cl.read(n)[2],
508 "date": cl.read(n)[2],
513 "node": hex(n)}
509 "node": hex(n)}
514 parity += 1
510 parity += 1
515
511
516 yield self.t("tags",
512 yield self.t("tags",
517 node=hex(self.repo.changelog.tip()),
513 node=hex(self.repo.changelog.tip()),
518 entries=lambda **x: entries(False, **x),
514 entries=lambda **x: entries(False, **x),
519 entriesnotip=lambda **x: entries(True, **x))
515 entriesnotip=lambda **x: entries(True, **x))
520
516
521 def summary(self):
517 def summary(self):
522 cl = self.repo.changelog
518 cl = self.repo.changelog
523
519
524 i = self.repo.tagslist()
520 i = self.repo.tagslist()
525 i.reverse()
521 i.reverse()
526
522
527 def tagentries(**map):
523 def tagentries(**map):
528 parity = 0
524 parity = 0
529 count = 0
525 count = 0
530 for k, n in i:
526 for k, n in i:
531 if k == "tip": # skip tip
527 if k == "tip": # skip tip
532 continue;
528 continue;
533
529
534 count += 1
530 count += 1
535 if count > 10: # limit to 10 tags
531 if count > 10: # limit to 10 tags
536 break;
532 break;
537
533
538 c = cl.read(n)
534 c = cl.read(n)
539 t = c[2]
535 t = c[2]
540
536
541 yield self.t("tagentry",
537 yield self.t("tagentry",
542 parity = self.stripes(parity),
538 parity = self.stripes(parity),
543 tag = k,
539 tag = k,
544 node = hex(n),
540 node = hex(n),
545 date = t)
541 date = t)
546 parity += 1
542 parity += 1
547
543
548 def heads(**map):
544 def heads(**map):
549 parity = 0
545 parity = 0
550 count = 0
546 count = 0
551
547
552 for node in self.repo.heads():
548 for node in self.repo.heads():
553 count += 1
549 count += 1
554 if count > 10:
550 if count > 10:
555 break;
551 break;
556
552
557 ctx = self.repo.changectx(node)
553 ctx = self.repo.changectx(node)
558
554
559 yield {'parity': self.stripes(parity),
555 yield {'parity': self.stripes(parity),
560 'branch': ctx.branch(),
556 'branch': ctx.branch(),
561 'node': hex(node),
557 'node': hex(node),
562 'date': ctx.date()}
558 'date': ctx.date()}
563 parity += 1
559 parity += 1
564
560
565 def changelist(**map):
561 def changelist(**map):
566 parity = 0
562 parity = 0
567 cl = self.repo.changelog
563 cl = self.repo.changelog
568 l = [] # build a list in forward order for efficiency
564 l = [] # build a list in forward order for efficiency
569 for i in xrange(start, end):
565 for i in xrange(start, end):
570 n = cl.node(i)
566 n = cl.node(i)
571 changes = cl.read(n)
567 changes = cl.read(n)
572 hn = hex(n)
568 hn = hex(n)
573 t = changes[2]
569 t = changes[2]
574
570
575 l.insert(0, self.t(
571 l.insert(0, self.t(
576 'shortlogentry',
572 'shortlogentry',
577 parity = parity,
573 parity = parity,
578 author = changes[1],
574 author = changes[1],
579 desc = changes[4],
575 desc = changes[4],
580 date = t,
576 date = t,
581 rev = i,
577 rev = i,
582 node = hn))
578 node = hn))
583 parity = 1 - parity
579 parity = 1 - parity
584
580
585 yield l
581 yield l
586
582
587 count = cl.count()
583 count = cl.count()
588 start = max(0, count - self.maxchanges)
584 start = max(0, count - self.maxchanges)
589 end = min(count, start + self.maxchanges)
585 end = min(count, start + self.maxchanges)
590
586
591 yield self.t("summary",
587 yield self.t("summary",
592 desc = self.config("web", "description", "unknown"),
588 desc = self.config("web", "description", "unknown"),
593 owner = (self.config("ui", "username") or # preferred
589 owner = (self.config("ui", "username") or # preferred
594 self.config("web", "contact") or # deprecated
590 self.config("web", "contact") or # deprecated
595 self.config("web", "author", "unknown")), # also
591 self.config("web", "author", "unknown")), # also
596 lastchange = cl.read(cl.tip())[2],
592 lastchange = cl.read(cl.tip())[2],
597 tags = tagentries,
593 tags = tagentries,
598 heads = heads,
594 heads = heads,
599 shortlog = changelist,
595 shortlog = changelist,
600 node = hex(cl.tip()),
596 node = hex(cl.tip()),
601 archives=self.archivelist("tip"))
597 archives=self.archivelist("tip"))
602
598
603 def filediff(self, fctx):
599 def filediff(self, fctx):
604 n = fctx.node()
600 n = fctx.node()
605 path = fctx.path()
601 path = fctx.path()
606 parents = fctx.parents()
602 parents = fctx.parents()
607 p1 = parents and parents[0].node() or nullid
603 p1 = parents and parents[0].node() or nullid
608
604
609 def diff(**map):
605 def diff(**map):
610 yield self.diff(p1, n, [path])
606 yield self.diff(p1, n, [path])
611
607
612 yield self.t("filediff",
608 yield self.t("filediff",
613 file=path,
609 file=path,
614 node=hex(n),
610 node=hex(n),
615 rev=fctx.rev(),
611 rev=fctx.rev(),
616 parent=self.siblings(parents),
612 parent=self.siblings(parents),
617 child=self.siblings(fctx.children()),
613 child=self.siblings(fctx.children()),
618 diff=diff)
614 diff=diff)
619
615
620 archive_specs = {
616 archive_specs = {
621 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
617 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
622 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
618 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
623 'zip': ('application/zip', 'zip', '.zip', None),
619 'zip': ('application/zip', 'zip', '.zip', None),
624 }
620 }
625
621
626 def archive(self, req, cnode, type_):
622 def archive(self, req, cnode, type_):
627 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
623 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
628 name = "%s-%s" % (reponame, short(cnode))
624 name = "%s-%s" % (reponame, short(cnode))
629 mimetype, artype, extension, encoding = self.archive_specs[type_]
625 mimetype, artype, extension, encoding = self.archive_specs[type_]
630 headers = [('Content-type', mimetype),
626 headers = [('Content-type', mimetype),
631 ('Content-disposition', 'attachment; filename=%s%s' %
627 ('Content-disposition', 'attachment; filename=%s%s' %
632 (name, extension))]
628 (name, extension))]
633 if encoding:
629 if encoding:
634 headers.append(('Content-encoding', encoding))
630 headers.append(('Content-encoding', encoding))
635 req.header(headers)
631 req.header(headers)
636 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
632 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
637
633
638 # add tags to things
634 # add tags to things
639 # tags -> list of changesets corresponding to tags
635 # tags -> list of changesets corresponding to tags
640 # find tag, changeset, file
636 # find tag, changeset, file
641
637
642 def cleanpath(self, path):
638 def cleanpath(self, path):
643 path = path.lstrip('/')
639 path = path.lstrip('/')
644 return util.canonpath(self.repo.root, '', path)
640 return util.canonpath(self.repo.root, '', path)
645
641
646 def run(self):
642 def run(self):
647 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
643 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
648 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
644 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
649 import mercurial.hgweb.wsgicgi as wsgicgi
645 import mercurial.hgweb.wsgicgi as wsgicgi
650 from request import wsgiapplication
646 from request import wsgiapplication
651 def make_web_app():
647 def make_web_app():
652 return self
648 return self
653 wsgicgi.launch(wsgiapplication(make_web_app))
649 wsgicgi.launch(wsgiapplication(make_web_app))
654
650
655 def run_wsgi(self, req):
651 def run_wsgi(self, req):
656 def header(**map):
652 def header(**map):
657 header_file = cStringIO.StringIO(
653 header_file = cStringIO.StringIO(
658 ''.join(self.t("header", encoding = util._encoding, **map)))
654 ''.join(self.t("header", encoding = util._encoding, **map)))
659 msg = mimetools.Message(header_file, 0)
655 msg = mimetools.Message(header_file, 0)
660 req.header(msg.items())
656 req.header(msg.items())
661 yield header_file.read()
657 yield header_file.read()
662
658
663 def rawfileheader(**map):
659 def rawfileheader(**map):
664 req.header([('Content-type', map['mimetype']),
660 req.header([('Content-type', map['mimetype']),
665 ('Content-disposition', 'filename=%s' % map['file']),
661 ('Content-disposition', 'filename=%s' % map['file']),
666 ('Content-length', str(len(map['raw'])))])
662 ('Content-length', str(len(map['raw'])))])
667 yield ''
663 yield ''
668
664
669 def footer(**map):
665 def footer(**map):
670 yield self.t("footer", **map)
666 yield self.t("footer", **map)
671
667
672 def motd(**map):
668 def motd(**map):
673 yield self.config("web", "motd", "")
669 yield self.config("web", "motd", "")
674
670
675 def expand_form(form):
671 def expand_form(form):
676 shortcuts = {
672 shortcuts = {
677 'cl': [('cmd', ['changelog']), ('rev', None)],
673 'cl': [('cmd', ['changelog']), ('rev', None)],
678 'sl': [('cmd', ['shortlog']), ('rev', None)],
674 'sl': [('cmd', ['shortlog']), ('rev', None)],
679 'cs': [('cmd', ['changeset']), ('node', None)],
675 'cs': [('cmd', ['changeset']), ('node', None)],
680 'f': [('cmd', ['file']), ('filenode', None)],
676 'f': [('cmd', ['file']), ('filenode', None)],
681 'fl': [('cmd', ['filelog']), ('filenode', None)],
677 'fl': [('cmd', ['filelog']), ('filenode', None)],
682 'fd': [('cmd', ['filediff']), ('node', None)],
678 'fd': [('cmd', ['filediff']), ('node', None)],
683 'fa': [('cmd', ['annotate']), ('filenode', None)],
679 'fa': [('cmd', ['annotate']), ('filenode', None)],
684 'mf': [('cmd', ['manifest']), ('manifest', None)],
680 'mf': [('cmd', ['manifest']), ('manifest', None)],
685 'ca': [('cmd', ['archive']), ('node', None)],
681 'ca': [('cmd', ['archive']), ('node', None)],
686 'tags': [('cmd', ['tags'])],
682 'tags': [('cmd', ['tags'])],
687 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
683 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
688 'static': [('cmd', ['static']), ('file', None)]
684 'static': [('cmd', ['static']), ('file', None)]
689 }
685 }
690
686
691 for k in shortcuts.iterkeys():
687 for k in shortcuts.iterkeys():
692 if form.has_key(k):
688 if form.has_key(k):
693 for name, value in shortcuts[k]:
689 for name, value in shortcuts[k]:
694 if value is None:
690 if value is None:
695 value = form[k]
691 value = form[k]
696 form[name] = value
692 form[name] = value
697 del form[k]
693 del form[k]
698
694
699 def rewrite_request(req):
695 def rewrite_request(req):
700 '''translate new web interface to traditional format'''
696 '''translate new web interface to traditional format'''
701
697
702 def spliturl(req):
698 def spliturl(req):
703 def firstitem(query):
699 def firstitem(query):
704 return query.split('&', 1)[0].split(';', 1)[0]
700 return query.split('&', 1)[0].split(';', 1)[0]
705
701
706 def normurl(url):
702 def normurl(url):
707 inner = '/'.join([x for x in url.split('/') if x])
703 inner = '/'.join([x for x in url.split('/') if x])
708 tl = len(url) > 1 and url.endswith('/') and '/' or ''
704 tl = len(url) > 1 and url.endswith('/') and '/' or ''
709
705
710 return '%s%s%s' % (url.startswith('/') and '/' or '',
706 return '%s%s%s' % (url.startswith('/') and '/' or '',
711 inner, tl)
707 inner, tl)
712
708
713 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
709 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
714 pi = normurl(req.env.get('PATH_INFO', ''))
710 pi = normurl(req.env.get('PATH_INFO', ''))
715 if pi:
711 if pi:
716 # strip leading /
712 # strip leading /
717 pi = pi[1:]
713 pi = pi[1:]
718 if pi:
714 if pi:
719 root = root[:-len(pi)]
715 root = root[:-len(pi)]
720 if req.env.has_key('REPO_NAME'):
716 if req.env.has_key('REPO_NAME'):
721 rn = req.env['REPO_NAME'] + '/'
717 rn = req.env['REPO_NAME'] + '/'
722 root += rn
718 root += rn
723 query = pi[len(rn):]
719 query = pi[len(rn):]
724 else:
720 else:
725 query = pi
721 query = pi
726 else:
722 else:
727 root += '?'
723 root += '?'
728 query = firstitem(req.env['QUERY_STRING'])
724 query = firstitem(req.env['QUERY_STRING'])
729
725
730 return (root, query)
726 return (root, query)
731
727
732 req.url, query = spliturl(req)
728 req.url, query = spliturl(req)
733
729
734 if req.form.has_key('cmd'):
730 if req.form.has_key('cmd'):
735 # old style
731 # old style
736 return
732 return
737
733
738 args = query.split('/', 2)
734 args = query.split('/', 2)
739 if not args or not args[0]:
735 if not args or not args[0]:
740 return
736 return
741
737
742 cmd = args.pop(0)
738 cmd = args.pop(0)
743 style = cmd.rfind('-')
739 style = cmd.rfind('-')
744 if style != -1:
740 if style != -1:
745 req.form['style'] = [cmd[:style]]
741 req.form['style'] = [cmd[:style]]
746 cmd = cmd[style+1:]
742 cmd = cmd[style+1:]
747 # avoid accepting e.g. style parameter as command
743 # avoid accepting e.g. style parameter as command
748 if hasattr(self, 'do_' + cmd):
744 if hasattr(self, 'do_' + cmd):
749 req.form['cmd'] = [cmd]
745 req.form['cmd'] = [cmd]
750
746
751 if args and args[0]:
747 if args and args[0]:
752 node = args.pop(0)
748 node = args.pop(0)
753 req.form['node'] = [node]
749 req.form['node'] = [node]
754 if args:
750 if args:
755 req.form['file'] = args
751 req.form['file'] = args
756
752
757 if cmd == 'static':
753 if cmd == 'static':
758 req.form['file'] = req.form['node']
754 req.form['file'] = req.form['node']
759 elif cmd == 'archive':
755 elif cmd == 'archive':
760 fn = req.form['node'][0]
756 fn = req.form['node'][0]
761 for type_, spec in self.archive_specs.iteritems():
757 for type_, spec in self.archive_specs.iteritems():
762 ext = spec[2]
758 ext = spec[2]
763 if fn.endswith(ext):
759 if fn.endswith(ext):
764 req.form['node'] = [fn[:-len(ext)]]
760 req.form['node'] = [fn[:-len(ext)]]
765 req.form['type'] = [type_]
761 req.form['type'] = [type_]
766
762
767 def sessionvars(**map):
763 def sessionvars(**map):
768 fields = []
764 fields = []
769 if req.form.has_key('style'):
765 if req.form.has_key('style'):
770 style = req.form['style'][0]
766 style = req.form['style'][0]
771 if style != self.config('web', 'style', ''):
767 if style != self.config('web', 'style', ''):
772 fields.append(('style', style))
768 fields.append(('style', style))
773
769
774 separator = req.url[-1] == '?' and ';' or '?'
770 separator = req.url[-1] == '?' and ';' or '?'
775 for name, value in fields:
771 for name, value in fields:
776 yield dict(name=name, value=value, separator=separator)
772 yield dict(name=name, value=value, separator=separator)
777 separator = ';'
773 separator = ';'
778
774
779 self.refresh()
775 self.refresh()
780
776
781 expand_form(req.form)
777 expand_form(req.form)
782 rewrite_request(req)
778 rewrite_request(req)
783
779
784 style = self.config("web", "style", "")
780 style = self.config("web", "style", "")
785 if req.form.has_key('style'):
781 if req.form.has_key('style'):
786 style = req.form['style'][0]
782 style = req.form['style'][0]
787 mapfile = style_map(self.templatepath, style)
783 mapfile = style_map(self.templatepath, style)
788
784
789 port = req.env["SERVER_PORT"]
785 port = req.env["SERVER_PORT"]
790 port = port != "80" and (":" + port) or ""
786 port = port != "80" and (":" + port) or ""
791 urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
787 urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
792
788
793 if not self.reponame:
789 if not self.reponame:
794 self.reponame = (self.config("web", "name")
790 self.reponame = (self.config("web", "name")
795 or req.env.get('REPO_NAME')
791 or req.env.get('REPO_NAME')
796 or req.url.strip('/') or self.repo.root)
792 or req.url.strip('/') or self.repo.root)
797
793
798 self.t = templater.templater(mapfile, templater.common_filters,
794 self.t = templater.templater(mapfile, templater.common_filters,
799 defaults={"url": req.url,
795 defaults={"url": req.url,
800 "urlbase": urlbase,
796 "urlbase": urlbase,
801 "repo": self.reponame,
797 "repo": self.reponame,
802 "header": header,
798 "header": header,
803 "footer": footer,
799 "footer": footer,
804 "motd": motd,
800 "motd": motd,
805 "rawfileheader": rawfileheader,
801 "rawfileheader": rawfileheader,
806 "sessionvars": sessionvars
802 "sessionvars": sessionvars
807 })
803 })
808
804
809 if not req.form.has_key('cmd'):
805 if not req.form.has_key('cmd'):
810 req.form['cmd'] = [self.t.cache['default']]
806 req.form['cmd'] = [self.t.cache['default']]
811
807
812 cmd = req.form['cmd'][0]
808 cmd = req.form['cmd'][0]
813
809
814 method = getattr(self, 'do_' + cmd, None)
810 method = getattr(self, 'do_' + cmd, None)
815 if method:
811 if method:
816 try:
812 try:
817 method(req)
813 method(req)
818 except (hg.RepoError, revlog.RevlogError), inst:
814 except (hg.RepoError, revlog.RevlogError), inst:
819 req.write(self.t("error", error=str(inst)))
815 req.write(self.t("error", error=str(inst)))
820 else:
816 else:
821 req.write(self.t("error", error='No such method: ' + cmd))
817 req.write(self.t("error", error='No such method: ' + cmd))
822
818
823 def changectx(self, req):
819 def changectx(self, req):
824 if req.form.has_key('node'):
820 if req.form.has_key('node'):
825 changeid = req.form['node'][0]
821 changeid = req.form['node'][0]
826 elif req.form.has_key('manifest'):
822 elif req.form.has_key('manifest'):
827 changeid = req.form['manifest'][0]
823 changeid = req.form['manifest'][0]
828 else:
824 else:
829 changeid = self.repo.changelog.count() - 1
825 changeid = self.repo.changelog.count() - 1
830
826
831 try:
827 try:
832 ctx = self.repo.changectx(changeid)
828 ctx = self.repo.changectx(changeid)
833 except hg.RepoError:
829 except hg.RepoError:
834 man = self.repo.manifest
830 man = self.repo.manifest
835 mn = man.lookup(changeid)
831 mn = man.lookup(changeid)
836 ctx = self.repo.changectx(man.linkrev(mn))
832 ctx = self.repo.changectx(man.linkrev(mn))
837
833
838 return ctx
834 return ctx
839
835
840 def filectx(self, req):
836 def filectx(self, req):
841 path = self.cleanpath(req.form['file'][0])
837 path = self.cleanpath(req.form['file'][0])
842 if req.form.has_key('node'):
838 if req.form.has_key('node'):
843 changeid = req.form['node'][0]
839 changeid = req.form['node'][0]
844 else:
840 else:
845 changeid = req.form['filenode'][0]
841 changeid = req.form['filenode'][0]
846 try:
842 try:
847 ctx = self.repo.changectx(changeid)
843 ctx = self.repo.changectx(changeid)
848 fctx = ctx.filectx(path)
844 fctx = ctx.filectx(path)
849 except hg.RepoError:
845 except hg.RepoError:
850 fctx = self.repo.filectx(path, fileid=changeid)
846 fctx = self.repo.filectx(path, fileid=changeid)
851
847
852 return fctx
848 return fctx
853
849
854 def stripes(self, parity):
850 def stripes(self, parity):
855 "make horizontal stripes for easier reading"
851 "make horizontal stripes for easier reading"
856 if self.stripecount:
852 if self.stripecount:
857 return (1 + parity / self.stripecount) & 1
853 return (1 + parity / self.stripecount) & 1
858 else:
854 else:
859 return 0
855 return 0
860
856
861 def do_log(self, req):
857 def do_log(self, req):
862 if req.form.has_key('file') and req.form['file'][0]:
858 if req.form.has_key('file') and req.form['file'][0]:
863 self.do_filelog(req)
859 self.do_filelog(req)
864 else:
860 else:
865 self.do_changelog(req)
861 self.do_changelog(req)
866
862
867 def do_rev(self, req):
863 def do_rev(self, req):
868 self.do_changeset(req)
864 self.do_changeset(req)
869
865
870 def do_file(self, req):
866 def do_file(self, req):
871 path = self.cleanpath(req.form.get('file', [''])[0])
867 path = self.cleanpath(req.form.get('file', [''])[0])
872 if path:
868 if path:
873 try:
869 try:
874 req.write(self.filerevision(self.filectx(req)))
870 req.write(self.filerevision(self.filectx(req)))
875 return
871 return
876 except hg.RepoError:
872 except hg.RepoError:
877 pass
873 pass
878
874
879 req.write(self.manifest(self.changectx(req), path))
875 req.write(self.manifest(self.changectx(req), path))
880
876
881 def do_diff(self, req):
877 def do_diff(self, req):
882 self.do_filediff(req)
878 self.do_filediff(req)
883
879
884 def do_changelog(self, req, shortlog = False):
880 def do_changelog(self, req, shortlog = False):
885 if req.form.has_key('node'):
881 if req.form.has_key('node'):
886 ctx = self.changectx(req)
882 ctx = self.changectx(req)
887 else:
883 else:
888 if req.form.has_key('rev'):
884 if req.form.has_key('rev'):
889 hi = req.form['rev'][0]
885 hi = req.form['rev'][0]
890 else:
886 else:
891 hi = self.repo.changelog.count() - 1
887 hi = self.repo.changelog.count() - 1
892 try:
888 try:
893 ctx = self.repo.changectx(hi)
889 ctx = self.repo.changectx(hi)
894 except hg.RepoError:
890 except hg.RepoError:
895 req.write(self.search(hi)) # XXX redirect to 404 page?
891 req.write(self.search(hi)) # XXX redirect to 404 page?
896 return
892 return
897
893
898 req.write(self.changelog(ctx, shortlog = shortlog))
894 req.write(self.changelog(ctx, shortlog = shortlog))
899
895
900 def do_shortlog(self, req):
896 def do_shortlog(self, req):
901 self.do_changelog(req, shortlog = True)
897 self.do_changelog(req, shortlog = True)
902
898
903 def do_changeset(self, req):
899 def do_changeset(self, req):
904 req.write(self.changeset(self.changectx(req)))
900 req.write(self.changeset(self.changectx(req)))
905
901
906 def do_manifest(self, req):
902 def do_manifest(self, req):
907 req.write(self.manifest(self.changectx(req),
903 req.write(self.manifest(self.changectx(req),
908 self.cleanpath(req.form['path'][0])))
904 self.cleanpath(req.form['path'][0])))
909
905
910 def do_tags(self, req):
906 def do_tags(self, req):
911 req.write(self.tags())
907 req.write(self.tags())
912
908
913 def do_summary(self, req):
909 def do_summary(self, req):
914 req.write(self.summary())
910 req.write(self.summary())
915
911
916 def do_filediff(self, req):
912 def do_filediff(self, req):
917 req.write(self.filediff(self.filectx(req)))
913 req.write(self.filediff(self.filectx(req)))
918
914
919 def do_annotate(self, req):
915 def do_annotate(self, req):
920 req.write(self.fileannotate(self.filectx(req)))
916 req.write(self.fileannotate(self.filectx(req)))
921
917
922 def do_filelog(self, req):
918 def do_filelog(self, req):
923 req.write(self.filelog(self.filectx(req)))
919 req.write(self.filelog(self.filectx(req)))
924
920
925 def do_lookup(self, req):
921 def do_lookup(self, req):
926 try:
922 try:
927 r = hex(self.repo.lookup(req.form['key'][0]))
923 r = hex(self.repo.lookup(req.form['key'][0]))
928 success = 1
924 success = 1
929 except Exception,inst:
925 except Exception,inst:
930 r = str(inst)
926 r = str(inst)
931 success = 0
927 success = 0
932 resp = "%s %s\n" % (success, r)
928 resp = "%s %s\n" % (success, r)
933 req.httphdr("application/mercurial-0.1", length=len(resp))
929 req.httphdr("application/mercurial-0.1", length=len(resp))
934 req.write(resp)
930 req.write(resp)
935
931
936 def do_heads(self, req):
932 def do_heads(self, req):
937 resp = " ".join(map(hex, self.repo.heads())) + "\n"
933 resp = " ".join(map(hex, self.repo.heads())) + "\n"
938 req.httphdr("application/mercurial-0.1", length=len(resp))
934 req.httphdr("application/mercurial-0.1", length=len(resp))
939 req.write(resp)
935 req.write(resp)
940
936
941 def do_branches(self, req):
937 def do_branches(self, req):
942 nodes = []
938 nodes = []
943 if req.form.has_key('nodes'):
939 if req.form.has_key('nodes'):
944 nodes = map(bin, req.form['nodes'][0].split(" "))
940 nodes = map(bin, req.form['nodes'][0].split(" "))
945 resp = cStringIO.StringIO()
941 resp = cStringIO.StringIO()
946 for b in self.repo.branches(nodes):
942 for b in self.repo.branches(nodes):
947 resp.write(" ".join(map(hex, b)) + "\n")
943 resp.write(" ".join(map(hex, b)) + "\n")
948 resp = resp.getvalue()
944 resp = resp.getvalue()
949 req.httphdr("application/mercurial-0.1", length=len(resp))
945 req.httphdr("application/mercurial-0.1", length=len(resp))
950 req.write(resp)
946 req.write(resp)
951
947
952 def do_between(self, req):
948 def do_between(self, req):
953 if req.form.has_key('pairs'):
949 if req.form.has_key('pairs'):
954 pairs = [map(bin, p.split("-"))
950 pairs = [map(bin, p.split("-"))
955 for p in req.form['pairs'][0].split(" ")]
951 for p in req.form['pairs'][0].split(" ")]
956 resp = cStringIO.StringIO()
952 resp = cStringIO.StringIO()
957 for b in self.repo.between(pairs):
953 for b in self.repo.between(pairs):
958 resp.write(" ".join(map(hex, b)) + "\n")
954 resp.write(" ".join(map(hex, b)) + "\n")
959 resp = resp.getvalue()
955 resp = resp.getvalue()
960 req.httphdr("application/mercurial-0.1", length=len(resp))
956 req.httphdr("application/mercurial-0.1", length=len(resp))
961 req.write(resp)
957 req.write(resp)
962
958
963 def do_changegroup(self, req):
959 def do_changegroup(self, req):
964 req.httphdr("application/mercurial-0.1")
960 req.httphdr("application/mercurial-0.1")
965 nodes = []
961 nodes = []
966 if not self.allowpull:
962 if not self.allowpull:
967 return
963 return
968
964
969 if req.form.has_key('roots'):
965 if req.form.has_key('roots'):
970 nodes = map(bin, req.form['roots'][0].split(" "))
966 nodes = map(bin, req.form['roots'][0].split(" "))
971
967
972 z = zlib.compressobj()
968 z = zlib.compressobj()
973 f = self.repo.changegroup(nodes, 'serve')
969 f = self.repo.changegroup(nodes, 'serve')
974 while 1:
970 while 1:
975 chunk = f.read(4096)
971 chunk = f.read(4096)
976 if not chunk:
972 if not chunk:
977 break
973 break
978 req.write(z.compress(chunk))
974 req.write(z.compress(chunk))
979
975
980 req.write(z.flush())
976 req.write(z.flush())
981
977
982 def do_changegroupsubset(self, req):
978 def do_changegroupsubset(self, req):
983 req.httphdr("application/mercurial-0.1")
979 req.httphdr("application/mercurial-0.1")
984 bases = []
980 bases = []
985 heads = []
981 heads = []
986 if not self.allowpull:
982 if not self.allowpull:
987 return
983 return
988
984
989 if req.form.has_key('bases'):
985 if req.form.has_key('bases'):
990 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
986 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
991 if req.form.has_key('heads'):
987 if req.form.has_key('heads'):
992 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
988 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
993
989
994 z = zlib.compressobj()
990 z = zlib.compressobj()
995 f = self.repo.changegroupsubset(bases, heads, 'serve')
991 f = self.repo.changegroupsubset(bases, heads, 'serve')
996 while 1:
992 while 1:
997 chunk = f.read(4096)
993 chunk = f.read(4096)
998 if not chunk:
994 if not chunk:
999 break
995 break
1000 req.write(z.compress(chunk))
996 req.write(z.compress(chunk))
1001
997
1002 req.write(z.flush())
998 req.write(z.flush())
1003
999
1004 def do_archive(self, req):
1000 def do_archive(self, req):
1005 changeset = self.repo.lookup(req.form['node'][0])
1001 changeset = self.repo.lookup(req.form['node'][0])
1006 type_ = req.form['type'][0]
1002 type_ = req.form['type'][0]
1007 allowed = self.configlist("web", "allow_archive")
1003 allowed = self.configlist("web", "allow_archive")
1008 if (type_ in self.archives and (type_ in allowed or
1004 if (type_ in self.archives and (type_ in allowed or
1009 self.configbool("web", "allow" + type_, False))):
1005 self.configbool("web", "allow" + type_, False))):
1010 self.archive(req, changeset, type_)
1006 self.archive(req, changeset, type_)
1011 return
1007 return
1012
1008
1013 req.write(self.t("error"))
1009 req.write(self.t("error"))
1014
1010
1015 def do_static(self, req):
1011 def do_static(self, req):
1016 fname = req.form['file'][0]
1012 fname = req.form['file'][0]
1017 # a repo owner may set web.static in .hg/hgrc to get any file
1013 # a repo owner may set web.static in .hg/hgrc to get any file
1018 # readable by the user running the CGI script
1014 # readable by the user running the CGI script
1019 static = self.config("web", "static",
1015 static = self.config("web", "static",
1020 os.path.join(self.templatepath, "static"),
1016 os.path.join(self.templatepath, "static"),
1021 untrusted=False)
1017 untrusted=False)
1022 req.write(staticfile(static, fname, req)
1018 req.write(staticfile(static, fname, req)
1023 or self.t("error", error="%r not found" % fname))
1019 or self.t("error", error="%r not found" % fname))
1024
1020
1025 def do_capabilities(self, req):
1021 def do_capabilities(self, req):
1026 caps = ['lookup', 'changegroupsubset']
1022 caps = ['lookup', 'changegroupsubset']
1027 if self.configbool('server', 'uncompressed'):
1023 if self.configbool('server', 'uncompressed'):
1028 caps.append('stream=%d' % self.repo.revlogversion)
1024 caps.append('stream=%d' % self.repo.revlogversion)
1029 # XXX: make configurable and/or share code with do_unbundle:
1025 # XXX: make configurable and/or share code with do_unbundle:
1030 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1026 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1031 if unbundleversions:
1027 if unbundleversions:
1032 caps.append('unbundle=%s' % ','.join(unbundleversions))
1028 caps.append('unbundle=%s' % ','.join(unbundleversions))
1033 resp = ' '.join(caps)
1029 resp = ' '.join(caps)
1034 req.httphdr("application/mercurial-0.1", length=len(resp))
1030 req.httphdr("application/mercurial-0.1", length=len(resp))
1035 req.write(resp)
1031 req.write(resp)
1036
1032
1037 def check_perm(self, req, op, default):
1033 def check_perm(self, req, op, default):
1038 '''check permission for operation based on user auth.
1034 '''check permission for operation based on user auth.
1039 return true if op allowed, else false.
1035 return true if op allowed, else false.
1040 default is policy to use if no config given.'''
1036 default is policy to use if no config given.'''
1041
1037
1042 user = req.env.get('REMOTE_USER')
1038 user = req.env.get('REMOTE_USER')
1043
1039
1044 deny = self.configlist('web', 'deny_' + op)
1040 deny = self.configlist('web', 'deny_' + op)
1045 if deny and (not user or deny == ['*'] or user in deny):
1041 if deny and (not user or deny == ['*'] or user in deny):
1046 return False
1042 return False
1047
1043
1048 allow = self.configlist('web', 'allow_' + op)
1044 allow = self.configlist('web', 'allow_' + op)
1049 return (allow and (allow == ['*'] or user in allow)) or default
1045 return (allow and (allow == ['*'] or user in allow)) or default
1050
1046
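# A hedged illustration of the deny/allow resolution in check_perm() above,
# using the same rules this class reads from web.deny_<op> and web.allow_<op>;
# the standalone helper name _check is invented here for illustration:
#
#     def _check(user, deny, allow, default=False):
#         # an entry in the deny list (or '*', or an anonymous user) blocks access
#         if deny and (not user or deny == ['*'] or user in deny):
#             return False
#         # otherwise the allow list must name the user (or '*'), else fall back to default
#         return (allow and (allow == ['*'] or user in allow)) or default
#
#     # e.g. _check('alice', deny=[], allow=['alice'])        -> True
#     #      _check(None,    deny=['*'], allow=[])            -> False
#     #      _check('bob',   deny=[], allow=[], default=False) -> False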
1051 def do_unbundle(self, req):
1047 def do_unbundle(self, req):
1052 def bail(response, headers={}):
1048 def bail(response, headers={}):
1053 length = int(req.env['CONTENT_LENGTH'])
1049 length = int(req.env['CONTENT_LENGTH'])
1054 for s in util.filechunkiter(req, limit=length):
1050 for s in util.filechunkiter(req, limit=length):
1055 # drain incoming bundle, else client will not see
1051 # drain incoming bundle, else client will not see
1056 # response when run outside cgi script
1052 # response when run outside cgi script
1057 pass
1053 pass
1058 req.httphdr("application/mercurial-0.1", headers=headers)
1054 req.httphdr("application/mercurial-0.1", headers=headers)
1059 req.write('0\n')
1055 req.write('0\n')
1060 req.write(response)
1056 req.write(response)
1061
1057
1062 # require ssl by default, auth info cannot be sniffed and
1058 # require ssl by default, auth info cannot be sniffed and
1063 # replayed
1059 # replayed
1064 ssl_req = self.configbool('web', 'push_ssl', True)
1060 ssl_req = self.configbool('web', 'push_ssl', True)
1065 if ssl_req:
1061 if ssl_req:
1066 if not req.env.get('HTTPS'):
1062 if not req.env.get('HTTPS'):
1067 bail(_('ssl required\n'))
1063 bail(_('ssl required\n'))
1068 return
1064 return
1069 proto = 'https'
1065 proto = 'https'
1070 else:
1066 else:
1071 proto = 'http'
1067 proto = 'http'
1072
1068
1073 # do not allow push unless explicitly allowed
1069 # do not allow push unless explicitly allowed
1074 if not self.check_perm(req, 'push', False):
1070 if not self.check_perm(req, 'push', False):
1075 bail(_('push not authorized\n'),
1071 bail(_('push not authorized\n'),
1076 headers={'status': '401 Unauthorized'})
1072 headers={'status': '401 Unauthorized'})
1077 return
1073 return
1078
1074
1079 req.httphdr("application/mercurial-0.1")
1075 req.httphdr("application/mercurial-0.1")
1080
1076
1081 their_heads = req.form['heads'][0].split(' ')
1077 their_heads = req.form['heads'][0].split(' ')
1082
1078
1083 def check_heads():
1079 def check_heads():
1084 heads = map(hex, self.repo.heads())
1080 heads = map(hex, self.repo.heads())
1085 return their_heads == [hex('force')] or their_heads == heads
1081 return their_heads == [hex('force')] or their_heads == heads
1086
1082
1087 # fail early if possible
1083 # fail early if possible
1088 if not check_heads():
1084 if not check_heads():
1089 bail(_('unsynced changes\n'))
1085 bail(_('unsynced changes\n'))
1090 return
1086 return
1091
1087
1092 # do not lock repo until all changegroup data is
1088 # do not lock repo until all changegroup data is
1093 # streamed. save to temporary file.
1089 # streamed. save to temporary file.
1094
1090
1095 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1091 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1096 fp = os.fdopen(fd, 'wb+')
1092 fp = os.fdopen(fd, 'wb+')
1097 try:
1093 try:
1098 length = int(req.env['CONTENT_LENGTH'])
1094 length = int(req.env['CONTENT_LENGTH'])
1099 for s in util.filechunkiter(req, limit=length):
1095 for s in util.filechunkiter(req, limit=length):
1100 fp.write(s)
1096 fp.write(s)
1101
1097
1102 lock = self.repo.lock()
1098 lock = self.repo.lock()
1103 try:
1099 try:
1104 if not check_heads():
1100 if not check_heads():
1105 req.write('0\n')
1101 req.write('0\n')
1106 req.write(_('unsynced changes\n'))
1102 req.write(_('unsynced changes\n'))
1107 return
1103 return
1108
1104
1109 fp.seek(0)
1105 fp.seek(0)
1110 header = fp.read(6)
1106 header = fp.read(6)
1111 if not header.startswith("HG"):
1107 if not header.startswith("HG"):
1112 # old client with uncompressed bundle
1108 # old client with uncompressed bundle
1113 def generator(f):
1109 def generator(f):
1114 yield header
1110 yield header
1115 for chunk in f:
1111 for chunk in f:
1116 yield chunk
1112 yield chunk
1117 elif not header.startswith("HG10"):
1113 elif not header.startswith("HG10"):
1118 req.write("0\n")
1114 req.write("0\n")
1119 req.write(_("unknown bundle version\n"))
1115 req.write(_("unknown bundle version\n"))
1120 return
1116 return
1121 elif header == "HG10GZ":
1117 elif header == "HG10GZ":
1122 def generator(f):
1118 def generator(f):
1123 zd = zlib.decompressobj()
1119 zd = zlib.decompressobj()
1124 for chunk in f:
1120 for chunk in f:
1125 yield zd.decompress(chunk)
1121 yield zd.decompress(chunk)
1126 elif header == "HG10BZ":
1122 elif header == "HG10BZ":
1127 def generator(f):
1123 def generator(f):
1128 zd = bz2.BZ2Decompressor()
1124 zd = bz2.BZ2Decompressor()
1129 zd.decompress("BZ")
1125 zd.decompress("BZ")
1130 for chunk in f:
1126 for chunk in f:
1131 yield zd.decompress(chunk)
1127 yield zd.decompress(chunk)
1132 elif header == "HG10UN":
1128 elif header == "HG10UN":
1133 def generator(f):
1129 def generator(f):
1134 for chunk in f:
1130 for chunk in f:
1135 yield chunk
1131 yield chunk
1136 else:
1132 else:
1137 req.write("0\n")
1133 req.write("0\n")
1138 req.write(_("unknown bundle compression type\n"))
1134 req.write(_("unknown bundle compression type\n"))
1139 return
1135 return
1140 gen = generator(util.filechunkiter(fp, 4096))
1136 gen = generator(util.filechunkiter(fp, 4096))
1141
1137
1142 # send addchangegroup output to client
1138 # send addchangegroup output to client
1143
1139
1144 old_stdout = sys.stdout
1140 old_stdout = sys.stdout
1145 sys.stdout = cStringIO.StringIO()
1141 sys.stdout = cStringIO.StringIO()
1146
1142
1147 try:
1143 try:
1148 url = 'remote:%s:%s' % (proto,
1144 url = 'remote:%s:%s' % (proto,
1149 req.env.get('REMOTE_HOST', ''))
1145 req.env.get('REMOTE_HOST', ''))
1150 ret = self.repo.addchangegroup(util.chunkbuffer(gen),
1146 ret = self.repo.addchangegroup(util.chunkbuffer(gen),
1151 'serve', url)
1147 'serve', url)
1152 finally:
1148 finally:
1153 val = sys.stdout.getvalue()
1149 val = sys.stdout.getvalue()
1154 sys.stdout = old_stdout
1150 sys.stdout = old_stdout
1155 req.write('%d\n' % ret)
1151 req.write('%d\n' % ret)
1156 req.write(val)
1152 req.write(val)
1157 finally:
1153 finally:
1158 lock.release()
1154 lock.release()
1159 finally:
1155 finally:
1160 fp.close()
1156 fp.close()
1161 os.unlink(tempname)
1157 os.unlink(tempname)
1162
1158
1163 def do_stream_out(self, req):
1159 def do_stream_out(self, req):
1164 req.httphdr("application/mercurial-0.1")
1160 req.httphdr("application/mercurial-0.1")
1165 streamclone.stream_out(self.repo, req)
1161 streamclone.stream_out(self.repo, req)
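
The do_unbundle handler above spools the uploaded changegroup to a temporary file and then chooses a decompression strategy from the first six bytes of the bundle. Below is a minimal standalone sketch of that dispatch, covering only the HG10GZ/HG10BZ/HG10UN headers shown in the code (the old-client fallback is omitted); the helper name bundle_reader is invented for illustration.

import bz2
import zlib

def bundle_reader(header, chunks):
    """Yield decompressed changegroup data from an iterator of raw chunks."""
    if header == "HG10GZ":                  # zlib-compressed bundle
        zd = zlib.decompressobj()
        for chunk in chunks:
            yield zd.decompress(chunk)
    elif header == "HG10BZ":                # bz2-compressed bundle
        zd = bz2.BZ2Decompressor()
        zd.decompress("BZ")                 # the stored stream drops the "BZ" magic
        for chunk in chunks:
            yield zd.decompress(chunk)
    elif header == "HG10UN":                # uncompressed bundle
        for chunk in chunks:
            yield chunk
    else:
        raise ValueError("unknown bundle compression type: %r" % header)

As in do_unbundle itself, the resulting generator would be wrapped with util.chunkbuffer before being handed to repo.addchangegroup.
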
@@ -1,211 +1,210 @@
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os
9 from mercurial import demandimport; demandimport.enable()
10 from mercurial.demandload import demandload
10 import os, mimetools, cStringIO
11 demandload(globals(), "mimetools cStringIO")
12 demandload(globals(), "mercurial:ui,hg,util,templater")
13 demandload(globals(), "mercurial.hgweb.hgweb_mod:hgweb")
14 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile,style_map")
15 from mercurial.i18n import gettext as _
11 from mercurial.i18n import gettext as _
12 from mercurial import ui, hg, util, templater
13 from common import get_mtime, staticfile, style_map
14 from hgweb_mod import hgweb
16
15
17 # This is a stopgap
16 # This is a stopgap
18 class hgwebdir(object):
17 class hgwebdir(object):
19 def __init__(self, config):
18 def __init__(self, config):
20 def cleannames(items):
19 def cleannames(items):
21 return [(name.strip(os.sep), path) for name, path in items]
20 return [(name.strip(os.sep), path) for name, path in items]
22
21
23 self.motd = ""
22 self.motd = ""
24 self.style = ""
23 self.style = ""
25 self.repos_sorted = ('name', False)
24 self.repos_sorted = ('name', False)
26 if isinstance(config, (list, tuple)):
25 if isinstance(config, (list, tuple)):
27 self.repos = cleannames(config)
26 self.repos = cleannames(config)
28 self.repos_sorted = ('', False)
27 self.repos_sorted = ('', False)
29 elif isinstance(config, dict):
28 elif isinstance(config, dict):
30 self.repos = cleannames(config.items())
29 self.repos = cleannames(config.items())
31 self.repos.sort()
30 self.repos.sort()
32 else:
31 else:
33 cp = util.configparser()
32 cp = util.configparser()
34 cp.read(config)
33 cp.read(config)
35 self.repos = []
34 self.repos = []
36 if cp.has_section('web'):
35 if cp.has_section('web'):
37 if cp.has_option('web', 'motd'):
36 if cp.has_option('web', 'motd'):
38 self.motd = cp.get('web', 'motd')
37 self.motd = cp.get('web', 'motd')
39 if cp.has_option('web', 'style'):
38 if cp.has_option('web', 'style'):
40 self.style = cp.get('web', 'style')
39 self.style = cp.get('web', 'style')
41 if cp.has_section('paths'):
40 if cp.has_section('paths'):
42 self.repos.extend(cleannames(cp.items('paths')))
41 self.repos.extend(cleannames(cp.items('paths')))
43 if cp.has_section('collections'):
42 if cp.has_section('collections'):
44 for prefix, root in cp.items('collections'):
43 for prefix, root in cp.items('collections'):
45 for path in util.walkrepos(root):
44 for path in util.walkrepos(root):
46 repo = os.path.normpath(path)
45 repo = os.path.normpath(path)
47 name = repo
46 name = repo
48 if name.startswith(prefix):
47 if name.startswith(prefix):
49 name = name[len(prefix):]
48 name = name[len(prefix):]
50 self.repos.append((name.lstrip(os.sep), repo))
49 self.repos.append((name.lstrip(os.sep), repo))
51 self.repos.sort()
50 self.repos.sort()
52
51
53 def run(self):
52 def run(self):
54 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
53 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
55 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
54 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
56 import mercurial.hgweb.wsgicgi as wsgicgi
55 import mercurial.hgweb.wsgicgi as wsgicgi
57 from request import wsgiapplication
56 from request import wsgiapplication
58 def make_web_app():
57 def make_web_app():
59 return self
58 return self
60 wsgicgi.launch(wsgiapplication(make_web_app))
59 wsgicgi.launch(wsgiapplication(make_web_app))
61
60
62 def run_wsgi(self, req):
61 def run_wsgi(self, req):
63 def header(**map):
62 def header(**map):
64 header_file = cStringIO.StringIO(''.join(tmpl("header", **map)))
63 header_file = cStringIO.StringIO(''.join(tmpl("header", **map)))
65 msg = mimetools.Message(header_file, 0)
64 msg = mimetools.Message(header_file, 0)
66 req.header(msg.items())
65 req.header(msg.items())
67 yield header_file.read()
66 yield header_file.read()
68
67
69 def footer(**map):
68 def footer(**map):
70 yield tmpl("footer", **map)
69 yield tmpl("footer", **map)
71
70
72 def motd(**map):
71 def motd(**map):
73 yield self.motd
72 yield self.motd
74
73
75 url = req.env['REQUEST_URI'].split('?')[0]
74 url = req.env['REQUEST_URI'].split('?')[0]
76 if not url.endswith('/'):
75 if not url.endswith('/'):
77 url += '/'
76 url += '/'
78
77
79 style = self.style
78 style = self.style
80 if req.form.has_key('style'):
79 if req.form.has_key('style'):
81 style = req.form['style'][0]
80 style = req.form['style'][0]
82 mapfile = style_map(templater.templatepath(), style)
81 mapfile = style_map(templater.templatepath(), style)
83 tmpl = templater.templater(mapfile, templater.common_filters,
82 tmpl = templater.templater(mapfile, templater.common_filters,
84 defaults={"header": header,
83 defaults={"header": header,
85 "footer": footer,
84 "footer": footer,
86 "motd": motd,
85 "motd": motd,
87 "url": url})
86 "url": url})
88
87
89 def archivelist(ui, nodeid, url):
88 def archivelist(ui, nodeid, url):
90 allowed = ui.configlist("web", "allow_archive", untrusted=True)
89 allowed = ui.configlist("web", "allow_archive", untrusted=True)
91 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
90 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
92 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
91 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
93 untrusted=True):
92 untrusted=True):
94 yield {"type" : i[0], "extension": i[1],
93 yield {"type" : i[0], "extension": i[1],
95 "node": nodeid, "url": url}
94 "node": nodeid, "url": url}
96
95
97 def entries(sortcolumn="", descending=False, **map):
96 def entries(sortcolumn="", descending=False, **map):
98 def sessionvars(**map):
97 def sessionvars(**map):
99 fields = []
98 fields = []
100 if req.form.has_key('style'):
99 if req.form.has_key('style'):
101 style = req.form['style'][0]
100 style = req.form['style'][0]
102 if style != get('web', 'style', ''):
101 if style != get('web', 'style', ''):
103 fields.append(('style', style))
102 fields.append(('style', style))
104
103
105 separator = url[-1] == '?' and ';' or '?'
104 separator = url[-1] == '?' and ';' or '?'
106 for name, value in fields:
105 for name, value in fields:
107 yield dict(name=name, value=value, separator=separator)
106 yield dict(name=name, value=value, separator=separator)
108 separator = ';'
107 separator = ';'
109
108
110 rows = []
109 rows = []
111 parity = 0
110 parity = 0
112 for name, path in self.repos:
111 for name, path in self.repos:
113 u = ui.ui(report_untrusted=False)
112 u = ui.ui(report_untrusted=False)
114 try:
113 try:
115 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
114 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
116 except IOError:
115 except IOError:
117 pass
116 pass
118 def get(section, name, default=None):
117 def get(section, name, default=None):
119 return u.config(section, name, default, untrusted=True)
118 return u.config(section, name, default, untrusted=True)
120
119
121 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
120 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
122 .replace("//", "/")) + '/'
121 .replace("//", "/")) + '/'
123
122
124 # update time with local timezone
123 # update time with local timezone
125 try:
124 try:
126 d = (get_mtime(path), util.makedate()[1])
125 d = (get_mtime(path), util.makedate()[1])
127 except OSError:
126 except OSError:
128 continue
127 continue
129
128
130 contact = (get("ui", "username") or # preferred
129 contact = (get("ui", "username") or # preferred
131 get("web", "contact") or # deprecated
130 get("web", "contact") or # deprecated
132 get("web", "author", "")) # also
131 get("web", "author", "")) # also
133 description = get("web", "description", "")
132 description = get("web", "description", "")
134 name = get("web", "name", name)
133 name = get("web", "name", name)
135 row = dict(contact=contact or "unknown",
134 row = dict(contact=contact or "unknown",
136 contact_sort=contact.upper() or "unknown",
135 contact_sort=contact.upper() or "unknown",
137 name=name,
136 name=name,
138 name_sort=name,
137 name_sort=name,
139 url=url,
138 url=url,
140 description=description or "unknown",
139 description=description or "unknown",
141 description_sort=description.upper() or "unknown",
140 description_sort=description.upper() or "unknown",
142 lastchange=d,
141 lastchange=d,
143 lastchange_sort=d[1]-d[0],
142 lastchange_sort=d[1]-d[0],
144 sessionvars=sessionvars,
143 sessionvars=sessionvars,
145 archives=archivelist(u, "tip", url))
144 archives=archivelist(u, "tip", url))
146 if (not sortcolumn
145 if (not sortcolumn
147 or (sortcolumn, descending) == self.repos_sorted):
146 or (sortcolumn, descending) == self.repos_sorted):
148 # fast path for unsorted output
147 # fast path for unsorted output
149 row['parity'] = parity
148 row['parity'] = parity
150 parity = 1 - parity
149 parity = 1 - parity
151 yield row
150 yield row
152 else:
151 else:
153 rows.append((row["%s_sort" % sortcolumn], row))
152 rows.append((row["%s_sort" % sortcolumn], row))
154 if rows:
153 if rows:
155 rows.sort()
154 rows.sort()
156 if descending:
155 if descending:
157 rows.reverse()
156 rows.reverse()
158 for key, row in rows:
157 for key, row in rows:
159 row['parity'] = parity
158 row['parity'] = parity
160 parity = 1 - parity
159 parity = 1 - parity
161 yield row
160 yield row
162
161
163 virtual = req.env.get("PATH_INFO", "").strip('/')
162 virtual = req.env.get("PATH_INFO", "").strip('/')
164 if virtual.startswith('static/'):
163 if virtual.startswith('static/'):
165 static = os.path.join(templater.templatepath(), 'static')
164 static = os.path.join(templater.templatepath(), 'static')
166 fname = virtual[7:]
165 fname = virtual[7:]
167 req.write(staticfile(static, fname, req) or
166 req.write(staticfile(static, fname, req) or
168 tmpl('error', error='%r not found' % fname))
167 tmpl('error', error='%r not found' % fname))
169 elif virtual:
168 elif virtual:
170 while virtual:
169 while virtual:
171 real = dict(self.repos).get(virtual)
170 real = dict(self.repos).get(virtual)
172 if real:
171 if real:
173 break
172 break
174 up = virtual.rfind('/')
173 up = virtual.rfind('/')
175 if up < 0:
174 if up < 0:
176 break
175 break
177 virtual = virtual[:up]
176 virtual = virtual[:up]
178 if real:
177 if real:
179 req.env['REPO_NAME'] = virtual
178 req.env['REPO_NAME'] = virtual
180 try:
179 try:
181 hgweb(real).run_wsgi(req)
180 hgweb(real).run_wsgi(req)
182 except IOError, inst:
181 except IOError, inst:
183 req.write(tmpl("error", error=inst.strerror))
182 req.write(tmpl("error", error=inst.strerror))
184 except hg.RepoError, inst:
183 except hg.RepoError, inst:
185 req.write(tmpl("error", error=str(inst)))
184 req.write(tmpl("error", error=str(inst)))
186 else:
185 else:
187 req.write(tmpl("notfound", repo=virtual))
186 req.write(tmpl("notfound", repo=virtual))
188 else:
187 else:
189 if req.form.has_key('static'):
188 if req.form.has_key('static'):
190 static = os.path.join(templater.templatepath(), "static")
189 static = os.path.join(templater.templatepath(), "static")
191 fname = req.form['static'][0]
190 fname = req.form['static'][0]
192 req.write(staticfile(static, fname, req)
191 req.write(staticfile(static, fname, req)
193 or tmpl("error", error="%r not found" % fname))
192 or tmpl("error", error="%r not found" % fname))
194 else:
193 else:
195 sortable = ["name", "description", "contact", "lastchange"]
194 sortable = ["name", "description", "contact", "lastchange"]
196 sortcolumn, descending = self.repos_sorted
195 sortcolumn, descending = self.repos_sorted
197 if req.form.has_key('sort'):
196 if req.form.has_key('sort'):
198 sortcolumn = req.form['sort'][0]
197 sortcolumn = req.form['sort'][0]
199 descending = sortcolumn.startswith('-')
198 descending = sortcolumn.startswith('-')
200 if descending:
199 if descending:
201 sortcolumn = sortcolumn[1:]
200 sortcolumn = sortcolumn[1:]
202 if sortcolumn not in sortable:
201 if sortcolumn not in sortable:
203 sortcolumn = ""
202 sortcolumn = ""
204
203
205 sort = [("sort_%s" % column,
204 sort = [("sort_%s" % column,
206 "%s%s" % ((not descending and column == sortcolumn)
205 "%s%s" % ((not descending and column == sortcolumn)
207 and "-" or "", column))
206 and "-" or "", column))
208 for column in sortable]
207 for column in sortable]
209 req.write(tmpl("index", entries=entries,
208 req.write(tmpl("index", entries=entries,
210 sortcolumn=sortcolumn, descending=descending,
209 sortcolumn=sortcolumn, descending=descending,
211 **dict(sort)))
210 **dict(sort)))
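
The index view above orders repository rows by a precomputed "<column>_sort" key, and a leading '-' on the sort form value requests descending order. A small sketch of that convention, assuming only the row dictionaries as built in entries(); parse_sort and sort_rows are invented names.

def parse_sort(form, sortable, default=("name", False)):
    """Return (sortcolumn, descending) from the 'sort' form value."""
    sortcolumn, descending = default
    if form.has_key('sort'):
        sortcolumn = form['sort'][0]
        descending = sortcolumn.startswith('-')
        if descending:
            sortcolumn = sortcolumn[1:]
        if sortcolumn not in sortable:
            sortcolumn = ""          # unknown column: fall back to unsorted
    return sortcolumn, descending

def sort_rows(rows, sortcolumn, descending):
    """Order row dicts by their precomputed '<column>_sort' key."""
    keyed = [(row["%s_sort" % sortcolumn], row) for row in rows]
    keyed.sort()
    if descending:
        keyed.reverse()
    return [row for key, row in keyed]

Keeping the sort key alongside each row is what lets the unsorted fast path above yield rows immediately and skip the keying work entirely.
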
@@ -1,90 +1,89 @@
1 # hgweb/request.py - An http request from either CGI or the standalone server.
1 # hgweb/request.py - An http request from either CGI or the standalone server.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from mercurial.demandload import demandload
9 import socket, sys, cgi, os, errno
10 demandload(globals(), "socket sys cgi os errno")
11 from mercurial.i18n import gettext as _
10 from mercurial.i18n import gettext as _
12
11
13 class wsgiapplication(object):
12 class wsgiapplication(object):
14 def __init__(self, destmaker):
13 def __init__(self, destmaker):
15 self.destmaker = destmaker
14 self.destmaker = destmaker
16
15
17 def __call__(self, wsgienv, start_response):
16 def __call__(self, wsgienv, start_response):
18 return _wsgirequest(self.destmaker(), wsgienv, start_response)
17 return _wsgirequest(self.destmaker(), wsgienv, start_response)
19
18
20 class _wsgioutputfile(object):
19 class _wsgioutputfile(object):
21 def __init__(self, request):
20 def __init__(self, request):
22 self.request = request
21 self.request = request
23
22
24 def write(self, data):
23 def write(self, data):
25 self.request.write(data)
24 self.request.write(data)
26 def writelines(self, lines):
25 def writelines(self, lines):
27 for line in lines:
26 for line in lines:
28 self.write(line)
27 self.write(line)
29 def flush(self):
28 def flush(self):
30 return None
29 return None
31 def close(self):
30 def close(self):
32 return None
31 return None
33
32
34 class _wsgirequest(object):
33 class _wsgirequest(object):
35 def __init__(self, destination, wsgienv, start_response):
34 def __init__(self, destination, wsgienv, start_response):
36 version = wsgienv['wsgi.version']
35 version = wsgienv['wsgi.version']
37 if (version < (1, 0)) or (version >= (2, 0)):
36 if (version < (1, 0)) or (version >= (2, 0)):
38 raise RuntimeError("Unknown and unsupported WSGI version %d.%d" \
37 raise RuntimeError("Unknown and unsupported WSGI version %d.%d" \
39 % version)
38 % version)
40 self.inp = wsgienv['wsgi.input']
39 self.inp = wsgienv['wsgi.input']
41 self.out = _wsgioutputfile(self)
40 self.out = _wsgioutputfile(self)
42 self.server_write = None
41 self.server_write = None
43 self.err = wsgienv['wsgi.errors']
42 self.err = wsgienv['wsgi.errors']
44 self.threaded = wsgienv['wsgi.multithread']
43 self.threaded = wsgienv['wsgi.multithread']
45 self.multiprocess = wsgienv['wsgi.multiprocess']
44 self.multiprocess = wsgienv['wsgi.multiprocess']
46 self.run_once = wsgienv['wsgi.run_once']
45 self.run_once = wsgienv['wsgi.run_once']
47 self.env = wsgienv
46 self.env = wsgienv
48 self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
47 self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
49 self.start_response = start_response
48 self.start_response = start_response
50 self.headers = []
49 self.headers = []
51 destination.run_wsgi(self)
50 destination.run_wsgi(self)
52
51
53 def __iter__(self):
52 def __iter__(self):
54 return iter([])
53 return iter([])
55
54
56 def read(self, count=-1):
55 def read(self, count=-1):
57 return self.inp.read(count)
56 return self.inp.read(count)
58
57
59 def write(self, *things):
58 def write(self, *things):
60 for thing in things:
59 for thing in things:
61 if hasattr(thing, "__iter__"):
60 if hasattr(thing, "__iter__"):
62 for part in thing:
61 for part in thing:
63 self.write(part)
62 self.write(part)
64 else:
63 else:
65 thing = str(thing)
64 thing = str(thing)
66 if self.server_write is None:
65 if self.server_write is None:
67 if not self.headers:
66 if not self.headers:
68 raise RuntimeError("request.write called before headers sent (%s)." % thing)
67 raise RuntimeError("request.write called before headers sent (%s)." % thing)
69 self.server_write = self.start_response('200 Script output follows',
68 self.server_write = self.start_response('200 Script output follows',
70 self.headers)
69 self.headers)
71 self.start_response = None
70 self.start_response = None
72 self.headers = None
71 self.headers = None
73 try:
72 try:
74 self.server_write(thing)
73 self.server_write(thing)
75 except socket.error, inst:
74 except socket.error, inst:
76 if inst[0] != errno.ECONNRESET:
75 if inst[0] != errno.ECONNRESET:
77 raise
76 raise
78
77
79 def header(self, headers=[('Content-type','text/html')]):
78 def header(self, headers=[('Content-type','text/html')]):
80 self.headers.extend(headers)
79 self.headers.extend(headers)
81
80
82 def httphdr(self, type, filename=None, length=0, headers={}):
81 def httphdr(self, type, filename=None, length=0, headers={}):
83 headers = headers.items()
82 headers = headers.items()
84 headers.append(('Content-type', type))
83 headers.append(('Content-type', type))
85 if filename:
84 if filename:
86 headers.append(('Content-disposition', 'attachment; filename=%s' %
85 headers.append(('Content-disposition', 'attachment; filename=%s' %
87 filename))
86 filename))
88 if length:
87 if length:
89 headers.append(('Content-length', str(length)))
88 headers.append(('Content-length', str(length)))
90 self.header(headers)
89 self.header(headers)
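Apart from the import block, request.py is unchanged. The contract it enforces is that header() or httphdr() must queue headers before the first write(), since the first write() is what invokes start_response with the queued headers. A minimal sketch of that calling order, with a toy handler standing in for hgweb (the handler name and payload are purely illustrative; 'req' is assumed to be a _wsgirequest as defined above):

# Illustrative only: how a handler is expected to drive a _wsgirequest.
def toy_handler(req):
    # queue Content-type and Content-length headers first ...
    req.httphdr("text/plain", length=13)
    # ... then write; the first write() calls start_response() with the
    # queued headers, and subsequent writes go straight to the server.
    req.write("Hello, hgweb\n")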
@@ -1,239 +1,239 @@
# hgweb/server.py - The standalone hg web server.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

-from mercurial.demandload import demandload
-import os, sys, errno
-demandload(globals(), "urllib BaseHTTPServer socket SocketServer")
-demandload(globals(), "mercurial:ui,hg,util,templater")
-demandload(globals(), "hgweb_mod:hgweb hgwebdir_mod:hgwebdir request:wsgiapplication")
+import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer
+from mercurial import ui, hg, util, templater
+from hgweb_mod import hgweb
+from hgwebdir_mod import hgwebdir
+from request import wsgiapplication
from mercurial.i18n import gettext as _

def _splitURI(uri):
    """ Return path and query splited from uri

    Just like CGI environment, the path is unquoted, the query is
    not.
    """
    if '?' in uri:
        path, query = uri.split('?', 1)
    else:
        path, query = uri, ''
    return urllib.unquote(path), query

class _error_logger(object):
    def __init__(self, handler):
        self.handler = handler
    def flush(self):
        pass
    def write(self, str):
        self.writelines(str.split('\n'))
    def writelines(self, seq):
        for msg in seq:
            self.handler.log_error("HG error: %s", msg)

class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
    def __init__(self, *args, **kargs):
        self.protocol_version = 'HTTP/1.1'
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)

    def log_error(self, format, *args):
        errorlog = self.server.errorlog
        errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
                                             self.log_date_time_string(),
                                             format % args))

    def log_message(self, format, *args):
        accesslog = self.server.accesslog
        accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
                                              self.log_date_time_string(),
                                              format % args))

    def do_POST(self):
        try:
            self.do_hgweb()
        except socket.error, inst:
            if inst[0] != errno.EPIPE:
                raise

    def do_GET(self):
        self.do_POST()

    def do_hgweb(self):
        path_info, query = _splitURI(self.path)

        env = {}
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['REQUEST_METHOD'] = self.command
        env['SERVER_NAME'] = self.server.server_name
        env['SERVER_PORT'] = str(self.server.server_port)
        env['REQUEST_URI'] = self.path
        env['PATH_INFO'] = path_info
        if query:
            env['QUERY_STRING'] = query
        host = self.address_string()
        if host != self.client_address[0]:
            env['REMOTE_HOST'] = host
        env['REMOTE_ADDR'] = self.client_address[0]

        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        for header in [h for h in self.headers.keys() \
                       if h not in ('content-type', 'content-length')]:
            hkey = 'HTTP_' + header.replace('-', '_').upper()
            hval = self.headers.getheader(header)
            hval = hval.replace('\n', '').strip()
            if hval:
                env[hkey] = hval
        env['SERVER_PROTOCOL'] = self.request_version
        env['wsgi.version'] = (1, 0)
        env['wsgi.url_scheme'] = 'http'
        env['wsgi.input'] = self.rfile
        env['wsgi.errors'] = _error_logger(self)
        env['wsgi.multithread'] = isinstance(self.server,
                                             SocketServer.ThreadingMixIn)
        env['wsgi.multiprocess'] = isinstance(self.server,
                                              SocketServer.ForkingMixIn)
        env['wsgi.run_once'] = 0

        self.close_connection = True
        self.saved_status = None
        self.saved_headers = []
        self.sent_headers = False
        self.length = None
        req = self.server.reqmaker(env, self._start_response)
        for data in req:
            if data:
                self._write(data)

    def send_headers(self):
        if not self.saved_status:
            raise AssertionError("Sending headers before start_response() called")
        saved_status = self.saved_status.split(None, 1)
        saved_status[0] = int(saved_status[0])
        self.send_response(*saved_status)
        should_close = True
        for h in self.saved_headers:
            self.send_header(*h)
            if h[0].lower() == 'content-length':
                should_close = False
                self.length = int(h[1])
        # The value of the Connection header is a list of case-insensitive
        # tokens separated by commas and optional whitespace.
        if 'close' in [token.strip().lower() for token in
                       self.headers.get('connection', '').split(',')]:
            should_close = True
        if should_close:
            self.send_header('Connection', 'close')
        self.close_connection = should_close
        self.end_headers()
        self.sent_headers = True

    def _start_response(self, http_status, headers, exc_info=None):
        code, msg = http_status.split(None, 1)
        code = int(code)
        self.saved_status = http_status
        bad_headers = ('connection', 'transfer-encoding')
        self.saved_headers = [ h for h in headers \
                               if h[0].lower() not in bad_headers ]
        return self._write

    def _write(self, data):
        if not self.saved_status:
            raise AssertionError("data written before start_response() called")
        elif not self.sent_headers:
            self.send_headers()
        if self.length is not None:
            if len(data) > self.length:
                raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
            self.length = self.length - len(data)
        self.wfile.write(data)
        self.wfile.flush()

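The send_headers() method above decides whether to keep the connection alive: it defaults to closing, keeps the socket open only if a Content-length header was emitted, and closes anyway if the client asked for it. The client-side check is plain token parsing of the Connection header; a standalone illustration of that test, with an invented header value:

# Standalone illustration of the Connection-header test used in send_headers().
connection = " Keep-Alive , close "   # hypothetical raw header value
tokens = [token.strip().lower() for token in connection.split(',')]
print 'close' in tokens               # True, so the server would close the socket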
def create_server(ui, repo):
    use_threads = True

    def openlog(opt, default):
        if opt and opt != '-':
            return open(opt, 'w')
        return default

    address = ui.config("web", "address", "")
    port = int(ui.config("web", "port", 8000))
    use_ipv6 = ui.configbool("web", "ipv6")
    webdir_conf = ui.config("web", "webdir_conf")
    accesslog = openlog(ui.config("web", "accesslog", "-"), sys.stdout)
    errorlog = openlog(ui.config("web", "errorlog", "-"), sys.stderr)

    if use_threads:
        try:
            from threading import activeCount
        except ImportError:
            use_threads = False

    if use_threads:
        _mixin = SocketServer.ThreadingMixIn
    else:
        if hasattr(os, "fork"):
            _mixin = SocketServer.ForkingMixIn
        else:
            class _mixin:
                pass

    class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
        def __init__(self, *args, **kargs):
            BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
            self.accesslog = accesslog
            self.errorlog = errorlog
            self.repo = repo
            self.webdir_conf = webdir_conf
            self.webdirmaker = hgwebdir
            self.repoviewmaker = hgweb
            self.reqmaker = wsgiapplication(self.make_handler)
            self.daemon_threads = True

            addr, port = self.socket.getsockname()[:2]
            if addr in ('0.0.0.0', '::'):
                addr = socket.gethostname()
            else:
                try:
                    addr = socket.gethostbyaddr(addr)[0]
                except socket.error:
                    pass
            self.addr, self.port = addr, port

        def make_handler(self):
            if self.webdir_conf:
                hgwebobj = self.webdirmaker(self.webdir_conf)
            elif self.repo is not None:
                hgwebobj = self.repoviewmaker(repo.__class__(repo.ui,
                                                             repo.origroot))
            else:
                raise hg.RepoError(_("There is no Mercurial repository here"
                                     " (.hg not found)"))
            return hgwebobj

    class IPv6HTTPServer(MercurialHTTPServer):
        address_family = getattr(socket, 'AF_INET6', None)

        def __init__(self, *args, **kwargs):
            if self.address_family is None:
                raise hg.RepoError(_('IPv6 not available on this system'))
            super(IPv6HTTPServer, self).__init__(*args, **kwargs)

    try:
        if use_ipv6:
            return IPv6HTTPServer((address, port), _hgwebhandler)
        else:
            return MercurialHTTPServer((address, port), _hgwebhandler)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: %s') % inst.args[1])
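create_server() only builds the server object from the web.* settings it reads through ui; running it is left to the caller. A minimal sketch of how it might be driven, assuming a Mercurial installation of this era and a local repository at ./repo (the path and port are made up, and this is not the actual 'hg serve' entry point):

# Minimal sketch, assuming mercurial.ui and mercurial.hg of this era.
from mercurial import ui as uimod, hg
from mercurial.hgweb import server

u = uimod.ui()
u.setconfig("web", "port", "8001")    # read back by ui.config() in create_server()
repo = hg.repository(u, "./repo")     # hypothetical local repository
httpd = server.create_server(u, repo)
print "listening on http://%s:%d/" % (httpd.addr, httpd.port)
httpd.serve_forever()                 # inherited from BaseHTTPServer.HTTPServer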
The remaining files in this changeset (several more modified files and two removed files) are too big and their content was truncated; their diffs are not shown here.