##// END OF EJS Templates
Convert all classes to new-style classes by deriving them from object.
Eric Hopper -
r1559:59b3639d default
parent child Browse files
Show More
@@ -1,287 +1,287 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # This software may be used and distributed according to the terms
3 # This software may be used and distributed according to the terms
4 # of the GNU General Public License, incorporated herein by reference.
4 # of the GNU General Public License, incorporated herein by reference.
5
5
6 from mercurial.demandload import demandload
6 from mercurial.demandload import demandload
7 demandload(globals(), "os sys sets")
7 demandload(globals(), "os sys sets")
8 from mercurial import hg
8 from mercurial import hg
9
9
10 versionstr = "0.0.3"
10 versionstr = "0.0.3"
11
11
def lookup_rev(ui, repo, rev=None):
    """returns rev or the checked-out revision if rev is None"""
    if rev is not None:
        return repo.lookup(rev)
    # no explicit revision: fall back to the single dirstate parent
    # (the revision the working directory is based on)
    parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
    if len(parents) == 1:
        return parents.pop()
    # a merge in progress (or no parent at all) is ambiguous: bail out
    ui.warn("unexpected number of parents\n")
    ui.warn("please commit or revert\n")
    sys.exit(1)
22
22
def check_clean(ui, repo):
    """exit(1) unless the working directory has no modified, added or
    deleted files; unknown files are tolerated"""
    changed, added, deleted, unknown = repo.changes()
    if not (changed or added or deleted):
        return
    ui.warn("Repository is not clean, please commit or revert\n")
    sys.exit(1)
28
28
class bisect(object):
    """dichotomic search in the DAG of changesets"""
    # Persistent state lives in two files under .hg/bisect/:
    #   "good" - one hex node per line, all revisions marked good
    #   "bad"  - a single hex node, the revision marked bad
    # State is loaded here and written back in __del__.
    def __init__(self, ui, repo):
        self.repo = repo
        self.path = os.path.join(repo.join(""), "bisect")
        self.ui = ui
        self.goodrevs = []   # binary nodes marked good
        self.badrev = None   # single binary node marked bad
        # NOTE(review): good_dirty/bad_dirty are never updated elsewhere
        # in this file - confirm they are still needed
        self.good_dirty = 0
        self.bad_dirty = 0
        self.good_path = os.path.join(self.path, "good")
        self.bad_path = os.path.join(self.path, "bad")

        # reload any previously recorded good revisions
        s = self.good_path
        if os.path.exists(s):
            self.goodrevs = self.repo.opener(s).read().splitlines()
            self.goodrevs = [hg.bin(x) for x in self.goodrevs]
        # reload the recorded bad revision (only the first line is used)
        s = self.bad_path
        if os.path.exists(s):
            r = self.repo.opener(s).read().splitlines()
            if r:
                self.badrev = hg.bin(r.pop(0))

    def __del__(self):
        """write the good/bad state back to disk on destruction"""
        # no bisection in progress: nothing to persist
        if not os.path.isdir(self.path):
            return
        f = self.repo.opener(self.good_path, "w")
        f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
        if len(self.goodrevs) > 0:
            f.write("\n")
        f = self.repo.opener(self.bad_path, "w")
        if self.badrev:
            f.write(hg.hex(self.badrev) + "\n")

    def init(self):
        """start a new bisection"""
        if os.path.isdir(self.path):
            self.ui.warn("bisect directory already exists\n")
            return 1
        os.mkdir(self.path)
        check_clean(self.ui, self.repo)
        return 0

    def reset(self):
        """finish a bisection"""
        # remove the state files, then the bisect directory itself
        if os.path.isdir(self.path):
            sl = [self.bad_path, self.good_path]
            for s in sl:
                if os.path.exists(s):
                    os.unlink(s)
            os.rmdir(self.path)
        # Not sure about this
        #self.ui.write("Going back to tip\n")
        #self.repo.update(self.repo.changelog.tip())
        return 1

    def num_ancestors(self, head=None, stop=None):
        """
        returns a dict with the mapping:
        node -> number of ancestors (self included)
        for all nodes who are ancestor of head and
        not in stop.
        """
        if head is None:
            head = self.badrev
        return self.__ancestors_and_nb_ancestors(head, stop)[1]

    def ancestors(self, head=None, stop=None):
        """
        returns the set of the ancestors of head (self included)
        who are not in stop.
        """
        if head is None:
            head = self.badrev
        return self.__ancestors_and_nb_ancestors(head, stop)[0]

    def __ancestors_and_nb_ancestors(self, head, stop=None):
        """
        if stop is None then ancestors of goodrevs are used as
        lower limit.

        returns (anc, n_child) where anc is the set of the ancestors of head
        and n_child is a dictionary with the following mapping:
        node -> number of ancestors (self included)
        """
        cl = self.repo.changelog
        # build the stop set: everything reachable from a good revision
        # is known good and excluded from the search
        if not stop:
            stop = sets.Set([])
        for g in reversed(self.goodrevs):
            if g in stop:
                continue
            stop.update(cl.reachable(g))
        def num_children(a):
            """
            returns a dictionary with the following mapping
            node -> [number of children, empty set]
            """
            d = {a: [0, sets.Set([])]}
            for i in xrange(cl.rev(a)+1):
                n = cl.node(i)
                if not d.has_key(n):
                    d[n] = [0, sets.Set([])]
                # parents always have a lower rev, so they are already in d
                parents = [p for p in cl.parents(n) if p != hg.nullid]
                for p in parents:
                    d[p][0] += 1
            return d

        if head in stop:
            self.ui.warn("Unconsistent state, %s is good and bad\n"
                          % hg.hex(head))
            sys.exit(1)
        # single forward sweep: each node accumulates its ancestor set,
        # and a node's entry is collapsed to a plain count once all of
        # its children have consumed it (to bound memory)
        n_child = num_children(head)
        for i in xrange(cl.rev(head)+1):
            n = cl.node(i)
            parents = [p for p in cl.parents(n) if p != hg.nullid]
            for p in parents:
                n_child[p][0] -= 1
                if not n in stop:
                    n_child[n][1].union_update(n_child[p][1])
                if n_child[p][0] == 0:
                    n_child[p] = len(n_child[p][1])
            if not n in stop:
                n_child[n][1].add(n)
            if n_child[n][0] == 0:
                if n == head:
                    anc = n_child[n][1]
                n_child[n] = len(n_child[n][1])
        return anc, n_child

    def next(self):
        """return the revision whose ancestor count best halves the
        remaining candidate set (classic bisection pivot)"""
        if not self.badrev:
            self.ui.warn("You should give at least one bad\n")
            sys.exit(1)
        if not self.goodrevs:
            self.ui.warn("No good revision given\n")
            self.ui.warn("Assuming the first revision is good\n")
        ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(self.badrev)
        tot = len(ancestors)
        if tot == 1:
            # only the bad revision itself is left: bisection is done
            if ancestors.pop() != self.badrev:
                self.ui.warn("Could not find the first bad revision\n")
                sys.exit(1)
            self.ui.write(
                "The first bad revision is : %s\n" % hg.hex(self.badrev))
            sys.exit(0)
        self.ui.write("%d revisions left\n" % tot)
        # maximize min(#ancestors, #non-ancestors) for the best split
        best_rev = None
        best_len = -1
        for n in ancestors:
            l = num_ancestors[n]
            l = min(l, tot - l)
            if l > best_len:
                best_len = l
                best_rev = n
        return best_rev

    def autonext(self):
        """find and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = self.next()
        self.ui.write("Now testing %s\n" % hg.hex(rev))
        return self.repo.update(rev, allow=True, force=True)

    def good(self, rev):
        """mark a revision (binary node) as good"""
        self.goodrevs.append(rev)

    def autogood(self, rev=None):
        """mark revision as good and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = lookup_rev(self.ui, self.repo, rev)
        self.good(rev)
        # only continue bisecting once a bad revision is also known
        if self.badrev:
            self.autonext()

    def bad(self, rev):
        """mark a revision (binary node) as bad"""
        self.badrev = rev

    def autobad(self, rev=None):
        """mark revision as bad and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = lookup_rev(self.ui, self.repo, rev)
        self.bad(rev)
        # only continue bisecting once a good revision is also known
        if self.goodrevs:
            self.autonext()
213
213
# should we put it in the class ?
def test(ui, repo, rev):
    """test the bisection code"""
    b = bisect(ui, repo)
    rev = repo.lookup(rev)
    ui.write("testing with rev %s\n" % hg.hex(rev))
    anc = b.ancestors()
    while len(anc) > 1:
        # the target must always remain inside the candidate set
        if rev not in anc:
            ui.warn("failure while bisecting\n")
            sys.exit(1)
        ui.write("it worked :)\n")
        candidate = b.next()
        ui.write("choosing if good or bad\n")
        # simulate the user's verdict: the candidate is bad exactly
        # when the target revision is among its ancestors
        if rev in b.ancestors(head=candidate):
            b.bad(candidate)
            ui.write("it is bad\n")
        else:
            b.good(candidate)
            ui.write("it is good\n")
        anc = b.ancestors()
        repo.update(candidate, allow=True, force=True)
    for v in anc:
        if v != rev:
            ui.warn("fail to found cset! :(\n")
            return 1
    ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
    ui.write("Everything is ok :)\n")
    return 0
243
243
def bisect_run(ui, repo, cmd=None, *args):
    """bisect extension: dichotomic search in the DAG of changesets
    for subcommands see "hg bisect help\"
    """
    def help_(cmd=None, *args):
        """show help for a given bisect subcommand or all subcommands"""
        cmdtable = bisectcmdtable
        if cmd:
            # detailed help for one subcommand: synopsis, then docstring
            doc = cmdtable[cmd][0].__doc__
            synopsis = cmdtable[cmd][2]
            ui.write(synopsis + "\n")
            ui.write("\n" + doc + "\n")
            return
        # overview: one line per subcommand, aligned on the longest name
        ui.write("list of subcommands for the bisect extension\n\n")
        cmds = sorted(cmdtable.keys())
        m = max([len(c) for c in cmds])
        for cmd in cmds:
            doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
            ui.write(" %-*s %s\n" % (m, cmd, doc))

    b = bisect(ui, repo)
    # subcommand -> (handler, max number of positional args, synopsis)
    bisectcmdtable = {
        "init": (b.init, 0, "hg bisect init"),
        "bad": (b.autobad, 1, "hg bisect bad [<rev>]"),
        "good": (b.autogood, 1, "hg bisect good [<rev>]"),
        "next": (b.autonext, 0, "hg bisect next"),
        "reset": (b.reset, 0, "hg bisect reset"),
        "help": (help_, 1, "hg bisect help [<subcommand>]"),
    }

    # dict.has_key() is deprecated (and removed in Python 3); the "in"
    # operator is the portable spelling
    if cmd not in bisectcmdtable:
        ui.warn("bisect: Unknown sub-command\n")
        return help_()
    if len(args) > bisectcmdtable[cmd][1]:
        ui.warn("bisect: Too many arguments\n")
        return help_()
    return bisectcmdtable[cmd][0](*args)
282
282
# Command table exported by this module: command name ->
# (entry point, option list, synopsis).  Presumably consumed by hg's
# extension loader - confirm against the hosting Mercurial version.
cmdtable = {
    "bisect": (bisect_run, [],
               "hg bisect [help|init|reset|next|good|bad]"),
    # the self-test command is deliberately left disabled
    #"bisect-test": (test, [], "hg bisect-test rev"),
}
@@ -1,2658 +1,2658 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
15
15
class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
20
20
def filterfiles(filters, files):
    """Select the members of files matched by filters: exact name
    matches first, then every file lying under a filter treated as a
    directory prefix."""
    selected = [name for name in files if name in filters]

    for prefix in filters:
        # treat each non-empty filter as a directory by ensuring a
        # trailing slash before prefix matching
        if prefix and not prefix.endswith("/"):
            prefix = prefix + "/"
        selected.extend(name for name in files if name.startswith(prefix))
    return selected
28 return l
29
29
def relpath(repo, args):
    """Reinterpret args relative to the repository's current working
    directory, normalizing each resulting path; when the cwd is the
    repository root, args is returned unchanged."""
    cwd = repo.getcwd()
    if not cwd:
        return args
    return [util.normpath(os.path.join(cwd, p)) for p in args]
35
35
def matchpats(repo, cwd, pats=[], opts={}, head=''):
    """Build a command-line matcher for pats (defaulting to everything
    under '.'), honoring any include/exclude options."""
    patterns = pats or ['.']
    include = opts.get('include')
    exclude = opts.get('exclude')
    return util.cmdmatcher(repo.root, cwd, patterns, include, exclude, head)
39
39
def makewalk(repo, pats, opts, head=''):
    """Resolve pats into (files, matchfn, iterator); each iterator item
    is (src, filename, path relative to cwd, named-exactly flag)."""
    cwd = repo.getcwd()
    files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
    # files listed explicitly on the command line, for O(1) lookup
    exact = dict([(f, f) for f in files])
    def traverse():
        for src, fn in repo.walk(files=files, match=matchfn):
            yield src, fn, util.pathto(cwd, fn), fn in exact
    return files, matchfn, traverse()
48
48
def walk(repo, pats, opts, head=''):
    """Generator wrapper around makewalk that yields only the walk
    items, discarding the files/matchfn bookkeeping."""
    _files, _matchfn, entries = makewalk(repo, pats, opts, head)
    for entry in entries:
        yield entry
53
53
def walkchangerevs(ui, repo, cwd, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange) pair. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    # empty repository: nothing to walk
    if repo.changelog.count() == 0:
        return [], False

    cwd = repo.getcwd()
    if not pats and cwd:
        # anchor bare --include/--exclude patterns at the current directory
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
                                        pats, opts)
    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}         # numeric revs that match; used as a set
    slowpath = anypats  # patterns force scanning every changeset
    window = 300        # revisions handled per batch
    fncache = {}        # rev -> list of file names that matched in it

    # memoized changelog lookup by numeric revision
    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog):
            # yield the linkrevs of filelog entries, one window at a
            # time, newest window first but ascending within a window
            for i in xrange(filelog.count() - 1, -1, -window):
                revs = []
                for j in xrange(max(0, i - window), i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file in files:
            filelog = repo.file(file)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i in xrange(repo.changelog.count() - 1, -1, -window):
                for j in xrange(max(0, i - window), i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        # emit one "window" marker, then the window's matching revs in
        # ascending order as "add", then in requested order as "iter"
        for i in xrange(0, len(revs), window):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange
158
158
# separator used in "start:end" revision range specifications
revrangesep = ':'

def revrange(ui, repo, revs, revlog=None):
    """Yield revision as strings from a list of revision specifications."""
    if revlog is None:
        revlog = repo.changelog
    revcount = revlog.count()
    def fix(val, defval):
        # resolve one spec to a numeric revision: empty -> defval;
        # decimal (negative counts back from the end, clamped at 0) ->
        # that number; anything else is looked up symbolically, first
        # in the changelog, then in the target revlog
        if not val:
            return defval
        try:
            num = int(val)
            # reject non-canonical numerals (e.g. "+5", "05")
            if str(num) != val:
                raise ValueError
            if num < 0: num += revcount
            if num < 0: num = 0
            elif num >= revcount:
                raise ValueError
        except ValueError:
            try:
                num = repo.changelog.rev(repo.lookup(val))
            except KeyError:
                try:
                    num = revlog.rev(revlog.lookup(val))
                except KeyError:
                    raise util.Abort(_('invalid revision identifier %s'), val)
        return num
    seen = {}  # numeric revs already yielded; duplicates are skipped
    for spec in revs:
        if spec.find(revrangesep) >= 0:
            start, end = spec.split(revrangesep, 1)
            start = fix(start, 0)
            end = fix(end, revcount - 1)
            # pre-2.5 conditional expression: -1 when walking backwards
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen: continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = fix(spec, None)
            if rev in seen: continue
            seen[rev] = 1
            yield str(rev)
202
202
def make_filename(repo, r, pat, node=None,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the %-escapes in the output file name pattern pat.

    Supported escapes (each only when the corresponding argument was
    given): %% literal percent, %b basename of the repository root,
    %H full hex node, %R numeric revision, %h short hex node,
    %r revision zero-padded to revwidth, %N total, %n seqno (padded to
    the width of total when both are given), %s/%d/%p basename,
    dirname and full form of pathname.  Aborts on an unknown escape.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(r.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        # enable only the escapes whose inputs were supplied
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        # single pass over pat, replacing each %X with its expansion
        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        # unknown escape character: report it to the user
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])
246
246
def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return an open file-like object selected by pat.

    An empty pat or '-' selects stdout for write modes and stdin
    otherwise.  A pat that is already a file-like object compatible
    with mode is returned unchanged.  Any other pat is treated as a
    file name format string (see make_filename) and the expanded
    name is opened with the given mode.
    """
    writing = 'w' in mode
    if not pat or pat == '-':
        return writing and sys.stdout or sys.stdin
    if writing and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    name = make_filename(repo, r, pat, node, total, seqno, revwidth,
                         pathname)
    return open(name, mode)
258
258
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False):
    """Write a unified diff between revisions node1 and node2 to fp.

    fp:      file-like object the diff text is written to
    files:   optional list of file names restricting the diff
    match:   matcher further narrowing which files are compared
    changes: precomputed (changed, added, deleted, unknown) tuple; when
             falsy it is recomputed with repo.changes()
    text:    forwarded to mdiff.unidiff (treat files as text)
    """
    # work out which files changed / were added / were deleted
    if not changes:
        (c, a, d, u) = repo.changes(node1, node2, files, match=match)
    else:
        (c, a, d, u) = changes
    if files:
        c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))

    if not c and not a and not d:
        return

    if node2:
        # diff against a committed revision: new contents come from
        # node2's manifest
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        # diff against the working directory: new contents come from
        # the filesystem; default node1 to the first dirstate parent
        date2 = util.datestr()
        if not node1:
            node1 = repo.dirstate.parents()[0]
        def read(f):
            return repo.wfile(f).read()

    # revision ids for the diff header (suppressed in quiet mode)
    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    # changed files: old contents from node1's manifest (if present)
    for f in c:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
    # added files: no old contents
    for f in a:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
    # deleted files: no new contents
    for f in d:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
308
308
def trimuser(ui, name, rev, revcache):
    """Return the shortened committer name for rev, memoized in revcache."""
    cached = revcache.get(rev)
    if cached is not None:
        return cached
    shortened = ui.shortuser(name)
    revcache[rev] = shortened
    return shortened
315
315
def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
    """show a single changeset or file revision

    Either rev (a revision number) or changenode (a changelog node)
    identifies the changeset; the one not given is derived from the
    other.  brinfo, when given, maps changelog nodes to branch names.
    """
    log = repo.changelog
    if changenode is None:
        changenode = log.node(rev)
    elif not rev:
        rev = log.rev(changenode)

    # quiet mode prints just "rev:shortid"
    if ui.quiet:
        ui.write("%d:%s\n" % (rev, short(changenode)))
        return

    changes = log.read(changenode)
    date = util.datestr(changes[2])

    # collect parents worth showing: the null parent only when debugging,
    # and suppress the sole trivial parent (rev - 1) entirely
    parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
               for p in log.parents(changenode)
               if ui.debugflag or p != nullid]
    if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
        parents = []

    if ui.verbose:
        ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
    else:
        ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

    for tag in repo.nodetags(changenode):
        ui.status(_("tag: %s\n") % tag)
    for parent in parents:
        ui.write(_("parent: %d:%s\n") % parent)

    if brinfo and changenode in brinfo:
        br = brinfo[changenode]
        ui.write(_("branch: %s\n") % " ".join(br))

    ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
                                       hex(changes[0])))
    ui.status(_("user: %s\n") % changes[1])
    ui.status(_("date: %s\n") % date)

    if ui.debugflag:
        # debug mode: break the file list down into changed/added/removed
        files = repo.changes(log.parents(changenode)[0], changenode)
        for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
            if value:
                ui.note("%-12s %s\n" % (key, " ".join(value)))
    else:
        ui.note(_("files: %s\n") % " ".join(changes[3]))

    description = changes[4].strip()
    if description:
        if ui.verbose:
            ui.status(_("description:\n"))
            ui.status(description)
            ui.status("\n\n")
        else:
            # non-verbose: first line of the description only
            ui.status(_("summary: %s\n") % description.splitlines()[0])
    ui.status("\n")
373
373
def show_version(ui):
    """output version and copyright information"""
    banner = _("Mercurial Distributed SCM (version %s)\n")
    ui.write(banner % version.get_version())
    notice = _(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    ui.status(notice)
384
384
def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands"""
    # option tables to render at the end, as (title, options) pairs
    option_lists = []
    if cmd and cmd != 'shortlist':
        # help for one specific command
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description (first docstring line only in quiet mode)
        doc = i[0].__doc__
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        # h: command name -> one-line description
        # cmds: command name -> full "name|alias|..." spec
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|")[0]
            # shortlist only shows commands flagged with a leading "^"
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            # hide debug commands unless --debug was given
            if not ui.debugflag and f.startswith("debug"):
                continue
            d = ""
            if e[0].__doc__:
                d = e[0].__doc__.splitlines(0)[0].rstrip()
            h[f] = d
            cmds[f]=c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands,h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists; entries are (text, description-or-None),
    # where a None description marks a section title line
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # align descriptions to the widest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
476
476
477 # Commands start here, listed alphabetically
477 # Commands start here, listed alphabetically
478
478
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the current directory and
    its subdirectories.
    """
    # collect the files to schedule; explicitly named files are always
    # taken, pattern matches only when not yet tracked ('?' state)
    scheduled = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if not exact and repo.dirstate.state(abs) != '?':
            continue
        # exact names are only announced in verbose mode
        if not exact or ui.verbose:
            ui.status(_('adding %s\n') % rel)
        scheduled.append(abs)
    repo.add(scheduled)
499
499
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.
    """
    added, removed = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # announce pattern matches always, exact names only when verbose
        chatty = ui.verbose or not exact
        # untracked file present on disk -> schedule for add
        if src == 'f' and repo.dirstate.state(abs) == '?':
            added.append(abs)
            if chatty:
                ui.status(_('adding %s\n') % rel)
        # tracked file gone from disk (and not already removed) -> remove
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            removed.append(abs)
            if chatty:
                ui.status(_('removing %s\n') % rel)
    repo.add(added)
    repo.remove(removed)
520
520
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # per-column formatters: map a revision number to display text
    def getnode(rev):
        return short(repo.changelog.node(rev))

    # cache of rev -> shortened committer name
    ucache = {}
    def getname(rev):
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    # cache of rev -> formatted commit date
    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            cl = repo.changelog.read(repo.changelog.node(rev))
            datestr = dcache[rev] = util.datestr(cl[2])
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    # default to showing revision numbers when no column was requested
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    # resolve the revision to annotate (default: working dir parent)
    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts):
        if abs not in mmap:
            ui.warn(_("warning: %s is not in the repository!\n") % rel)
            continue

        f = repo.file(abs)
        if not opts['text'] and util.binary(f.read(mmap[abs])):
            ui.write(_("%s: binary file\n") % rel)
            continue

        lines = f.annotate(mmap[abs])
        pieces = []

        # build one right-aligned column per requested field
        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        # zip the columns back together with the file's lines
        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
587
587
def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    f = open(fname, "wb")
    dest = ui.expandpath(dest, repo.root)
    other = hg.repository(ui, dest)
    # changesets we have that the destination lacks
    o = repo.findoutgoing(other)
    cg = repo.changegroup(o)

    try:
        # "HG10" header identifies the bundle format; the payload is
        # the bz2-compressed changegroup stream
        f.write("HG10")
        z = bz2.BZ2Compressor(9)
        while 1:
            chunk = cg.read(4096)
            if not chunk:
                break
            f.write(z.compress(chunk))
        f.write(z.flush())
        # close explicitly so buffered data reaches disk and write
        # errors surface here instead of being silently dropped
        # (the original never closed f)
        f.close()
    except:
        # remove the partial bundle before re-raising
        f.close()
        os.unlink(fname)
        raise
621
621
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    mf = {}
    rev = opts['rev']
    if rev:
        # resolve the manifest of the requested revision once up front
        change = repo.changelog.read(repo.lookup(rev))
        mf = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
        r = repo.file(abs)
        if rev:
            try:
                n = mf[abs]
            except (hg.RepoError, KeyError):
                # file missing from that changeset's manifest; fall back
                # to interpreting rev in the file's own revision history
                try:
                    n = r.lookup(rev)
                except KeyError, inst:
                    raise util.Abort(_('cannot find file %s in rev %s'), rel, rev)
        else:
            n = r.tip()
        # open the destination (file pattern, '-', or stdout) and emit
        fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
        fp.write(r.read(n))
655
655
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem. Some filesystems,
    such as AFS, implement hardlinking incorrectly, but do not report
    errors. In these cases, use the --pull option to avoid
    hardlinking.
    """
    if dest is None:
        dest = os.path.basename(os.path.normpath(source))

    if os.path.exists(dest):
        raise util.Abort(_("destination '%s' already exists"), dest)

    dest = os.path.realpath(dest)

    class Dircleanup(object):
        # removes the partially-created destination directory if the
        # clone fails before close() is called
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
            os.mkdir(dir_)
        def close(self):
            # clone succeeded: disarm the cleanup
            self.dir_ = None
        def __del__(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    if not os.path.exists(source):
        source = ui.expandpath(source)

    d = Dircleanup(dest)
    abspath = source
    other = hg.repository(ui, source)

    # a direct file copy is only possible for a local source (dev() != -1)
    # when neither --pull nor a revision subset was requested
    copy = False
    if other.dev() != -1:
        abspath = os.path.abspath(source)
        if not opts['pull'] and not opts['rev']:
            copy = True

    if copy:
        try:
            # we use a lock here because if we race with commit, we
            # can end up with extra data in the cloned revlogs that's
            # not pointed to by changesets, thus causing verify to
            # fail
            l1 = lock.lock(os.path.join(source, ".hg", "lock"))
        except OSError:
            # source not lockable: fall back to a pull-based clone
            copy = False

    if copy:
        # we lock here to avoid premature writing to the target
        os.mkdir(os.path.join(dest, ".hg"))
        l2 = lock.lock(os.path.join(dest, ".hg", "lock"))

        files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
        for f in files.split():
            src = os.path.join(source, ".hg", f)
            dst = os.path.join(dest, ".hg", f)
            try:
                util.copyfiles(src, dst)
            except OSError, inst:
                # a missing revlog file is fine (e.g. empty repository)
                if inst.errno != errno.ENOENT: raise

        repo = hg.repository(ui, dest)

    else:
        # pull-based clone, optionally limited to the given revisions
        revs = None
        if opts['rev']:
            if not other.local():
                raise util.Abort("clone -r not supported yet for remote repositories.")
            else:
                revs = [other.lookup(rev) for rev in opts['rev']]
        repo = hg.repository(ui, dest, create=1)
        repo.pull(other, heads = revs)

    # record the source as the default path for future pulls
    f = repo.opener("hgrc", "w", text=True)
    f.write("[paths]\n")
    f.write("default = %s\n" % abspath)
    f.close()

    if not opts['noupdate']:
        update(ui, repo)

    d.close()
755
755
756 def commit(ui, repo, *pats, **opts):
756 def commit(ui, repo, *pats, **opts):
757 """commit the specified files or all outstanding changes
757 """commit the specified files or all outstanding changes
758
758
759 Commit changes to the given files into the repository.
759 Commit changes to the given files into the repository.
760
760
761 If a list of files is omitted, all changes reported by "hg status"
761 If a list of files is omitted, all changes reported by "hg status"
762 from the root of the repository will be commited.
762 from the root of the repository will be commited.
763
763
764 The HGEDITOR or EDITOR environment variables are used to start an
764 The HGEDITOR or EDITOR environment variables are used to start an
765 editor to add a commit comment.
765 editor to add a commit comment.
766 """
766 """
767 message = opts['message']
767 message = opts['message']
768 logfile = opts['logfile']
768 logfile = opts['logfile']
769
769
770 if message and logfile:
770 if message and logfile:
771 raise util.Abort(_('options --message and --logfile are mutually '
771 raise util.Abort(_('options --message and --logfile are mutually '
772 'exclusive'))
772 'exclusive'))
773 if not message and logfile:
773 if not message and logfile:
774 try:
774 try:
775 if logfile == '-':
775 if logfile == '-':
776 message = sys.stdin.read()
776 message = sys.stdin.read()
777 else:
777 else:
778 message = open(logfile).read()
778 message = open(logfile).read()
779 except IOError, inst:
779 except IOError, inst:
780 raise util.Abort(_("can't read commit message '%s': %s") %
780 raise util.Abort(_("can't read commit message '%s': %s") %
781 (logfile, inst.strerror))
781 (logfile, inst.strerror))
782
782
783 if opts['addremove']:
783 if opts['addremove']:
784 addremove(ui, repo, *pats, **opts)
784 addremove(ui, repo, *pats, **opts)
785 cwd = repo.getcwd()
785 cwd = repo.getcwd()
786 if not pats and cwd:
786 if not pats and cwd:
787 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
787 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
788 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
788 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
789 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
789 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
790 pats, opts)
790 pats, opts)
791 if pats:
791 if pats:
792 c, a, d, u = repo.changes(files=fns, match=match)
792 c, a, d, u = repo.changes(files=fns, match=match)
793 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
793 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
794 else:
794 else:
795 files = []
795 files = []
796 try:
796 try:
797 repo.commit(files, message, opts['user'], opts['date'], match)
797 repo.commit(files, message, opts['user'], opts['date'], match)
798 except ValueError, inst:
798 except ValueError, inst:
799 raise util.Abort(str(inst))
799 raise util.Abort(str(inst))
800
800
801 def docopy(ui, repo, pats, opts):
801 def docopy(ui, repo, pats, opts):
802 cwd = repo.getcwd()
802 cwd = repo.getcwd()
803 errors = 0
803 errors = 0
804 copied = []
804 copied = []
805 targets = {}
805 targets = {}
806
806
807 def okaytocopy(abs, rel, exact):
807 def okaytocopy(abs, rel, exact):
808 reasons = {'?': _('is not managed'),
808 reasons = {'?': _('is not managed'),
809 'a': _('has been marked for add')}
809 'a': _('has been marked for add')}
810 reason = reasons.get(repo.dirstate.state(abs))
810 reason = reasons.get(repo.dirstate.state(abs))
811 if reason:
811 if reason:
812 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
812 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
813 else:
813 else:
814 return True
814 return True
815
815
816 def copy(abssrc, relsrc, target, exact):
816 def copy(abssrc, relsrc, target, exact):
817 abstarget = util.canonpath(repo.root, cwd, target)
817 abstarget = util.canonpath(repo.root, cwd, target)
818 reltarget = util.pathto(cwd, abstarget)
818 reltarget = util.pathto(cwd, abstarget)
819 prevsrc = targets.get(abstarget)
819 prevsrc = targets.get(abstarget)
820 if prevsrc is not None:
820 if prevsrc is not None:
821 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
821 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
822 (reltarget, abssrc, prevsrc))
822 (reltarget, abssrc, prevsrc))
823 return
823 return
824 elif os.path.exists(reltarget):
824 elif os.path.exists(reltarget):
825 if opts['force']:
825 if opts['force']:
826 os.unlink(reltarget)
826 os.unlink(reltarget)
827 else:
827 else:
828 ui.warn(_('%s: not overwriting - file exists\n') %
828 ui.warn(_('%s: not overwriting - file exists\n') %
829 reltarget)
829 reltarget)
830 return
830 return
831 if ui.verbose or not exact:
831 if ui.verbose or not exact:
832 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
832 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
833 if not opts['after']:
833 if not opts['after']:
834 targetdir = os.path.dirname(reltarget) or '.'
834 targetdir = os.path.dirname(reltarget) or '.'
835 if not os.path.isdir(targetdir):
835 if not os.path.isdir(targetdir):
836 os.makedirs(targetdir)
836 os.makedirs(targetdir)
837 try:
837 try:
838 shutil.copyfile(relsrc, reltarget)
838 shutil.copyfile(relsrc, reltarget)
839 shutil.copymode(relsrc, reltarget)
839 shutil.copymode(relsrc, reltarget)
840 except shutil.Error, inst:
840 except shutil.Error, inst:
841 raise util.Abort(str(inst))
841 raise util.Abort(str(inst))
842 except IOError, inst:
842 except IOError, inst:
843 if inst.errno == errno.ENOENT:
843 if inst.errno == errno.ENOENT:
844 ui.warn(_('%s: deleted in working copy\n') % relsrc)
844 ui.warn(_('%s: deleted in working copy\n') % relsrc)
845 else:
845 else:
846 ui.warn(_('%s: cannot copy - %s\n') %
846 ui.warn(_('%s: cannot copy - %s\n') %
847 (relsrc, inst.strerror))
847 (relsrc, inst.strerror))
848 errors += 1
848 errors += 1
849 return
849 return
850 targets[abstarget] = abssrc
850 targets[abstarget] = abssrc
851 repo.copy(abssrc, abstarget)
851 repo.copy(abssrc, abstarget)
852 copied.append((abssrc, relsrc, exact))
852 copied.append((abssrc, relsrc, exact))
853
853
854 pats = list(pats)
854 pats = list(pats)
855 if not pats:
855 if not pats:
856 raise util.Abort(_('no source or destination specified'))
856 raise util.Abort(_('no source or destination specified'))
857 if len(pats) == 1:
857 if len(pats) == 1:
858 raise util.Abort(_('no destination specified'))
858 raise util.Abort(_('no destination specified'))
859 dest = pats.pop()
859 dest = pats.pop()
860 destdirexists = os.path.isdir(dest)
860 destdirexists = os.path.isdir(dest)
861 if (len(pats) > 1 or not os.path.exists(pats[0])) and not destdirexists:
861 if (len(pats) > 1 or not os.path.exists(pats[0])) and not destdirexists:
862 raise util.Abort(_('with multiple sources, destination must be an '
862 raise util.Abort(_('with multiple sources, destination must be an '
863 'existing directory'))
863 'existing directory'))
864
864
865 for pat in pats:
865 for pat in pats:
866 if os.path.isdir(pat):
866 if os.path.isdir(pat):
867 if destdirexists:
867 if destdirexists:
868 striplen = len(os.path.split(pat)[0])
868 striplen = len(os.path.split(pat)[0])
869 else:
869 else:
870 striplen = len(pat)
870 striplen = len(pat)
871 if striplen:
871 if striplen:
872 striplen += len(os.sep)
872 striplen += len(os.sep)
873 targetpath = lambda p: os.path.join(dest, p[striplen:])
873 targetpath = lambda p: os.path.join(dest, p[striplen:])
874 elif destdirexists:
874 elif destdirexists:
875 targetpath = lambda p: os.path.join(dest, os.path.basename(p))
875 targetpath = lambda p: os.path.join(dest, os.path.basename(p))
876 else:
876 else:
877 targetpath = lambda p: dest
877 targetpath = lambda p: dest
878 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
878 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
879 if okaytocopy(abssrc, relsrc, exact):
879 if okaytocopy(abssrc, relsrc, exact):
880 copy(abssrc, relsrc, targetpath(abssrc), exact)
880 copy(abssrc, relsrc, targetpath(abssrc), exact)
881
881
882 if errors:
882 if errors:
883 ui.warn(_('(consider using --after)\n'))
883 ui.warn(_('(consider using --after)\n'))
884 if len(copied) == 0:
884 if len(copied) == 0:
885 raise util.Abort(_('no files to copy'))
885 raise util.Abort(_('no files to copy'))
886 return errors, copied
886 return errors, copied
887
887
888 def copy(ui, repo, *pats, **opts):
888 def copy(ui, repo, *pats, **opts):
889 """mark files as copied for the next commit
889 """mark files as copied for the next commit
890
890
891 Mark dest as having copies of source files. If dest is a
891 Mark dest as having copies of source files. If dest is a
892 directory, copies are put in that directory. If dest is a file,
892 directory, copies are put in that directory. If dest is a file,
893 there can only be one source.
893 there can only be one source.
894
894
895 By default, this command copies the contents of files as they
895 By default, this command copies the contents of files as they
896 stand in the working directory. If invoked with --after, the
896 stand in the working directory. If invoked with --after, the
897 operation is recorded, but no copying is performed.
897 operation is recorded, but no copying is performed.
898
898
899 This command takes effect in the next commit.
899 This command takes effect in the next commit.
900
900
901 NOTE: This command should be treated as experimental. While it
901 NOTE: This command should be treated as experimental. While it
902 should properly record copied files, this information is not yet
902 should properly record copied files, this information is not yet
903 fully used by merge, nor fully reported by log.
903 fully used by merge, nor fully reported by log.
904 """
904 """
905 errs, copied = docopy(ui, repo, pats, opts)
905 errs, copied = docopy(ui, repo, pats, opts)
906 return errs
906 return errs
907
907
908 def debugancestor(ui, index, rev1, rev2):
908 def debugancestor(ui, index, rev1, rev2):
909 """find the ancestor revision of two revisions in a given index"""
909 """find the ancestor revision of two revisions in a given index"""
910 r = revlog.revlog(util.opener(os.getcwd()), index, "")
910 r = revlog.revlog(util.opener(os.getcwd()), index, "")
911 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
911 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
912 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
912 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
913
913
914 def debugcheckstate(ui, repo):
914 def debugcheckstate(ui, repo):
915 """validate the correctness of the current dirstate"""
915 """validate the correctness of the current dirstate"""
916 parent1, parent2 = repo.dirstate.parents()
916 parent1, parent2 = repo.dirstate.parents()
917 repo.dirstate.read()
917 repo.dirstate.read()
918 dc = repo.dirstate.map
918 dc = repo.dirstate.map
919 keys = dc.keys()
919 keys = dc.keys()
920 keys.sort()
920 keys.sort()
921 m1n = repo.changelog.read(parent1)[0]
921 m1n = repo.changelog.read(parent1)[0]
922 m2n = repo.changelog.read(parent2)[0]
922 m2n = repo.changelog.read(parent2)[0]
923 m1 = repo.manifest.read(m1n)
923 m1 = repo.manifest.read(m1n)
924 m2 = repo.manifest.read(m2n)
924 m2 = repo.manifest.read(m2n)
925 errors = 0
925 errors = 0
926 for f in dc:
926 for f in dc:
927 state = repo.dirstate.state(f)
927 state = repo.dirstate.state(f)
928 if state in "nr" and f not in m1:
928 if state in "nr" and f not in m1:
929 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
929 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
930 errors += 1
930 errors += 1
931 if state in "a" and f in m1:
931 if state in "a" and f in m1:
932 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
932 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
933 errors += 1
933 errors += 1
934 if state in "m" and f not in m1 and f not in m2:
934 if state in "m" and f not in m1 and f not in m2:
935 ui.warn(_("%s in state %s, but not in either manifest\n") %
935 ui.warn(_("%s in state %s, but not in either manifest\n") %
936 (f, state))
936 (f, state))
937 errors += 1
937 errors += 1
938 for f in m1:
938 for f in m1:
939 state = repo.dirstate.state(f)
939 state = repo.dirstate.state(f)
940 if state not in "nrm":
940 if state not in "nrm":
941 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
941 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
942 errors += 1
942 errors += 1
943 if errors:
943 if errors:
944 raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
944 raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
945
945
946 def debugconfig(ui):
946 def debugconfig(ui):
947 """show combined config settings from all hgrc files"""
947 """show combined config settings from all hgrc files"""
948 try:
948 try:
949 repo = hg.repository(ui)
949 repo = hg.repository(ui)
950 except hg.RepoError:
950 except hg.RepoError:
951 pass
951 pass
952 for section, name, value in ui.walkconfig():
952 for section, name, value in ui.walkconfig():
953 ui.write('%s.%s=%s\n' % (section, name, value))
953 ui.write('%s.%s=%s\n' % (section, name, value))
954
954
955 def debugsetparents(ui, repo, rev1, rev2=None):
955 def debugsetparents(ui, repo, rev1, rev2=None):
956 """manually set the parents of the current working directory
956 """manually set the parents of the current working directory
957
957
958 This is useful for writing repository conversion tools, but should
958 This is useful for writing repository conversion tools, but should
959 be used with care.
959 be used with care.
960 """
960 """
961
961
962 if not rev2:
962 if not rev2:
963 rev2 = hex(nullid)
963 rev2 = hex(nullid)
964
964
965 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
965 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
966
966
967 def debugstate(ui, repo):
967 def debugstate(ui, repo):
968 """show the contents of the current dirstate"""
968 """show the contents of the current dirstate"""
969 repo.dirstate.read()
969 repo.dirstate.read()
970 dc = repo.dirstate.map
970 dc = repo.dirstate.map
971 keys = dc.keys()
971 keys = dc.keys()
972 keys.sort()
972 keys.sort()
973 for file_ in keys:
973 for file_ in keys:
974 ui.write("%c %3o %10d %s %s\n"
974 ui.write("%c %3o %10d %s %s\n"
975 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
975 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
976 time.strftime("%x %X",
976 time.strftime("%x %X",
977 time.localtime(dc[file_][3])), file_))
977 time.localtime(dc[file_][3])), file_))
978 for f in repo.dirstate.copies:
978 for f in repo.dirstate.copies:
979 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
979 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
980
980
981 def debugdata(ui, file_, rev):
981 def debugdata(ui, file_, rev):
982 """dump the contents of an data file revision"""
982 """dump the contents of an data file revision"""
983 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
983 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
984 try:
984 try:
985 ui.write(r.revision(r.lookup(rev)))
985 ui.write(r.revision(r.lookup(rev)))
986 except KeyError:
986 except KeyError:
987 raise util.Abort(_('invalid revision identifier %s'), rev)
987 raise util.Abort(_('invalid revision identifier %s'), rev)
988
988
989 def debugindex(ui, file_):
989 def debugindex(ui, file_):
990 """dump the contents of an index file"""
990 """dump the contents of an index file"""
991 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
991 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
992 ui.write(" rev offset length base linkrev" +
992 ui.write(" rev offset length base linkrev" +
993 " nodeid p1 p2\n")
993 " nodeid p1 p2\n")
994 for i in range(r.count()):
994 for i in range(r.count()):
995 e = r.index[i]
995 e = r.index[i]
996 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
996 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
997 i, e[0], e[1], e[2], e[3],
997 i, e[0], e[1], e[2], e[3],
998 short(e[6]), short(e[4]), short(e[5])))
998 short(e[6]), short(e[4]), short(e[5])))
999
999
1000 def debugindexdot(ui, file_):
1000 def debugindexdot(ui, file_):
1001 """dump an index DAG as a .dot file"""
1001 """dump an index DAG as a .dot file"""
1002 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1002 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1003 ui.write("digraph G {\n")
1003 ui.write("digraph G {\n")
1004 for i in range(r.count()):
1004 for i in range(r.count()):
1005 e = r.index[i]
1005 e = r.index[i]
1006 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1006 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1007 if e[5] != nullid:
1007 if e[5] != nullid:
1008 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1008 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1009 ui.write("}\n")
1009 ui.write("}\n")
1010
1010
1011 def debugrename(ui, repo, file, rev=None):
1011 def debugrename(ui, repo, file, rev=None):
1012 """dump rename information"""
1012 """dump rename information"""
1013 r = repo.file(relpath(repo, [file])[0])
1013 r = repo.file(relpath(repo, [file])[0])
1014 if rev:
1014 if rev:
1015 try:
1015 try:
1016 # assume all revision numbers are for changesets
1016 # assume all revision numbers are for changesets
1017 n = repo.lookup(rev)
1017 n = repo.lookup(rev)
1018 change = repo.changelog.read(n)
1018 change = repo.changelog.read(n)
1019 m = repo.manifest.read(change[0])
1019 m = repo.manifest.read(change[0])
1020 n = m[relpath(repo, [file])[0]]
1020 n = m[relpath(repo, [file])[0]]
1021 except (hg.RepoError, KeyError):
1021 except (hg.RepoError, KeyError):
1022 n = r.lookup(rev)
1022 n = r.lookup(rev)
1023 else:
1023 else:
1024 n = r.tip()
1024 n = r.tip()
1025 m = r.renamed(n)
1025 m = r.renamed(n)
1026 if m:
1026 if m:
1027 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1027 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1028 else:
1028 else:
1029 ui.write(_("not renamed\n"))
1029 ui.write(_("not renamed\n"))
1030
1030
1031 def debugwalk(ui, repo, *pats, **opts):
1031 def debugwalk(ui, repo, *pats, **opts):
1032 """show how files match on given patterns"""
1032 """show how files match on given patterns"""
1033 items = list(walk(repo, pats, opts))
1033 items = list(walk(repo, pats, opts))
1034 if not items:
1034 if not items:
1035 return
1035 return
1036 fmt = '%%s %%-%ds %%-%ds %%s' % (
1036 fmt = '%%s %%-%ds %%-%ds %%s' % (
1037 max([len(abs) for (src, abs, rel, exact) in items]),
1037 max([len(abs) for (src, abs, rel, exact) in items]),
1038 max([len(rel) for (src, abs, rel, exact) in items]))
1038 max([len(rel) for (src, abs, rel, exact) in items]))
1039 for src, abs, rel, exact in items:
1039 for src, abs, rel, exact in items:
1040 line = fmt % (src, abs, rel, exact and 'exact' or '')
1040 line = fmt % (src, abs, rel, exact and 'exact' or '')
1041 ui.write("%s\n" % line.rstrip())
1041 ui.write("%s\n" % line.rstrip())
1042
1042
1043 def diff(ui, repo, *pats, **opts):
1043 def diff(ui, repo, *pats, **opts):
1044 """diff working directory (or selected files)
1044 """diff working directory (or selected files)
1045
1045
1046 Show differences between revisions for the specified files.
1046 Show differences between revisions for the specified files.
1047
1047
1048 Differences between files are shown using the unified diff format.
1048 Differences between files are shown using the unified diff format.
1049
1049
1050 When two revision arguments are given, then changes are shown
1050 When two revision arguments are given, then changes are shown
1051 between those revisions. If only one revision is specified then
1051 between those revisions. If only one revision is specified then
1052 that revision is compared to the working directory, and, when no
1052 that revision is compared to the working directory, and, when no
1053 revisions are specified, the working directory files are compared
1053 revisions are specified, the working directory files are compared
1054 to its parent.
1054 to its parent.
1055
1055
1056 Without the -a option, diff will avoid generating diffs of files
1056 Without the -a option, diff will avoid generating diffs of files
1057 it detects as binary. With -a, diff will generate a diff anyway,
1057 it detects as binary. With -a, diff will generate a diff anyway,
1058 probably with undesirable results.
1058 probably with undesirable results.
1059 """
1059 """
1060 node1, node2 = None, None
1060 node1, node2 = None, None
1061 revs = [repo.lookup(x) for x in opts['rev']]
1061 revs = [repo.lookup(x) for x in opts['rev']]
1062
1062
1063 if len(revs) > 0:
1063 if len(revs) > 0:
1064 node1 = revs[0]
1064 node1 = revs[0]
1065 if len(revs) > 1:
1065 if len(revs) > 1:
1066 node2 = revs[1]
1066 node2 = revs[1]
1067 if len(revs) > 2:
1067 if len(revs) > 2:
1068 raise util.Abort(_("too many revisions to diff"))
1068 raise util.Abort(_("too many revisions to diff"))
1069
1069
1070 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
1070 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
1071
1071
1072 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1072 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1073 text=opts['text'])
1073 text=opts['text'])
1074
1074
1075 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1075 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1076 node = repo.lookup(changeset)
1076 node = repo.lookup(changeset)
1077 prev, other = repo.changelog.parents(node)
1077 prev, other = repo.changelog.parents(node)
1078 change = repo.changelog.read(node)
1078 change = repo.changelog.read(node)
1079
1079
1080 fp = make_file(repo, repo.changelog, opts['output'],
1080 fp = make_file(repo, repo.changelog, opts['output'],
1081 node=node, total=total, seqno=seqno,
1081 node=node, total=total, seqno=seqno,
1082 revwidth=revwidth)
1082 revwidth=revwidth)
1083 if fp != sys.stdout:
1083 if fp != sys.stdout:
1084 ui.note("%s\n" % fp.name)
1084 ui.note("%s\n" % fp.name)
1085
1085
1086 fp.write("# HG changeset patch\n")
1086 fp.write("# HG changeset patch\n")
1087 fp.write("# User %s\n" % change[1])
1087 fp.write("# User %s\n" % change[1])
1088 fp.write("# Node ID %s\n" % hex(node))
1088 fp.write("# Node ID %s\n" % hex(node))
1089 fp.write("# Parent %s\n" % hex(prev))
1089 fp.write("# Parent %s\n" % hex(prev))
1090 if other != nullid:
1090 if other != nullid:
1091 fp.write("# Parent %s\n" % hex(other))
1091 fp.write("# Parent %s\n" % hex(other))
1092 fp.write(change[4].rstrip())
1092 fp.write(change[4].rstrip())
1093 fp.write("\n\n")
1093 fp.write("\n\n")
1094
1094
1095 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1095 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1096 if fp != sys.stdout:
1096 if fp != sys.stdout:
1097 fp.close()
1097 fp.close()
1098
1098
1099 def export(ui, repo, *changesets, **opts):
1099 def export(ui, repo, *changesets, **opts):
1100 """dump the header and diffs for one or more changesets
1100 """dump the header and diffs for one or more changesets
1101
1101
1102 Print the changeset header and diffs for one or more revisions.
1102 Print the changeset header and diffs for one or more revisions.
1103
1103
1104 The information shown in the changeset header is: author,
1104 The information shown in the changeset header is: author,
1105 changeset hash, parent and commit comment.
1105 changeset hash, parent and commit comment.
1106
1106
1107 Output may be to a file, in which case the name of the file is
1107 Output may be to a file, in which case the name of the file is
1108 given using a format string. The formatting rules are as follows:
1108 given using a format string. The formatting rules are as follows:
1109
1109
1110 %% literal "%" character
1110 %% literal "%" character
1111 %H changeset hash (40 bytes of hexadecimal)
1111 %H changeset hash (40 bytes of hexadecimal)
1112 %N number of patches being generated
1112 %N number of patches being generated
1113 %R changeset revision number
1113 %R changeset revision number
1114 %b basename of the exporting repository
1114 %b basename of the exporting repository
1115 %h short-form changeset hash (12 bytes of hexadecimal)
1115 %h short-form changeset hash (12 bytes of hexadecimal)
1116 %n zero-padded sequence number, starting at 1
1116 %n zero-padded sequence number, starting at 1
1117 %r zero-padded changeset revision number
1117 %r zero-padded changeset revision number
1118
1118
1119 Without the -a option, export will avoid generating diffs of files
1119 Without the -a option, export will avoid generating diffs of files
1120 it detects as binary. With -a, export will generate a diff anyway,
1120 it detects as binary. With -a, export will generate a diff anyway,
1121 probably with undesirable results.
1121 probably with undesirable results.
1122 """
1122 """
1123 if not changesets:
1123 if not changesets:
1124 raise util.Abort(_("export requires at least one changeset"))
1124 raise util.Abort(_("export requires at least one changeset"))
1125 seqno = 0
1125 seqno = 0
1126 revs = list(revrange(ui, repo, changesets))
1126 revs = list(revrange(ui, repo, changesets))
1127 total = len(revs)
1127 total = len(revs)
1128 revwidth = max(map(len, revs))
1128 revwidth = max(map(len, revs))
1129 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
1129 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
1130 for cset in revs:
1130 for cset in revs:
1131 seqno += 1
1131 seqno += 1
1132 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1132 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1133
1133
1134 def forget(ui, repo, *pats, **opts):
1134 def forget(ui, repo, *pats, **opts):
1135 """don't add the specified files on the next commit
1135 """don't add the specified files on the next commit
1136
1136
1137 Undo an 'hg add' scheduled for the next commit.
1137 Undo an 'hg add' scheduled for the next commit.
1138 """
1138 """
1139 forget = []
1139 forget = []
1140 for src, abs, rel, exact in walk(repo, pats, opts):
1140 for src, abs, rel, exact in walk(repo, pats, opts):
1141 if repo.dirstate.state(abs) == 'a':
1141 if repo.dirstate.state(abs) == 'a':
1142 forget.append(abs)
1142 forget.append(abs)
1143 if ui.verbose or not exact:
1143 if ui.verbose or not exact:
1144 ui.status(_('forgetting %s\n') % rel)
1144 ui.status(_('forgetting %s\n') % rel)
1145 repo.forget(forget)
1145 repo.forget(forget)
1146
1146
1147 def grep(ui, repo, pattern, *pats, **opts):
1147 def grep(ui, repo, pattern, *pats, **opts):
1148 """search for a pattern in specified files and revisions
1148 """search for a pattern in specified files and revisions
1149
1149
1150 Search revisions of files for a regular expression.
1150 Search revisions of files for a regular expression.
1151
1151
1152 This command behaves differently than Unix grep. It only accepts
1152 This command behaves differently than Unix grep. It only accepts
1153 Python/Perl regexps. It searches repository history, not the
1153 Python/Perl regexps. It searches repository history, not the
1154 working directory. It always prints the revision number in which
1154 working directory. It always prints the revision number in which
1155 a match appears.
1155 a match appears.
1156
1156
1157 By default, grep only prints output for the first revision of a
1157 By default, grep only prints output for the first revision of a
1158 file in which it finds a match. To get it to print every revision
1158 file in which it finds a match. To get it to print every revision
1159 that contains a change in match status ("-" for a match that
1159 that contains a change in match status ("-" for a match that
1160 becomes a non-match, or "+" for a non-match that becomes a match),
1160 becomes a non-match, or "+" for a non-match that becomes a match),
1161 use the --all flag.
1161 use the --all flag.
1162 """
1162 """
1163 reflags = 0
1163 reflags = 0
1164 if opts['ignore_case']:
1164 if opts['ignore_case']:
1165 reflags |= re.I
1165 reflags |= re.I
1166 regexp = re.compile(pattern, reflags)
1166 regexp = re.compile(pattern, reflags)
1167 sep, eol = ':', '\n'
1167 sep, eol = ':', '\n'
1168 if opts['print0']:
1168 if opts['print0']:
1169 sep = eol = '\0'
1169 sep = eol = '\0'
1170
1170
1171 fcache = {}
1171 fcache = {}
1172 def getfile(fn):
1172 def getfile(fn):
1173 if fn not in fcache:
1173 if fn not in fcache:
1174 fcache[fn] = repo.file(fn)
1174 fcache[fn] = repo.file(fn)
1175 return fcache[fn]
1175 return fcache[fn]
1176
1176
1177 def matchlines(body):
1177 def matchlines(body):
1178 begin = 0
1178 begin = 0
1179 linenum = 0
1179 linenum = 0
1180 while True:
1180 while True:
1181 match = regexp.search(body, begin)
1181 match = regexp.search(body, begin)
1182 if not match:
1182 if not match:
1183 break
1183 break
1184 mstart, mend = match.span()
1184 mstart, mend = match.span()
1185 linenum += body.count('\n', begin, mstart) + 1
1185 linenum += body.count('\n', begin, mstart) + 1
1186 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1186 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1187 lend = body.find('\n', mend)
1187 lend = body.find('\n', mend)
1188 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1188 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1189 begin = lend + 1
1189 begin = lend + 1
1190
1190
1191 class linestate:
1191 class linestate(object):
1192 def __init__(self, line, linenum, colstart, colend):
1192 def __init__(self, line, linenum, colstart, colend):
1193 self.line = line
1193 self.line = line
1194 self.linenum = linenum
1194 self.linenum = linenum
1195 self.colstart = colstart
1195 self.colstart = colstart
1196 self.colend = colend
1196 self.colend = colend
1197 def __eq__(self, other):
1197 def __eq__(self, other):
1198 return self.line == other.line
1198 return self.line == other.line
1199 def __hash__(self):
1199 def __hash__(self):
1200 return hash(self.line)
1200 return hash(self.line)
1201
1201
1202 matches = {}
1202 matches = {}
1203 def grepbody(fn, rev, body):
1203 def grepbody(fn, rev, body):
1204 matches[rev].setdefault(fn, {})
1204 matches[rev].setdefault(fn, {})
1205 m = matches[rev][fn]
1205 m = matches[rev][fn]
1206 for lnum, cstart, cend, line in matchlines(body):
1206 for lnum, cstart, cend, line in matchlines(body):
1207 s = linestate(line, lnum, cstart, cend)
1207 s = linestate(line, lnum, cstart, cend)
1208 m[s] = s
1208 m[s] = s
1209
1209
1210 prev = {}
1210 prev = {}
1211 ucache = {}
1211 ucache = {}
1212 def display(fn, rev, states, prevstates):
1212 def display(fn, rev, states, prevstates):
1213 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1213 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1214 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1214 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1215 counts = {'-': 0, '+': 0}
1215 counts = {'-': 0, '+': 0}
1216 filerevmatches = {}
1216 filerevmatches = {}
1217 for l in diff:
1217 for l in diff:
1218 if incrementing or not opts['all']:
1218 if incrementing or not opts['all']:
1219 change = ((l in prevstates) and '-') or '+'
1219 change = ((l in prevstates) and '-') or '+'
1220 r = rev
1220 r = rev
1221 else:
1221 else:
1222 change = ((l in states) and '-') or '+'
1222 change = ((l in states) and '-') or '+'
1223 r = prev[fn]
1223 r = prev[fn]
1224 cols = [fn, str(rev)]
1224 cols = [fn, str(rev)]
1225 if opts['line_number']: cols.append(str(l.linenum))
1225 if opts['line_number']: cols.append(str(l.linenum))
1226 if opts['all']: cols.append(change)
1226 if opts['all']: cols.append(change)
1227 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1227 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1228 ucache))
1228 ucache))
1229 if opts['files_with_matches']:
1229 if opts['files_with_matches']:
1230 c = (fn, rev)
1230 c = (fn, rev)
1231 if c in filerevmatches: continue
1231 if c in filerevmatches: continue
1232 filerevmatches[c] = 1
1232 filerevmatches[c] = 1
1233 else:
1233 else:
1234 cols.append(l.line)
1234 cols.append(l.line)
1235 ui.write(sep.join(cols), eol)
1235 ui.write(sep.join(cols), eol)
1236 counts[change] += 1
1236 counts[change] += 1
1237 return counts['+'], counts['-']
1237 return counts['+'], counts['-']
1238
1238
1239 fstate = {}
1239 fstate = {}
1240 skip = {}
1240 skip = {}
1241 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1241 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1242 count = 0
1242 count = 0
1243 incrementing = False
1243 incrementing = False
1244 for st, rev, fns in changeiter:
1244 for st, rev, fns in changeiter:
1245 if st == 'window':
1245 if st == 'window':
1246 incrementing = rev
1246 incrementing = rev
1247 matches.clear()
1247 matches.clear()
1248 elif st == 'add':
1248 elif st == 'add':
1249 change = repo.changelog.read(repo.lookup(str(rev)))
1249 change = repo.changelog.read(repo.lookup(str(rev)))
1250 mf = repo.manifest.read(change[0])
1250 mf = repo.manifest.read(change[0])
1251 matches[rev] = {}
1251 matches[rev] = {}
1252 for fn in fns:
1252 for fn in fns:
1253 if fn in skip: continue
1253 if fn in skip: continue
1254 fstate.setdefault(fn, {})
1254 fstate.setdefault(fn, {})
1255 try:
1255 try:
1256 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1256 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1257 except KeyError:
1257 except KeyError:
1258 pass
1258 pass
1259 elif st == 'iter':
1259 elif st == 'iter':
1260 states = matches[rev].items()
1260 states = matches[rev].items()
1261 states.sort()
1261 states.sort()
1262 for fn, m in states:
1262 for fn, m in states:
1263 if fn in skip: continue
1263 if fn in skip: continue
1264 if incrementing or not opts['all'] or fstate[fn]:
1264 if incrementing or not opts['all'] or fstate[fn]:
1265 pos, neg = display(fn, rev, m, fstate[fn])
1265 pos, neg = display(fn, rev, m, fstate[fn])
1266 count += pos + neg
1266 count += pos + neg
1267 if pos and not opts['all']:
1267 if pos and not opts['all']:
1268 skip[fn] = True
1268 skip[fn] = True
1269 fstate[fn] = m
1269 fstate[fn] = m
1270 prev[fn] = rev
1270 prev[fn] = rev
1271
1271
1272 if not incrementing:
1272 if not incrementing:
1273 fstate = fstate.items()
1273 fstate = fstate.items()
1274 fstate.sort()
1274 fstate.sort()
1275 for fn, state in fstate:
1275 for fn, state in fstate:
1276 if fn in skip: continue
1276 if fn in skip: continue
1277 display(fn, rev, {}, state)
1277 display(fn, rev, {}, state)
1278 return (count == 0 and 1) or 0
1278 return (count == 0 and 1) or 0
1279
1279
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # Restrict to heads reachable from the given revision when -r is used;
    # otherwise ask the repo for all of them.
    if opts['rev']:
        headlist = repo.heads(repo.lookup(opts['rev']))
    else:
        headlist = repo.heads()

    # Branch info is looked up only on request; None suppresses it in the
    # changeset display.
    branchinfo = None
    if opts['branches']:
        branchinfo = repo.branchlookup(headlist)

    for node in headlist:
        show_changeset(ui, repo, changenode=node, brinfo=branchinfo)
1298
1298
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    # Null parents are dropped; an empty list means no checkout at all.
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    # Full 40-char hashes in verbose mode, abbreviated ones otherwise.
    hexfunc = ui.verbose and hex or short
    # (c, a, d, u): presumably changed/added/deleted/unknown file lists —
    # matches how check_clean() tests c/a/d for dirtiness; confirm in repo API.
    (c, a, d, u) = repo.changes()
    # Trailing "+" marks uncommitted changes in the working directory.
    output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
                        (c or a or d) and "+" or "")]

    if not ui.quiet:
        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1327
1327
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    If a patch looks like a mail message (its first line starts with
    "From " or looks like an RFC822 header), it will not be applied
    unless the -f option is used. The importer neither parses nor
    discards mail headers, so use -f only to override the "mailness"
    safety check, not to import a real mail message.
    """
    patches = (patch1,) + patches

    # Refuse to run on a dirty working directory unless forced: a patch
    # failure would mix its hunks with the user's uncommitted changes.
    if not opts['force']:
        (c, a, d, u) = repo.changes()
        if c or a or d:
            raise util.Abort(_("outstanding uncommitted changes"))

    d = opts["base"]       # directory patch file names are taken relative to
    strip = opts["strip"]  # leading path components to strip (patch -p)

    # A line starting like "From " or an RFC822 "Word:" header — used for
    # the "mailness" safety check described in the docstring.
    mailre = re.compile(r'(?:From |[\w-]+:)')

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])')

    for patch in patches:
        ui.status(_("applying %s\n") % patch)
        pf = os.path.join(d, patch)

        message = []      # commit-message lines collected from the header
        user = None       # committer parsed from an "hg export" header
        hgpatch = False   # True while inside a "# HG changeset patch" header
        for line in file(pf):
            line = line.rstrip()
            # Abort on an apparent mail header before any message text has
            # been seen, unless the user forced the import.
            if (not message and not hgpatch and
                mailre.match(line) and not opts['force']):
                if len(line) > 35: line = line[:32] + '...'
                raise util.Abort(_('first line looks like a '
                                   'mail header: ') + line)
            if diffre.match(line):
                # Start of the diff proper — header parsing stops here.
                break
            elif hgpatch:
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                    ui.debug(_('User: %s\n') % user)
                elif not line.startswith("# ") and line:
                    # First non-"# " line ends the export header block.
                    message.append(line)
                    hgpatch = False
            elif line == '# HG changeset patch':
                hgpatch = True
                message = [] # We may have collected garbage
            else:
                message.append(line)

        # make sure message isn't empty
        if not message:
            message = _("imported patch %s\n") % patch
        else:
            message = "%s\n" % '\n'.join(message)
        ui.debug(_('message:\n%s\n') % message)

        files = util.patch(strip, pf, ui)

        # Pick up adds/removes performed by the patch before committing.
        if len(files) > 0:
            addremove(ui, repo, *files)
        repo.commit(files, message, user)
1402
1402
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified repo or the default
    pull repo. These are the changesets that would be pulled if a pull
    was requested.

    Currently only local repositories are supported.
    """
    source = ui.expandpath(source, repo.root)
    other = hg.repository(ui, source)
    # findincoming needs direct access to the other repo's store, hence
    # the local-only restriction.
    if not other.local():
        raise util.Abort(_("incoming doesn't work for remote repositories yet"))
    o = repo.findincoming(other)
    if not o:
        return
    # Expand the discovered roots into the full list of missing changesets.
    o = other.changelog.nodesbetween(o)[0]
    if opts['newest_first']:
        o.reverse()
    for n in o:
        parents = [p for p in other.changelog.parents(n) if p != nullid]
        # Two non-null parents identify a merge changeset.
        if opts['no_merges'] and len(parents) == 2:
            continue
        show_changeset(ui, other, changenode=n)
        if opts['patch']:
            # Diff against the first parent (or null for a root changeset).
            prev = (parents and parents[0]) or nullid
            dodiff(ui, ui, other, prev, n)
            ui.write("\n")
1431
1431
def init(ui, dest="."):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.
    """
    # NOTE(review): exists-then-mkdir is racy (TOCTOU); a concurrent
    # creation of `dest` would make os.mkdir raise OSError here.
    if not os.path.exists(dest):
        os.mkdir(dest)
    # create=1 writes the .hg skeleton into dest.
    hg.repository(ui, dest, create=1)
1443
1443
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # NUL-terminated output for xargs -0, newline otherwise.
    end = opts['print0'] and '\0' or '\n'

    # Renamed from `abs`/`rel` to avoid shadowing the builtin abs().
    for src, abspath, relname, exact in walk(repo, pats, opts, '(?:.*/|)'):
        # '?' means the file is not tracked by Mercurial; skip it.
        if repo.dirstate.state(abspath) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abspath), end)
        else:
            ui.write(relname, end)
1471
1471
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    By default this command outputs: changeset id and hash, tags,
    parents, user, date and time, and a summary for each commit. The
    -v switch adds some more detail, such as changed files, manifest
    hashes or message signatures.
    """
    class dui(object):
        # Implement and delegate some ui protocol.  Save hunks of
        # output for later display in the desired order.
        def __init__(self, ui):
            self.ui = ui
            # Output hunks buffered per revision: rev -> list of write() args.
            self.hunk = {}
        def bump(self, rev):
            # Start buffering output for a new revision.
            self.rev = rev
            self.hunk[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            # Everything not overridden (verbose, quiet, ...) comes from
            # the real ui object.
            return getattr(self.ui, key)
    cwd = repo.getcwd()
    # Without patterns, scope -I/-X options to the current directory.
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
                                           pats, opts)
    # The iterator emits a small protocol: 'window' opens a batch of
    # revisions, 'add' introduces one revision, 'iter' flushes it for
    # display in the desired order.
    for st, rev, fns in changeiter:
        if st == 'window':
            du = dui(ui)
        elif st == 'add':
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parents(changenode)
                       if p != nullid]
            # Two non-null parents identify a merge changeset.
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            br = None
            if opts['keyword']:
                changes = repo.changelog.read(repo.changelog.node(rev))
                # Keyword search over user (changes[1]), description
                # (changes[4]) and the first 20 changed files (changes[3])
                # — presumed field layout; verify against changelog.read.
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            if opts['branch']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            show_changeset(du, repo, rev, brinfo=br)
            if opts['patch']:
                # Diff against the first parent (or null for a root).
                prev = (parents and parents[0]) or nullid
                dodiff(du, du, repo, prev, changenode, fns)
                du.write("\n\n")
        elif st == 'iter':
            # Replay the buffered hunks for this revision on the real ui.
            for args in du.hunk[rev]:
                ui.write(*args)
1547
1547
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the tip is used.
    """
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            # change[0] is the manifest node of that changeset.
            n = change[0]
        except hg.RepoError:
            # Fall back to treating rev as a manifest revision directly.
            n = repo.manifest.lookup(rev)
    else:
        n = repo.manifest.tip()
    m = repo.manifest.read(n)        # filename -> file node hash
    mf = repo.manifest.readflags(n)  # filename -> executable flag
    files = m.keys()
    files.sort()

    for f in files:
        # hash, unix mode (755 for executables, 644 otherwise), name
        ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1573
1573
def outgoing(ui, repo, dest="default-push", **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repo or the
    default push repo. These are the changesets that would be pushed
    if a push was requested.
    """
    dest = ui.expandpath(dest, repo.root)
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other)
    # Expand the discovered roots into the full list of missing changesets.
    o = repo.changelog.nodesbetween(o)[0]
    if opts['newest_first']:
        o.reverse()
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        # Two non-null parents identify a merge changeset.
        if opts['no_merges'] and len(parents) == 2:
            continue
        show_changeset(ui, repo, changenode=n)
        if opts['patch']:
            # Diff against the first parent (or null for a root changeset).
            prev = (parents and parents[0]) or nullid
            dodiff(ui, ui, repo, prev, n)
            ui.write("\n")
1596
1596
def parents(ui, repo, rev=None):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    # A revision's parents come from the changelog; the working dir's
    # from the dirstate.
    if rev:
        nodes = repo.changelog.parents(repo.lookup(rev))
    else:
        nodes = repo.dirstate.parents()

    for node in nodes:
        # Skip the null parent used as a placeholder.
        if node == nullid:
            continue
        show_changeset(ui, repo, changenode=node)
1610
1610
def paths(ui, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    try:
        # Opened only for its side effect: presumably loading the
        # repository's .hg/hgrc [paths] entries into ui — the repo object
        # itself is never used below. TODO(review): confirm.
        repo = hg.repository(ui=ui)
    except hg.RepoError:
        # Not inside a repository; global/user config still applies.
        pass

    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
1635
1635
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port][/path]
      https://[user@]host[:port][/path]
      ssh://[user@]host[:port][/path]

    SSH requires an accessible shell account on the destination machine
    and a copy of hg in the remote path. With SSH, paths are relative
    to the remote user's home directory by default; use two slashes at
    the start of a path to specify it as relative to the filesystem root.
    """
    source = ui.expandpath(source, repo.root)
    ui.status(_('pulling from %s\n') % (source))

    # Per-invocation overrides for how the remote is reached.
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    revs = None
    # -r needs lookup on the remote, which only local repos support here.
    if opts['rev'] and not other.local():
        raise util.Abort("pull -r doesn't work for remote repositories yet")
    elif opts['rev']:
        revs = [other.lookup(rev) for rev in opts['rev']]
    r = repo.pull(other, heads=revs)
    # A falsy result means changesets were actually added.
    if not r:
        if opts['update']:
            return update(ui, repo)
        else:
            ui.status(_("(run 'hg update' to get a working copy)\n"))

    return r
1679
1679
def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port][/path]

    SSH requires an accessible shell account on the destination
    machine and a copy of hg in the remote path.
    """
    # Fix: help text above previously read "indicates the the client".
    dest = ui.expandpath(dest, repo.root)
    ui.status('pushing to %s\n' % (dest))

    # Per-invocation overrides for how the remote is reached.
    if ssh:
        ui.setconfig("ui", "ssh", ssh)
    if remotecmd:
        ui.setconfig("ui", "remotecmd", remotecmd)

    remote = hg.repository(ui, dest)
    # Returns the push result; force overrides the new-remote-heads check.
    return repo.push(remote, force)
1713
1713
def rawcommit(ui, repo, *flist, **rc):
    """raw commit interface

    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.
    """
    message = rc['message']
    # Fall back to the log file when no -m message was given; an unreadable
    # log file is deliberately ignored so the explicit check below fires.
    if not message and rc['logfile']:
        try:
            message = open(rc['logfile']).read()
        except IOError:
            pass
    if not message and not rc['logfile']:
        raise util.Abort(_("missing commit message"))

    # Files from the command line, plus any listed one-per-line in --files.
    files = relpath(repo, list(flist))
    if rc['files']:
        files += open(rc['files']).read().splitlines()

    # Resolve parent revision identifiers to nodes.
    rc['parent'] = map(repo.lookup, rc['parent'])

    try:
        repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
    except ValueError, inst:
        raise util.Abort(str(inst))
1741
1741
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # Only bother verifying when recovery actually rolled something back.
    if not repo.recover():
        return False
    return repo.verify()
1753
1753
def remove(ui, repo, pat, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it.
    """
    doomed = []
    def deletable(abspath, relname, exact):
        # a file may only be removed when it is tracked and unmodified
        modified, added, deleted, unknown = repo.changes(files = [abspath])
        if modified:
            blocker = _('is modified')
        elif added:
            blocker = _('has been marked for add')
        elif unknown:
            blocker = _('is not managed')
        else:
            return True
        # complain only about files the user named explicitly
        if exact:
            ui.warn(_('not removing %s: file %s\n') % (relname, blocker))
        return False
    for src, abspath, relname, exact in walk(repo, (pat,) + pats, opts):
        if deletable(abspath, relname, exact):
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % relname)
            doomed.append(abspath)
    repo.remove(doomed, unlink=True)
1780
1780
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion.  If
    dest is a directory, copies are put in that directory.  If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory.  If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record rename files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # a rename is a copy followed by removal of the sources
    errs, copied = docopy(ui, repo, pats, opts)
    sources = []
    for abspath, relname, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % relname)
        sources.append(abspath)
    repo.remove(sources, unlink=True)
    return errs
1805
1805
def revert(ui, repo, *pats, **opts):
    """revert modified files or dirs back to their unmodified states

    Revert any uncommitted modifications made to the named files or
    directories.  This restores the contents of the affected files to
    an unmodified state.

    If a file has been deleted, it is recreated.  If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no names are given, all files in the current directory and
    its subdirectories are reverted.
    """
    # revert relative to --rev if given, else to the first dirstate parent
    node = (opts['rev'] and repo.lookup(opts['rev'])
            or repo.dirstate.parents()[0])

    files, choose, anypats = matchpats(repo, repo.getcwd(), pats, opts)
    modified, added, deleted, unknown = repo.changes(match=choose)
    # additions are forgotten and deletions resurrected; modified files
    # are restored by the update call below
    repo.forget(added)
    repo.undelete(deleted)

    return repo.update(node, False, True, choose, False)
1830
1830
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
1837
1837
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: speak the line-based ssh wire protocol on stdin/stdout
    # instead of starting an HTTP server (used by remote "hg -R ... serve
    # --stdio" invocations).
    if opts["stdio"]:
        fin, fout = sys.stdin, sys.stdout
        # anything printed by accident must not corrupt the protocol stream
        sys.stdout = sys.stderr

        # Prevent insertion/deletion of CRs
        util.set_binary(fin)
        util.set_binary(fout)

        def getarg():
            # one "name length" line, followed by `length` bytes of value
            argline = fin.readline()[:-1]
            arg, l = argline.split()
            val = fin.read(int(l))
            return arg, val
        def respond(v):
            # responses are length-prefixed: "<len>\n<payload>"
            fout.write("%d\n" % len(v))
            fout.write(v)
            fout.flush()

        lock = None

        # command loop: one command name per line; EOF (empty read) ends it
        while 1:
            cmd = fin.readline()[:-1]
            if cmd == '':
                return
            # NOTE(review): "heads"/"lock"/"unlock" use plain `if` while the
            # rest chain with `elif`; harmless since commands are distinct,
            # but inconsistent -- confirm before relying on fall-through.
            if cmd == "heads":
                h = repo.heads()
                respond(" ".join(map(hex, h)) + "\n")
            if cmd == "lock":
                lock = repo.lock()
                respond("")
            if cmd == "unlock":
                if lock:
                    lock.release()
                lock = None
                respond("")
            elif cmd == "branches":
                arg, nodes = getarg()
                nodes = map(bin, nodes.split(" "))
                r = []
                for b in repo.branches(nodes):
                    r.append(" ".join(map(hex, b)) + "\n")
                respond("".join(r))
            elif cmd == "between":
                # argument is space-separated "node-node" pairs
                arg, pairs = getarg()
                pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
                r = []
                for b in repo.between(pairs):
                    r.append(" ".join(map(hex, b)) + "\n")
                respond("".join(r))
            elif cmd == "changegroup":
                nodes = []
                arg, roots = getarg()
                nodes = map(bin, roots.split(" "))

                # stream the changegroup raw (not length-prefixed)
                cg = repo.changegroup(nodes)
                while 1:
                    d = cg.read(4096)
                    if not d:
                        break
                    fout.write(d)

                fout.flush()

            elif cmd == "addchangegroup":
                # pushing requires the client to have taken the lock first
                if not lock:
                    respond("not locked")
                    continue
                respond("")

                r = repo.addchangegroup(fin)
                respond("")

    # HTTP mode: copy relevant command-line options into the [web] config
    optlist = "name templates style address port ipv6 accesslog errorlog"
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    try:
        httpd = hgweb.create_server(repo)
    except socket.error, inst:
        raise util.Abort('cannot start server: ' + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            # bound to all interfaces; report the host name instead
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)
    httpd.serve_forever()
1943
1943
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show changed files in the working directory. If no names are
    given, all files are shown. Otherwise, only files matching the
    given names are shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    ? = not tracked
    """

    cwd = repo.getcwd()
    files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
    # repo.changes yields (changed, added, deleted, unknown); rewrite each
    # repository-relative path relative to the current directory
    (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
                    for n in repo.changes(files=files, match=matchfn)]

    # (opts key, status letter, file list) for each category
    # NOTE(review): the first tuple element doubles as the opts[] key below
    # but goes through _(); under an active translation opts[_('modified')]
    # would not match the English option name -- confirm against the
    # command table.
    changetypes = [(_('modified'), 'M', c),
                   (_('added'), 'A', a),
                   (_('removed'), 'R', d),
                   (_('unknown'), '?', u)]

    # --print0 terminates entries with NUL (for xargs -0)
    end = opts['print0'] and '\0' or '\n'

    # show only the explicitly requested categories, or all of them when
    # no category option was given
    for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end);

        for f in changes:
            ui.write(format % f)
1979
1979
1980 def tag(ui, repo, name, rev=None, **opts):
1980 def tag(ui, repo, name, rev=None, **opts):
1981 """add a tag for the current tip or a given revision
1981 """add a tag for the current tip or a given revision
1982
1982
1983 Name a particular revision using <name>.
1983 Name a particular revision using <name>.
1984
1984
1985 Tags are used to name particular revisions of the repository and are
1985 Tags are used to name particular revisions of the repository and are
1986 very useful to compare different revision, to go back to significant
1986 very useful to compare different revision, to go back to significant
1987 earlier versions or to mark branch points as releases, etc.
1987 earlier versions or to mark branch points as releases, etc.
1988
1988
1989 If no revision is given, the tip is used.
1989 If no revision is given, the tip is used.
1990
1990
1991 To facilitate version control, distribution, and merging of tags,
1991 To facilitate version control, distribution, and merging of tags,
1992 they are stored as a file named ".hgtags" which is managed
1992 they are stored as a file named ".hgtags" which is managed
1993 similarly to other project files and can be hand-edited if
1993 similarly to other project files and can be hand-edited if
1994 necessary.
1994 necessary.
1995 """
1995 """
1996 if name == "tip":
1996 if name == "tip":
1997 raise util.Abort(_("the name 'tip' is reserved"))
1997 raise util.Abort(_("the name 'tip' is reserved"))
1998 if 'rev' in opts:
1998 if 'rev' in opts:
1999 rev = opts['rev']
1999 rev = opts['rev']
2000 if rev:
2000 if rev:
2001 r = hex(repo.lookup(rev))
2001 r = hex(repo.lookup(rev))
2002 else:
2002 else:
2003 r = hex(repo.changelog.tip())
2003 r = hex(repo.changelog.tip())
2004
2004
2005 if name.find(revrangesep) >= 0:
2005 if name.find(revrangesep) >= 0:
2006 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
2006 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
2007
2007
2008 if opts['local']:
2008 if opts['local']:
2009 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2009 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2010 return
2010 return
2011
2011
2012 (c, a, d, u) = repo.changes()
2012 (c, a, d, u) = repo.changes()
2013 for x in (c, a, d, u):
2013 for x in (c, a, d, u):
2014 if ".hgtags" in x:
2014 if ".hgtags" in x:
2015 raise util.Abort(_("working copy of .hgtags is changed "
2015 raise util.Abort(_("working copy of .hgtags is changed "
2016 "(please commit .hgtags manually)"))
2016 "(please commit .hgtags manually)"))
2017
2017
2018 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2018 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2019 if repo.dirstate.state(".hgtags") == '?':
2019 if repo.dirstate.state(".hgtags") == '?':
2020 repo.add([".hgtags"])
2020 repo.add([".hgtags"])
2021
2021
2022 message = (opts['message'] or
2022 message = (opts['message'] or
2023 _("Added tag %s for changeset %s") % (name, r))
2023 _("Added tag %s for changeset %s") % (name, r))
2024 try:
2024 try:
2025 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2025 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2026 except ValueError, inst:
2026 except ValueError, inst:
2027 raise util.Abort(str(inst))
2027 raise util.Abort(str(inst))
2028
2028
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    # tagslist() is oldest-first; show newest first
    for t, n in repo.tagslist()[::-1]:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            # the tag points at a changeset we do not have
            r = "    ?:?"
        ui.write("%-30s %s\n" % (t, r))
2045
2045
def tip(ui, repo):
    """show the tip revision

    Show the tip revision.
    """
    # display the newest changeset in the changelog
    show_changeset(ui, repo, changenode=repo.changelog.tip())
2053
2053
def unbundle(ui, repo, fname):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    # the first four bytes identify a version-1 bundle
    header = f.read(4)
    if header != "HG10":
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)

    def decompressed(chunks):
        # bzip2-decompress the payload chunk by chunk
        zd = bz2.BZ2Decompressor()
        for piece in chunks:
            yield zd.decompress(piece)

    stream = decompressed(util.filechunkiter(f, 4096))
    repo.addchangegroup(util.chunkbuffer(stream))
2072
2072
def undo(ui, repo):
    """undo the last commit or pull

    Roll back the last pull or commit transaction on the
    repository, restoring the project to its earlier state.

    This command should be used with care. There is only one level of
    undo and there is no redo.

    This command is not intended for use on public repositories. Once
    a change is visible for pull by other users, undoing it locally is
    ineffective.
    """
    # the repository object implements the single-level rollback
    repo.undo()
2087
2087
def update(ui, repo, node=None, merge=False, clean=False, branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    Otherwise the result is a merge between the contents of the
    current working directory and the requested version. Files that
    changed between either parent are marked as changed for the next
    commit and a commit must be performed before any further updates
    are allowed.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if branch:
        # resolve the branch name to the set of heads carrying it
        br = repo.branchlookup(branch=branch)
        heads = [h for h in br if branch in br[h]]
        if len(heads) > 1:
            ui.warn(_("Found multiple heads for %s\n") % branch)
            for h in heads:
                show_changeset(ui, repo, changenode=h, brinfo=br)
            return 1
        if not heads:
            ui.warn(_("branch %s not found\n") % (branch))
            return 1
        node = heads[0]
        ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
    else:
        # default to the tip when no revision was requested
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean)
2126
2126
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # all the work happens in the repository object
    return repo.verify()
2138
2138
2139 # Command options and aliases are listed here, alphabetically
2139 # Command options and aliases are listed here, alphabetically
2140
2140
2141 table = {
2141 table = {
2142 "^add":
2142 "^add":
2143 (add,
2143 (add,
2144 [('I', 'include', [], _('include names matching the given patterns')),
2144 [('I', 'include', [], _('include names matching the given patterns')),
2145 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2145 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2146 "hg add [OPTION]... [FILE]..."),
2146 "hg add [OPTION]... [FILE]..."),
2147 "addremove":
2147 "addremove":
2148 (addremove,
2148 (addremove,
2149 [('I', 'include', [], _('include names matching the given patterns')),
2149 [('I', 'include', [], _('include names matching the given patterns')),
2150 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2150 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2151 "hg addremove [OPTION]... [FILE]..."),
2151 "hg addremove [OPTION]... [FILE]..."),
2152 "^annotate":
2152 "^annotate":
2153 (annotate,
2153 (annotate,
2154 [('r', 'rev', '', _('annotate the specified revision')),
2154 [('r', 'rev', '', _('annotate the specified revision')),
2155 ('a', 'text', None, _('treat all files as text')),
2155 ('a', 'text', None, _('treat all files as text')),
2156 ('u', 'user', None, _('list the author')),
2156 ('u', 'user', None, _('list the author')),
2157 ('d', 'date', None, _('list the date')),
2157 ('d', 'date', None, _('list the date')),
2158 ('n', 'number', None, _('list the revision number (default)')),
2158 ('n', 'number', None, _('list the revision number (default)')),
2159 ('c', 'changeset', None, _('list the changeset')),
2159 ('c', 'changeset', None, _('list the changeset')),
2160 ('I', 'include', [], _('include names matching the given patterns')),
2160 ('I', 'include', [], _('include names matching the given patterns')),
2161 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2161 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2162 _('hg annotate [OPTION]... FILE...')),
2162 _('hg annotate [OPTION]... FILE...')),
2163 "bundle":
2163 "bundle":
2164 (bundle,
2164 (bundle,
2165 [],
2165 [],
2166 _('hg bundle FILE DEST')),
2166 _('hg bundle FILE DEST')),
2167 "cat":
2167 "cat":
2168 (cat,
2168 (cat,
2169 [('I', 'include', [], _('include names matching the given patterns')),
2169 [('I', 'include', [], _('include names matching the given patterns')),
2170 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2170 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2171 ('o', 'output', "", _('print output to file with formatted name')),
2171 ('o', 'output', "", _('print output to file with formatted name')),
2172 ('r', 'rev', '', _('print the given revision'))],
2172 ('r', 'rev', '', _('print the given revision'))],
2173 _('hg cat [OPTION]... FILE...')),
2173 _('hg cat [OPTION]... FILE...')),
2174 "^clone":
2174 "^clone":
2175 (clone,
2175 (clone,
2176 [('U', 'noupdate', None, _('do not update the new working directory')),
2176 [('U', 'noupdate', None, _('do not update the new working directory')),
2177 ('e', 'ssh', "", _('specify ssh command to use')),
2177 ('e', 'ssh', "", _('specify ssh command to use')),
2178 ('', 'pull', None, _('use pull protocol to copy metadata')),
2178 ('', 'pull', None, _('use pull protocol to copy metadata')),
2179 ('r', 'rev', [], _('a changeset you would like to have after cloning')),
2179 ('r', 'rev', [], _('a changeset you would like to have after cloning')),
2180 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2180 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2181 _('hg clone [OPTION]... SOURCE [DEST]')),
2181 _('hg clone [OPTION]... SOURCE [DEST]')),
2182 "^commit|ci":
2182 "^commit|ci":
2183 (commit,
2183 (commit,
2184 [('A', 'addremove', None, _('run addremove during commit')),
2184 [('A', 'addremove', None, _('run addremove during commit')),
2185 ('I', 'include', [], _('include names matching the given patterns')),
2185 ('I', 'include', [], _('include names matching the given patterns')),
2186 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2186 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2187 ('m', 'message', "", _('use <text> as commit message')),
2187 ('m', 'message', "", _('use <text> as commit message')),
2188 ('l', 'logfile', "", _('read the commit message from <file>')),
2188 ('l', 'logfile', "", _('read the commit message from <file>')),
2189 ('d', 'date', "", _('record datecode as commit date')),
2189 ('d', 'date', "", _('record datecode as commit date')),
2190 ('u', 'user', "", _('record user as commiter'))],
2190 ('u', 'user', "", _('record user as commiter'))],
2191 _('hg commit [OPTION]... [FILE]...')),
2191 _('hg commit [OPTION]... [FILE]...')),
2192 "copy|cp": (copy,
2192 "copy|cp": (copy,
2193 [('I', 'include', [], _('include names matching the given patterns')),
2193 [('I', 'include', [], _('include names matching the given patterns')),
2194 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2194 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2195 ('A', 'after', None, _('record a copy that has already occurred')),
2195 ('A', 'after', None, _('record a copy that has already occurred')),
2196 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2196 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2197 _('hg copy [OPTION]... [SOURCE]... DEST')),
2197 _('hg copy [OPTION]... [SOURCE]... DEST')),
2198 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2198 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2199 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2199 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2200 "debugconfig": (debugconfig, [], _('debugconfig')),
2200 "debugconfig": (debugconfig, [], _('debugconfig')),
2201 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2201 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2202 "debugstate": (debugstate, [], _('debugstate')),
2202 "debugstate": (debugstate, [], _('debugstate')),
2203 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2203 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2204 "debugindex": (debugindex, [], _('debugindex FILE')),
2204 "debugindex": (debugindex, [], _('debugindex FILE')),
2205 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2205 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2206 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2206 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2207 "debugwalk":
2207 "debugwalk":
2208 (debugwalk,
2208 (debugwalk,
2209 [('I', 'include', [], _('include names matching the given patterns')),
2209 [('I', 'include', [], _('include names matching the given patterns')),
2210 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2210 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2211 _('debugwalk [OPTION]... [FILE]...')),
2211 _('debugwalk [OPTION]... [FILE]...')),
2212 "^diff":
2212 "^diff":
2213 (diff,
2213 (diff,
2214 [('r', 'rev', [], _('revision')),
2214 [('r', 'rev', [], _('revision')),
2215 ('a', 'text', None, _('treat all files as text')),
2215 ('a', 'text', None, _('treat all files as text')),
2216 ('I', 'include', [], _('include names matching the given patterns')),
2216 ('I', 'include', [], _('include names matching the given patterns')),
2217 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2217 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2218 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2218 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2219 "^export":
2219 "^export":
2220 (export,
2220 (export,
2221 [('o', 'output', "", _('print output to file with formatted name')),
2221 [('o', 'output', "", _('print output to file with formatted name')),
2222 ('a', 'text', None, _('treat all files as text'))],
2222 ('a', 'text', None, _('treat all files as text'))],
2223 "hg export [-a] [-o OUTFILE] REV..."),
2223 "hg export [-a] [-o OUTFILE] REV..."),
2224 "forget":
2224 "forget":
2225 (forget,
2225 (forget,
2226 [('I', 'include', [], _('include names matching the given patterns')),
2226 [('I', 'include', [], _('include names matching the given patterns')),
2227 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2227 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2228 "hg forget [OPTION]... FILE..."),
2228 "hg forget [OPTION]... FILE..."),
2229 "grep":
2229 "grep":
2230 (grep,
2230 (grep,
2231 [('0', 'print0', None, _('end fields with NUL')),
2231 [('0', 'print0', None, _('end fields with NUL')),
2232 ('I', 'include', [], _('include names matching the given patterns')),
2232 ('I', 'include', [], _('include names matching the given patterns')),
2233 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2233 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2234 ('', 'all', None, _('print all revisions that match')),
2234 ('', 'all', None, _('print all revisions that match')),
2235 ('i', 'ignore-case', None, _('ignore case when matching')),
2235 ('i', 'ignore-case', None, _('ignore case when matching')),
2236 ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
2236 ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
2237 ('n', 'line-number', None, _('print matching line numbers')),
2237 ('n', 'line-number', None, _('print matching line numbers')),
2238 ('r', 'rev', [], _('search in given revision range')),
2238 ('r', 'rev', [], _('search in given revision range')),
2239 ('u', 'user', None, _('print user who committed change'))],
2239 ('u', 'user', None, _('print user who committed change'))],
2240 "hg grep [OPTION]... PATTERN [FILE]..."),
2240 "hg grep [OPTION]... PATTERN [FILE]..."),
2241 "heads":
2241 "heads":
2242 (heads,
2242 (heads,
2243 [('b', 'branches', None, _('find branch info')),
2243 [('b', 'branches', None, _('find branch info')),
2244 ('r', 'rev', "", _('show only heads which are descendants of rev'))],
2244 ('r', 'rev', "", _('show only heads which are descendants of rev'))],
2245 _('hg heads [-b] [-r <rev>]')),
2245 _('hg heads [-b] [-r <rev>]')),
2246 "help": (help_, [], _('hg help [COMMAND]')),
2246 "help": (help_, [], _('hg help [COMMAND]')),
2247 "identify|id": (identify, [], _('hg identify')),
2247 "identify|id": (identify, [], _('hg identify')),
2248 "import|patch":
2248 "import|patch":
2249 (import_,
2249 (import_,
2250 [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
2250 [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
2251 _('meaning as the corresponding patch option')),
2251 _('meaning as the corresponding patch option')),
2252 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
2252 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
2253 ('b', 'base', "", _('base path'))],
2253 ('b', 'base', "", _('base path'))],
2254 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
2254 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
2255 "incoming|in": (incoming,
2255 "incoming|in": (incoming,
2256 [('M', 'no-merges', None, _("do not show merges")),
2256 [('M', 'no-merges', None, _("do not show merges")),
2257 ('p', 'patch', None, _('show patch')),
2257 ('p', 'patch', None, _('show patch')),
2258 ('n', 'newest-first', None, _('show newest record first'))],
2258 ('n', 'newest-first', None, _('show newest record first'))],
2259 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2259 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2260 "^init": (init, [], _('hg init [DEST]')),
2260 "^init": (init, [], _('hg init [DEST]')),
2261 "locate":
2261 "locate":
2262 (locate,
2262 (locate,
2263 [('r', 'rev', '', _('search the repository as it stood at rev')),
2263 [('r', 'rev', '', _('search the repository as it stood at rev')),
2264 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2264 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2265 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
2265 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
2266 ('I', 'include', [], _('include names matching the given patterns')),
2266 ('I', 'include', [], _('include names matching the given patterns')),
2267 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2267 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2268 _('hg locate [OPTION]... [PATTERN]...')),
2268 _('hg locate [OPTION]... [PATTERN]...')),
2269 "^log|history":
2269 "^log|history":
2270 (log,
2270 (log,
2271 [('I', 'include', [], _('include names matching the given patterns')),
2271 [('I', 'include', [], _('include names matching the given patterns')),
2272 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2272 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2273 ('b', 'branch', None, _('show branches')),
2273 ('b', 'branch', None, _('show branches')),
2274 ('k', 'keyword', [], _('search for a keyword')),
2274 ('k', 'keyword', [], _('search for a keyword')),
2275 ('r', 'rev', [], _('show the specified revision or range')),
2275 ('r', 'rev', [], _('show the specified revision or range')),
2276 ('M', 'no-merges', None, _("do not show merges")),
2276 ('M', 'no-merges', None, _("do not show merges")),
2277 ('m', 'only-merges', None, _("show only merges")),
2277 ('m', 'only-merges', None, _("show only merges")),
2278 ('p', 'patch', None, _('show patch'))],
2278 ('p', 'patch', None, _('show patch'))],
2279 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2279 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2280 "manifest": (manifest, [], _('hg manifest [REV]')),
2280 "manifest": (manifest, [], _('hg manifest [REV]')),
2281 "outgoing|out": (outgoing,
2281 "outgoing|out": (outgoing,
2282 [('M', 'no-merges', None, _("do not show merges")),
2282 [('M', 'no-merges', None, _("do not show merges")),
2283 ('p', 'patch', None, _('show patch')),
2283 ('p', 'patch', None, _('show patch')),
2284 ('n', 'newest-first', None, _('show newest record first'))],
2284 ('n', 'newest-first', None, _('show newest record first'))],
2285 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2285 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2286 "^parents": (parents, [], _('hg parents [REV]')),
2286 "^parents": (parents, [], _('hg parents [REV]')),
2287 "paths": (paths, [], _('hg paths [NAME]')),
2287 "paths": (paths, [], _('hg paths [NAME]')),
2288 "^pull":
2288 "^pull":
2289 (pull,
2289 (pull,
2290 [('u', 'update', None, _('update the working directory to tip after pull')),
2290 [('u', 'update', None, _('update the working directory to tip after pull')),
2291 ('e', 'ssh', "", _('specify ssh command to use')),
2291 ('e', 'ssh', "", _('specify ssh command to use')),
2292 ('r', 'rev', [], _('a specific revision you would like to pull')),
2292 ('r', 'rev', [], _('a specific revision you would like to pull')),
2293 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2293 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2294 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2294 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2295 "^push":
2295 "^push":
2296 (push,
2296 (push,
2297 [('f', 'force', None, _('force push')),
2297 [('f', 'force', None, _('force push')),
2298 ('e', 'ssh', "", _('specify ssh command to use')),
2298 ('e', 'ssh', "", _('specify ssh command to use')),
2299 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2299 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2300 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2300 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2301 "rawcommit":
2301 "rawcommit":
2302 (rawcommit,
2302 (rawcommit,
2303 [('p', 'parent', [], _('parent')),
2303 [('p', 'parent', [], _('parent')),
2304 ('d', 'date', "", _('date code')),
2304 ('d', 'date', "", _('date code')),
2305 ('u', 'user', "", _('user')),
2305 ('u', 'user', "", _('user')),
2306 ('F', 'files', "", _('file list')),
2306 ('F', 'files', "", _('file list')),
2307 ('m', 'message', "", _('commit message')),
2307 ('m', 'message', "", _('commit message')),
2308 ('l', 'logfile', "", _('commit message file'))],
2308 ('l', 'logfile', "", _('commit message file'))],
2309 _('hg rawcommit [OPTION]... [FILE]...')),
2309 _('hg rawcommit [OPTION]... [FILE]...')),
2310 "recover": (recover, [], _("hg recover")),
2310 "recover": (recover, [], _("hg recover")),
2311 "^remove|rm": (remove,
2311 "^remove|rm": (remove,
2312 [('I', 'include', [], _('include names matching the given patterns')),
2312 [('I', 'include', [], _('include names matching the given patterns')),
2313 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2313 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2314 _("hg remove [OPTION]... FILE...")),
2314 _("hg remove [OPTION]... FILE...")),
2315 "rename|mv": (rename,
2315 "rename|mv": (rename,
2316 [('I', 'include', [], _('include names matching the given patterns')),
2316 [('I', 'include', [], _('include names matching the given patterns')),
2317 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2317 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2318 ('A', 'after', None, _('record a rename that has already occurred')),
2318 ('A', 'after', None, _('record a rename that has already occurred')),
2319 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2319 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2320 _('hg rename [OPTION]... [SOURCE]... DEST')),
2320 _('hg rename [OPTION]... [SOURCE]... DEST')),
2321 "^revert":
2321 "^revert":
2322 (revert,
2322 (revert,
2323 [('I', 'include', [], _('include names matching the given patterns')),
2323 [('I', 'include', [], _('include names matching the given patterns')),
2324 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2324 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2325 ("r", "rev", "", _("revision to revert to"))],
2325 ("r", "rev", "", _("revision to revert to"))],
2326 _("hg revert [-n] [-r REV] [NAME]...")),
2326 _("hg revert [-n] [-r REV] [NAME]...")),
2327 "root": (root, [], _("hg root")),
2327 "root": (root, [], _("hg root")),
2328 "^serve":
2328 "^serve":
2329 (serve,
2329 (serve,
2330 [('A', 'accesslog', '', _('name of access log file to write to')),
2330 [('A', 'accesslog', '', _('name of access log file to write to')),
2331 ('E', 'errorlog', '', _('name of error log file to write to')),
2331 ('E', 'errorlog', '', _('name of error log file to write to')),
2332 ('p', 'port', 0, _('port to use (default: 8000)')),
2332 ('p', 'port', 0, _('port to use (default: 8000)')),
2333 ('a', 'address', '', _('address to use')),
2333 ('a', 'address', '', _('address to use')),
2334 ('n', 'name', "", _('name to show in web pages (default: working dir)')),
2334 ('n', 'name', "", _('name to show in web pages (default: working dir)')),
2335 ('', 'stdio', None, _('for remote clients')),
2335 ('', 'stdio', None, _('for remote clients')),
2336 ('t', 'templates', "", _('web templates to use')),
2336 ('t', 'templates', "", _('web templates to use')),
2337 ('', 'style', "", _('template style to use')),
2337 ('', 'style', "", _('template style to use')),
2338 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2338 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2339 _("hg serve [OPTION]...")),
2339 _("hg serve [OPTION]...")),
2340 "^status|st":
2340 "^status|st":
2341 (status,
2341 (status,
2342 [('m', 'modified', None, _('show only modified files')),
2342 [('m', 'modified', None, _('show only modified files')),
2343 ('a', 'added', None, _('show only added files')),
2343 ('a', 'added', None, _('show only added files')),
2344 ('r', 'removed', None, _('show only removed files')),
2344 ('r', 'removed', None, _('show only removed files')),
2345 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2345 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2346 ('n', 'no-status', None, _('hide status prefix')),
2346 ('n', 'no-status', None, _('hide status prefix')),
2347 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2347 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2348 ('I', 'include', [], _('include names matching the given patterns')),
2348 ('I', 'include', [], _('include names matching the given patterns')),
2349 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2349 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2350 _("hg status [OPTION]... [FILE]...")),
2350 _("hg status [OPTION]... [FILE]...")),
2351 "tag":
2351 "tag":
2352 (tag,
2352 (tag,
2353 [('l', 'local', None, _('make the tag local')),
2353 [('l', 'local', None, _('make the tag local')),
2354 ('m', 'message', "", _('message for tag commit log entry')),
2354 ('m', 'message', "", _('message for tag commit log entry')),
2355 ('d', 'date', "", _('record datecode as commit date')),
2355 ('d', 'date', "", _('record datecode as commit date')),
2356 ('u', 'user', "", _('record user as commiter')),
2356 ('u', 'user', "", _('record user as commiter')),
2357 ('r', 'rev', "", _('revision to tag'))],
2357 ('r', 'rev', "", _('revision to tag'))],
2358 _('hg tag [OPTION]... NAME [REV]')),
2358 _('hg tag [OPTION]... NAME [REV]')),
2359 "tags": (tags, [], _('hg tags')),
2359 "tags": (tags, [], _('hg tags')),
2360 "tip": (tip, [], _('hg tip')),
2360 "tip": (tip, [], _('hg tip')),
2361 "unbundle":
2361 "unbundle":
2362 (unbundle,
2362 (unbundle,
2363 [],
2363 [],
2364 _('hg unbundle FILE')),
2364 _('hg unbundle FILE')),
2365 "undo": (undo, [], _('hg undo')),
2365 "undo": (undo, [], _('hg undo')),
2366 "^update|up|checkout|co":
2366 "^update|up|checkout|co":
2367 (update,
2367 (update,
2368 [('b', 'branch', "", _('checkout the head of a specific branch')),
2368 [('b', 'branch', "", _('checkout the head of a specific branch')),
2369 ('m', 'merge', None, _('allow merging of branches')),
2369 ('m', 'merge', None, _('allow merging of branches')),
2370 ('C', 'clean', None, _('overwrite locally modified files'))],
2370 ('C', 'clean', None, _('overwrite locally modified files'))],
2371 _('hg update [-b TAG] [-m] [-C] [REV]')),
2371 _('hg update [-b TAG] [-m] [-C] [REV]')),
2372 "verify": (verify, [], _('hg verify')),
2372 "verify": (verify, [], _('hg verify')),
2373 "version": (show_version, [], _('hg version')),
2373 "version": (show_version, [], _('hg version')),
2374 }
2374 }
2375
2375
2376 globalopts = [
2376 globalopts = [
2377 ('R', 'repository', "", _("repository root directory")),
2377 ('R', 'repository', "", _("repository root directory")),
2378 ('', 'cwd', '', _("change working directory")),
2378 ('', 'cwd', '', _("change working directory")),
2379 ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
2379 ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
2380 ('q', 'quiet', None, _("suppress output")),
2380 ('q', 'quiet', None, _("suppress output")),
2381 ('v', 'verbose', None, _("enable additional output")),
2381 ('v', 'verbose', None, _("enable additional output")),
2382 ('', 'debug', None, _("enable debugging output")),
2382 ('', 'debug', None, _("enable debugging output")),
2383 ('', 'debugger', None, _("start debugger")),
2383 ('', 'debugger', None, _("start debugger")),
2384 ('', 'traceback', None, _("print traceback on exception")),
2384 ('', 'traceback', None, _("print traceback on exception")),
2385 ('', 'time', None, _("time how long the command takes")),
2385 ('', 'time', None, _("time how long the command takes")),
2386 ('', 'profile', None, _("print command execution profile")),
2386 ('', 'profile', None, _("print command execution profile")),
2387 ('', 'version', None, _("output version information and exit")),
2387 ('', 'version', None, _("output version information and exit")),
2388 ('h', 'help', None, _("display help and exit")),
2388 ('h', 'help', None, _("display help and exit")),
2389 ]
2389 ]
2390
2390
2391 norepo = ("clone init version help debugancestor debugconfig debugdata"
2391 norepo = ("clone init version help debugancestor debugconfig debugdata"
2392 " debugindex debugindexdot paths")
2392 " debugindex debugindexdot paths")
2393
2393
2394 def find(cmd):
2394 def find(cmd):
2395 """Return (aliases, command table entry) for command string."""
2395 """Return (aliases, command table entry) for command string."""
2396 choice = None
2396 choice = None
2397 for e in table.keys():
2397 for e in table.keys():
2398 aliases = e.lstrip("^").split("|")
2398 aliases = e.lstrip("^").split("|")
2399 if cmd in aliases:
2399 if cmd in aliases:
2400 return aliases, table[e]
2400 return aliases, table[e]
2401 for a in aliases:
2401 for a in aliases:
2402 if a.startswith(cmd):
2402 if a.startswith(cmd):
2403 if choice:
2403 if choice:
2404 raise AmbiguousCommand(cmd)
2404 raise AmbiguousCommand(cmd)
2405 else:
2405 else:
2406 choice = aliases, table[e]
2406 choice = aliases, table[e]
2407 break
2407 break
2408 if choice:
2408 if choice:
2409 return choice
2409 return choice
2410
2410
2411 raise UnknownCommand(cmd)
2411 raise UnknownCommand(cmd)
2412
2412
2413 class SignalInterrupt(Exception):
2413 class SignalInterrupt(Exception):
2414 """Exception raised on SIGTERM and SIGHUP."""
2414 """Exception raised on SIGTERM and SIGHUP."""
2415
2415
2416 def catchterm(*args):
2416 def catchterm(*args):
2417 raise SignalInterrupt
2417 raise SignalInterrupt
2418
2418
2419 def run():
2419 def run():
2420 sys.exit(dispatch(sys.argv[1:]))
2420 sys.exit(dispatch(sys.argv[1:]))
2421
2421
2422 class ParseError(Exception):
2422 class ParseError(Exception):
2423 """Exception raised on errors in parsing the command line."""
2423 """Exception raised on errors in parsing the command line."""
2424
2424
2425 def parse(ui, args):
2425 def parse(ui, args):
2426 options = {}
2426 options = {}
2427 cmdoptions = {}
2427 cmdoptions = {}
2428
2428
2429 try:
2429 try:
2430 args = fancyopts.fancyopts(args, globalopts, options)
2430 args = fancyopts.fancyopts(args, globalopts, options)
2431 except fancyopts.getopt.GetoptError, inst:
2431 except fancyopts.getopt.GetoptError, inst:
2432 raise ParseError(None, inst)
2432 raise ParseError(None, inst)
2433
2433
2434 if args:
2434 if args:
2435 cmd, args = args[0], args[1:]
2435 cmd, args = args[0], args[1:]
2436 defaults = ui.config("defaults", cmd)
2436 defaults = ui.config("defaults", cmd)
2437 if defaults:
2437 if defaults:
2438 args = defaults.split() + args
2438 args = defaults.split() + args
2439
2439
2440 aliases, i = find(cmd)
2440 aliases, i = find(cmd)
2441 cmd = aliases[0]
2441 cmd = aliases[0]
2442 c = list(i[1])
2442 c = list(i[1])
2443 else:
2443 else:
2444 cmd = None
2444 cmd = None
2445 c = []
2445 c = []
2446
2446
2447 # combine global options into local
2447 # combine global options into local
2448 for o in globalopts:
2448 for o in globalopts:
2449 c.append((o[0], o[1], options[o[1]], o[3]))
2449 c.append((o[0], o[1], options[o[1]], o[3]))
2450
2450
2451 try:
2451 try:
2452 args = fancyopts.fancyopts(args, c, cmdoptions)
2452 args = fancyopts.fancyopts(args, c, cmdoptions)
2453 except fancyopts.getopt.GetoptError, inst:
2453 except fancyopts.getopt.GetoptError, inst:
2454 raise ParseError(cmd, inst)
2454 raise ParseError(cmd, inst)
2455
2455
2456 # separate global options back out
2456 # separate global options back out
2457 for o in globalopts:
2457 for o in globalopts:
2458 n = o[1]
2458 n = o[1]
2459 options[n] = cmdoptions[n]
2459 options[n] = cmdoptions[n]
2460 del cmdoptions[n]
2460 del cmdoptions[n]
2461
2461
2462 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2462 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2463
2463
2464 def dispatch(args):
2464 def dispatch(args):
2465 signal.signal(signal.SIGTERM, catchterm)
2465 signal.signal(signal.SIGTERM, catchterm)
2466 try:
2466 try:
2467 signal.signal(signal.SIGHUP, catchterm)
2467 signal.signal(signal.SIGHUP, catchterm)
2468 except AttributeError:
2468 except AttributeError:
2469 pass
2469 pass
2470
2470
2471 try:
2471 try:
2472 u = ui.ui()
2472 u = ui.ui()
2473 except util.Abort, inst:
2473 except util.Abort, inst:
2474 sys.stderr.write(_("abort: %s\n") % inst)
2474 sys.stderr.write(_("abort: %s\n") % inst)
2475 sys.exit(1)
2475 sys.exit(1)
2476
2476
2477 external = []
2477 external = []
2478 for x in u.extensions():
2478 for x in u.extensions():
2479 def on_exception(exc, inst):
2479 def on_exception(exc, inst):
2480 u.warn(_("*** failed to import extension %s\n") % x[1])
2480 u.warn(_("*** failed to import extension %s\n") % x[1])
2481 u.warn("%s\n" % inst)
2481 u.warn("%s\n" % inst)
2482 if "--traceback" in sys.argv[1:]:
2482 if "--traceback" in sys.argv[1:]:
2483 traceback.print_exc()
2483 traceback.print_exc()
2484 if x[1]:
2484 if x[1]:
2485 try:
2485 try:
2486 mod = imp.load_source(x[0], x[1])
2486 mod = imp.load_source(x[0], x[1])
2487 except Exception, inst:
2487 except Exception, inst:
2488 on_exception(Exception, inst)
2488 on_exception(Exception, inst)
2489 continue
2489 continue
2490 else:
2490 else:
2491 def importh(name):
2491 def importh(name):
2492 mod = __import__(name)
2492 mod = __import__(name)
2493 components = name.split('.')
2493 components = name.split('.')
2494 for comp in components[1:]:
2494 for comp in components[1:]:
2495 mod = getattr(mod, comp)
2495 mod = getattr(mod, comp)
2496 return mod
2496 return mod
2497 try:
2497 try:
2498 mod = importh(x[0])
2498 mod = importh(x[0])
2499 except Exception, inst:
2499 except Exception, inst:
2500 on_exception(Exception, inst)
2500 on_exception(Exception, inst)
2501 continue
2501 continue
2502
2502
2503 external.append(mod)
2503 external.append(mod)
2504 for x in external:
2504 for x in external:
2505 cmdtable = getattr(x, 'cmdtable', {})
2505 cmdtable = getattr(x, 'cmdtable', {})
2506 for t in cmdtable:
2506 for t in cmdtable:
2507 if t in table:
2507 if t in table:
2508 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2508 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2509 table.update(cmdtable)
2509 table.update(cmdtable)
2510
2510
2511 try:
2511 try:
2512 cmd, func, args, options, cmdoptions = parse(u, args)
2512 cmd, func, args, options, cmdoptions = parse(u, args)
2513 except ParseError, inst:
2513 except ParseError, inst:
2514 if inst.args[0]:
2514 if inst.args[0]:
2515 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2515 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2516 help_(u, inst.args[0])
2516 help_(u, inst.args[0])
2517 else:
2517 else:
2518 u.warn(_("hg: %s\n") % inst.args[1])
2518 u.warn(_("hg: %s\n") % inst.args[1])
2519 help_(u, 'shortlist')
2519 help_(u, 'shortlist')
2520 sys.exit(-1)
2520 sys.exit(-1)
2521 except AmbiguousCommand, inst:
2521 except AmbiguousCommand, inst:
2522 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2522 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2523 sys.exit(1)
2523 sys.exit(1)
2524 except UnknownCommand, inst:
2524 except UnknownCommand, inst:
2525 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2525 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2526 help_(u, 'shortlist')
2526 help_(u, 'shortlist')
2527 sys.exit(1)
2527 sys.exit(1)
2528
2528
2529 if options["time"]:
2529 if options["time"]:
2530 def get_times():
2530 def get_times():
2531 t = os.times()
2531 t = os.times()
2532 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2532 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2533 t = (t[0], t[1], t[2], t[3], time.clock())
2533 t = (t[0], t[1], t[2], t[3], time.clock())
2534 return t
2534 return t
2535 s = get_times()
2535 s = get_times()
2536 def print_time():
2536 def print_time():
2537 t = get_times()
2537 t = get_times()
2538 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2538 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2539 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2539 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2540 atexit.register(print_time)
2540 atexit.register(print_time)
2541
2541
2542 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2542 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2543 not options["noninteractive"])
2543 not options["noninteractive"])
2544
2544
2545 # enter the debugger before command execution
2545 # enter the debugger before command execution
2546 if options['debugger']:
2546 if options['debugger']:
2547 pdb.set_trace()
2547 pdb.set_trace()
2548
2548
2549 try:
2549 try:
2550 try:
2550 try:
2551 if options['help']:
2551 if options['help']:
2552 help_(u, cmd, options['version'])
2552 help_(u, cmd, options['version'])
2553 sys.exit(0)
2553 sys.exit(0)
2554 elif options['version']:
2554 elif options['version']:
2555 show_version(u)
2555 show_version(u)
2556 sys.exit(0)
2556 sys.exit(0)
2557 elif not cmd:
2557 elif not cmd:
2558 help_(u, 'shortlist')
2558 help_(u, 'shortlist')
2559 sys.exit(0)
2559 sys.exit(0)
2560
2560
2561 if options['cwd']:
2561 if options['cwd']:
2562 try:
2562 try:
2563 os.chdir(options['cwd'])
2563 os.chdir(options['cwd'])
2564 except OSError, inst:
2564 except OSError, inst:
2565 raise util.Abort('%s: %s' %
2565 raise util.Abort('%s: %s' %
2566 (options['cwd'], inst.strerror))
2566 (options['cwd'], inst.strerror))
2567
2567
2568 if cmd not in norepo.split():
2568 if cmd not in norepo.split():
2569 path = options["repository"] or ""
2569 path = options["repository"] or ""
2570 repo = hg.repository(ui=u, path=path)
2570 repo = hg.repository(ui=u, path=path)
2571 for x in external:
2571 for x in external:
2572 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2572 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2573 d = lambda: func(u, repo, *args, **cmdoptions)
2573 d = lambda: func(u, repo, *args, **cmdoptions)
2574 else:
2574 else:
2575 d = lambda: func(u, *args, **cmdoptions)
2575 d = lambda: func(u, *args, **cmdoptions)
2576
2576
2577 if options['profile']:
2577 if options['profile']:
2578 import hotshot, hotshot.stats
2578 import hotshot, hotshot.stats
2579 prof = hotshot.Profile("hg.prof")
2579 prof = hotshot.Profile("hg.prof")
2580 r = prof.runcall(d)
2580 r = prof.runcall(d)
2581 prof.close()
2581 prof.close()
2582 stats = hotshot.stats.load("hg.prof")
2582 stats = hotshot.stats.load("hg.prof")
2583 stats.strip_dirs()
2583 stats.strip_dirs()
2584 stats.sort_stats('time', 'calls')
2584 stats.sort_stats('time', 'calls')
2585 stats.print_stats(40)
2585 stats.print_stats(40)
2586 return r
2586 return r
2587 else:
2587 else:
2588 return d()
2588 return d()
2589 except:
2589 except:
2590 # enter the debugger when we hit an exception
2590 # enter the debugger when we hit an exception
2591 if options['debugger']:
2591 if options['debugger']:
2592 pdb.post_mortem(sys.exc_info()[2])
2592 pdb.post_mortem(sys.exc_info()[2])
2593 if options['traceback']:
2593 if options['traceback']:
2594 traceback.print_exc()
2594 traceback.print_exc()
2595 raise
2595 raise
2596 except hg.RepoError, inst:
2596 except hg.RepoError, inst:
2597 u.warn(_("abort: "), inst, "!\n")
2597 u.warn(_("abort: "), inst, "!\n")
2598 except revlog.RevlogError, inst:
2598 except revlog.RevlogError, inst:
2599 u.warn(_("abort: "), inst, "!\n")
2599 u.warn(_("abort: "), inst, "!\n")
2600 except SignalInterrupt:
2600 except SignalInterrupt:
2601 u.warn(_("killed!\n"))
2601 u.warn(_("killed!\n"))
2602 except KeyboardInterrupt:
2602 except KeyboardInterrupt:
2603 try:
2603 try:
2604 u.warn(_("interrupted!\n"))
2604 u.warn(_("interrupted!\n"))
2605 except IOError, inst:
2605 except IOError, inst:
2606 if inst.errno == errno.EPIPE:
2606 if inst.errno == errno.EPIPE:
2607 if u.debugflag:
2607 if u.debugflag:
2608 u.warn(_("\nbroken pipe\n"))
2608 u.warn(_("\nbroken pipe\n"))
2609 else:
2609 else:
2610 raise
2610 raise
2611 except IOError, inst:
2611 except IOError, inst:
2612 if hasattr(inst, "code"):
2612 if hasattr(inst, "code"):
2613 u.warn(_("abort: %s\n") % inst)
2613 u.warn(_("abort: %s\n") % inst)
2614 elif hasattr(inst, "reason"):
2614 elif hasattr(inst, "reason"):
2615 u.warn(_("abort: error: %s\n") % inst.reason[1])
2615 u.warn(_("abort: error: %s\n") % inst.reason[1])
2616 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2616 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2617 if u.debugflag:
2617 if u.debugflag:
2618 u.warn(_("broken pipe\n"))
2618 u.warn(_("broken pipe\n"))
2619 elif getattr(inst, "strerror", None):
2619 elif getattr(inst, "strerror", None):
2620 if getattr(inst, "filename", None):
2620 if getattr(inst, "filename", None):
2621 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2621 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2622 else:
2622 else:
2623 u.warn(_("abort: %s\n") % inst.strerror)
2623 u.warn(_("abort: %s\n") % inst.strerror)
2624 else:
2624 else:
2625 raise
2625 raise
2626 except OSError, inst:
2626 except OSError, inst:
2627 if hasattr(inst, "filename"):
2627 if hasattr(inst, "filename"):
2628 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2628 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2629 else:
2629 else:
2630 u.warn(_("abort: %s\n") % inst.strerror)
2630 u.warn(_("abort: %s\n") % inst.strerror)
2631 except util.Abort, inst:
2631 except util.Abort, inst:
2632 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2632 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2633 sys.exit(1)
2633 sys.exit(1)
2634 except TypeError, inst:
2634 except TypeError, inst:
2635 # was this an argument error?
2635 # was this an argument error?
2636 tb = traceback.extract_tb(sys.exc_info()[2])
2636 tb = traceback.extract_tb(sys.exc_info()[2])
2637 if len(tb) > 2: # no
2637 if len(tb) > 2: # no
2638 raise
2638 raise
2639 u.debug(inst, "\n")
2639 u.debug(inst, "\n")
2640 u.warn(_("%s: invalid arguments\n") % cmd)
2640 u.warn(_("%s: invalid arguments\n") % cmd)
2641 help_(u, cmd)
2641 help_(u, cmd)
2642 except AmbiguousCommand, inst:
2642 except AmbiguousCommand, inst:
2643 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2643 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2644 help_(u, 'shortlist')
2644 help_(u, 'shortlist')
2645 except UnknownCommand, inst:
2645 except UnknownCommand, inst:
2646 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2646 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2647 help_(u, 'shortlist')
2647 help_(u, 'shortlist')
2648 except SystemExit:
2648 except SystemExit:
2649 # don't catch this in the catch-all below
2649 # don't catch this in the catch-all below
2650 raise
2650 raise
2651 except:
2651 except:
2652 u.warn(_("** unknown exception encountered, details follow\n"))
2652 u.warn(_("** unknown exception encountered, details follow\n"))
2653 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2653 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2654 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2654 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2655 % version.get_version())
2655 % version.get_version())
2656 raise
2656 raise
2657
2657
2658 sys.exit(-1)
2658 sys.exit(-1)
@@ -1,411 +1,411 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 import struct, os
10 import struct, os
11 from node import *
11 from node import *
12 from i18n import gettext as _
12 from i18n import gettext as _
13 from demandload import *
13 from demandload import *
14 demandload(globals(), "time bisect stat util re errno")
14 demandload(globals(), "time bisect stat util re errno")
15
15
16 class dirstate:
16 class dirstate(object):
17 def __init__(self, opener, ui, root):
17 def __init__(self, opener, ui, root):
18 self.opener = opener
18 self.opener = opener
19 self.root = root
19 self.root = root
20 self.dirty = 0
20 self.dirty = 0
21 self.ui = ui
21 self.ui = ui
22 self.map = None
22 self.map = None
23 self.pl = None
23 self.pl = None
24 self.copies = {}
24 self.copies = {}
25 self.ignorefunc = None
25 self.ignorefunc = None
26 self.blockignore = False
26 self.blockignore = False
27
27
28 def wjoin(self, f):
28 def wjoin(self, f):
29 return os.path.join(self.root, f)
29 return os.path.join(self.root, f)
30
30
31 def getcwd(self):
31 def getcwd(self):
32 cwd = os.getcwd()
32 cwd = os.getcwd()
33 if cwd == self.root: return ''
33 if cwd == self.root: return ''
34 return cwd[len(self.root) + 1:]
34 return cwd[len(self.root) + 1:]
35
35
36 def hgignore(self):
36 def hgignore(self):
37 '''return the contents of .hgignore as a list of patterns.
37 '''return the contents of .hgignore as a list of patterns.
38
38
39 trailing white space is dropped.
39 trailing white space is dropped.
40 the escape character is backslash.
40 the escape character is backslash.
41 comments start with #.
41 comments start with #.
42 empty lines are skipped.
42 empty lines are skipped.
43
43
44 lines can be of the following formats:
44 lines can be of the following formats:
45
45
46 syntax: regexp # defaults following lines to non-rooted regexps
46 syntax: regexp # defaults following lines to non-rooted regexps
47 syntax: glob # defaults following lines to non-rooted globs
47 syntax: glob # defaults following lines to non-rooted globs
48 re:pattern # non-rooted regular expression
48 re:pattern # non-rooted regular expression
49 glob:pattern # non-rooted glob
49 glob:pattern # non-rooted glob
50 pattern # pattern of the current default type'''
50 pattern # pattern of the current default type'''
51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
52 def parselines(fp):
52 def parselines(fp):
53 for line in fp:
53 for line in fp:
54 escape = False
54 escape = False
55 for i in xrange(len(line)):
55 for i in xrange(len(line)):
56 if escape: escape = False
56 if escape: escape = False
57 elif line[i] == '\\': escape = True
57 elif line[i] == '\\': escape = True
58 elif line[i] == '#': break
58 elif line[i] == '#': break
59 line = line[:i].rstrip()
59 line = line[:i].rstrip()
60 if line: yield line
60 if line: yield line
61 pats = []
61 pats = []
62 try:
62 try:
63 fp = open(self.wjoin('.hgignore'))
63 fp = open(self.wjoin('.hgignore'))
64 syntax = 'relre:'
64 syntax = 'relre:'
65 for line in parselines(fp):
65 for line in parselines(fp):
66 if line.startswith('syntax:'):
66 if line.startswith('syntax:'):
67 s = line[7:].strip()
67 s = line[7:].strip()
68 try:
68 try:
69 syntax = syntaxes[s]
69 syntax = syntaxes[s]
70 except KeyError:
70 except KeyError:
71 self.ui.warn(_("ignoring invalid syntax '%s'\n") % s)
71 self.ui.warn(_("ignoring invalid syntax '%s'\n") % s)
72 continue
72 continue
73 pat = syntax + line
73 pat = syntax + line
74 for s in syntaxes.values():
74 for s in syntaxes.values():
75 if line.startswith(s):
75 if line.startswith(s):
76 pat = line
76 pat = line
77 break
77 break
78 pats.append(pat)
78 pats.append(pat)
79 except IOError: pass
79 except IOError: pass
80 return pats
80 return pats
81
81
82 def ignore(self, fn):
82 def ignore(self, fn):
83 '''default match function used by dirstate and localrepository.
83 '''default match function used by dirstate and localrepository.
84 this honours the .hgignore file, and nothing more.'''
84 this honours the .hgignore file, and nothing more.'''
85 if self.blockignore:
85 if self.blockignore:
86 return False
86 return False
87 if not self.ignorefunc:
87 if not self.ignorefunc:
88 ignore = self.hgignore()
88 ignore = self.hgignore()
89 if ignore:
89 if ignore:
90 files, self.ignorefunc, anypats = util.matcher(self.root,
90 files, self.ignorefunc, anypats = util.matcher(self.root,
91 inc=ignore)
91 inc=ignore)
92 else:
92 else:
93 self.ignorefunc = util.never
93 self.ignorefunc = util.never
94 return self.ignorefunc(fn)
94 return self.ignorefunc(fn)
95
95
96 def __del__(self):
96 def __del__(self):
97 if self.dirty:
97 if self.dirty:
98 self.write()
98 self.write()
99
99
100 def __getitem__(self, key):
100 def __getitem__(self, key):
101 try:
101 try:
102 return self.map[key]
102 return self.map[key]
103 except TypeError:
103 except TypeError:
104 self.lazyread()
104 self.lazyread()
105 return self[key]
105 return self[key]
106
106
107 def __contains__(self, key):
107 def __contains__(self, key):
108 self.lazyread()
108 self.lazyread()
109 return key in self.map
109 return key in self.map
110
110
111 def parents(self):
111 def parents(self):
112 self.lazyread()
112 self.lazyread()
113 return self.pl
113 return self.pl
114
114
115 def markdirty(self):
115 def markdirty(self):
116 if not self.dirty:
116 if not self.dirty:
117 self.dirty = 1
117 self.dirty = 1
118
118
119 def setparents(self, p1, p2=nullid):
119 def setparents(self, p1, p2=nullid):
120 self.lazyread()
120 self.lazyread()
121 self.markdirty()
121 self.markdirty()
122 self.pl = p1, p2
122 self.pl = p1, p2
123
123
124 def state(self, key):
124 def state(self, key):
125 try:
125 try:
126 return self[key][0]
126 return self[key][0]
127 except KeyError:
127 except KeyError:
128 return "?"
128 return "?"
129
129
130 def lazyread(self):
130 def lazyread(self):
131 if self.map is None:
131 if self.map is None:
132 self.read()
132 self.read()
133
133
134 def read(self):
134 def read(self):
135 self.map = {}
135 self.map = {}
136 self.pl = [nullid, nullid]
136 self.pl = [nullid, nullid]
137 try:
137 try:
138 st = self.opener("dirstate").read()
138 st = self.opener("dirstate").read()
139 if not st: return
139 if not st: return
140 except: return
140 except: return
141
141
142 self.pl = [st[:20], st[20: 40]]
142 self.pl = [st[:20], st[20: 40]]
143
143
144 pos = 40
144 pos = 40
145 while pos < len(st):
145 while pos < len(st):
146 e = struct.unpack(">cllll", st[pos:pos+17])
146 e = struct.unpack(">cllll", st[pos:pos+17])
147 l = e[4]
147 l = e[4]
148 pos += 17
148 pos += 17
149 f = st[pos:pos + l]
149 f = st[pos:pos + l]
150 if '\0' in f:
150 if '\0' in f:
151 f, c = f.split('\0')
151 f, c = f.split('\0')
152 self.copies[f] = c
152 self.copies[f] = c
153 self.map[f] = e[:4]
153 self.map[f] = e[:4]
154 pos += l
154 pos += l
155
155
156 def copy(self, source, dest):
156 def copy(self, source, dest):
157 self.lazyread()
157 self.lazyread()
158 self.markdirty()
158 self.markdirty()
159 self.copies[dest] = source
159 self.copies[dest] = source
160
160
161 def copied(self, file):
161 def copied(self, file):
162 return self.copies.get(file, None)
162 return self.copies.get(file, None)
163
163
164 def update(self, files, state, **kw):
164 def update(self, files, state, **kw):
165 ''' current states:
165 ''' current states:
166 n normal
166 n normal
167 m needs merging
167 m needs merging
168 r marked for removal
168 r marked for removal
169 a marked for addition'''
169 a marked for addition'''
170
170
171 if not files: return
171 if not files: return
172 self.lazyread()
172 self.lazyread()
173 self.markdirty()
173 self.markdirty()
174 for f in files:
174 for f in files:
175 if state == "r":
175 if state == "r":
176 self.map[f] = ('r', 0, 0, 0)
176 self.map[f] = ('r', 0, 0, 0)
177 else:
177 else:
178 s = os.lstat(self.wjoin(f))
178 s = os.lstat(self.wjoin(f))
179 st_size = kw.get('st_size', s.st_size)
179 st_size = kw.get('st_size', s.st_size)
180 st_mtime = kw.get('st_mtime', s.st_mtime)
180 st_mtime = kw.get('st_mtime', s.st_mtime)
181 self.map[f] = (state, s.st_mode, st_size, st_mtime)
181 self.map[f] = (state, s.st_mode, st_size, st_mtime)
182 if self.copies.has_key(f):
182 if self.copies.has_key(f):
183 del self.copies[f]
183 del self.copies[f]
184
184
185 def forget(self, files):
185 def forget(self, files):
186 if not files: return
186 if not files: return
187 self.lazyread()
187 self.lazyread()
188 self.markdirty()
188 self.markdirty()
189 for f in files:
189 for f in files:
190 try:
190 try:
191 del self.map[f]
191 del self.map[f]
192 except KeyError:
192 except KeyError:
193 self.ui.warn(_("not in dirstate: %s!\n") % f)
193 self.ui.warn(_("not in dirstate: %s!\n") % f)
194 pass
194 pass
195
195
196 def clear(self):
196 def clear(self):
197 self.map = {}
197 self.map = {}
198 self.markdirty()
198 self.markdirty()
199
199
200 def write(self):
200 def write(self):
201 st = self.opener("dirstate", "w", atomic=True)
201 st = self.opener("dirstate", "w", atomic=True)
202 st.write("".join(self.pl))
202 st.write("".join(self.pl))
203 for f, e in self.map.items():
203 for f, e in self.map.items():
204 c = self.copied(f)
204 c = self.copied(f)
205 if c:
205 if c:
206 f = f + "\0" + c
206 f = f + "\0" + c
207 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
207 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
208 st.write(e + f)
208 st.write(e + f)
209 self.dirty = 0
209 self.dirty = 0
210
210
211 def filterfiles(self, files):
211 def filterfiles(self, files):
212 ret = {}
212 ret = {}
213 unknown = []
213 unknown = []
214
214
215 for x in files:
215 for x in files:
216 if x == '.':
216 if x == '.':
217 return self.map.copy()
217 return self.map.copy()
218 if x not in self.map:
218 if x not in self.map:
219 unknown.append(x)
219 unknown.append(x)
220 else:
220 else:
221 ret[x] = self.map[x]
221 ret[x] = self.map[x]
222
222
223 if not unknown:
223 if not unknown:
224 return ret
224 return ret
225
225
226 b = self.map.keys()
226 b = self.map.keys()
227 b.sort()
227 b.sort()
228 blen = len(b)
228 blen = len(b)
229
229
230 for x in unknown:
230 for x in unknown:
231 bs = bisect.bisect(b, x)
231 bs = bisect.bisect(b, x)
232 if bs != 0 and b[bs-1] == x:
232 if bs != 0 and b[bs-1] == x:
233 ret[x] = self.map[x]
233 ret[x] = self.map[x]
234 continue
234 continue
235 while bs < blen:
235 while bs < blen:
236 s = b[bs]
236 s = b[bs]
237 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
237 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
238 ret[s] = self.map[s]
238 ret[s] = self.map[s]
239 else:
239 else:
240 break
240 break
241 bs += 1
241 bs += 1
242 return ret
242 return ret
243
243
244 def supported_type(self, f, st, verbose=False):
244 def supported_type(self, f, st, verbose=False):
245 if stat.S_ISREG(st.st_mode):
245 if stat.S_ISREG(st.st_mode):
246 return True
246 return True
247 if verbose:
247 if verbose:
248 kind = 'unknown'
248 kind = 'unknown'
249 if stat.S_ISCHR(st.st_mode): kind = _('character device')
249 if stat.S_ISCHR(st.st_mode): kind = _('character device')
250 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
250 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
251 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
251 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
252 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
252 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
253 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
253 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
254 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
254 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
255 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
255 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
256 util.pathto(self.getcwd(), f),
256 util.pathto(self.getcwd(), f),
257 kind))
257 kind))
258 return False
258 return False
259
259
260 def statwalk(self, files=None, match=util.always, dc=None):
260 def statwalk(self, files=None, match=util.always, dc=None):
261 self.lazyread()
261 self.lazyread()
262
262
263 # walk all files by default
263 # walk all files by default
264 if not files:
264 if not files:
265 files = [self.root]
265 files = [self.root]
266 if not dc:
266 if not dc:
267 dc = self.map.copy()
267 dc = self.map.copy()
268 elif not dc:
268 elif not dc:
269 dc = self.filterfiles(files)
269 dc = self.filterfiles(files)
270
270
271 def statmatch(file, stat):
271 def statmatch(file, stat):
272 file = util.pconvert(file)
272 file = util.pconvert(file)
273 if file not in dc and self.ignore(file):
273 if file not in dc and self.ignore(file):
274 return False
274 return False
275 return match(file)
275 return match(file)
276
276
277 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
277 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
278
278
279 def walk(self, files=None, match=util.always, dc=None):
279 def walk(self, files=None, match=util.always, dc=None):
280 # filter out the stat
280 # filter out the stat
281 for src, f, st in self.statwalk(files, match, dc):
281 for src, f, st in self.statwalk(files, match, dc):
282 yield src, f
282 yield src, f
283
283
284 # walk recursively through the directory tree, finding all files
284 # walk recursively through the directory tree, finding all files
285 # matched by the statmatch function
285 # matched by the statmatch function
286 #
286 #
287 # results are yielded in a tuple (src, filename, st), where src
287 # results are yielded in a tuple (src, filename, st), where src
288 # is one of:
288 # is one of:
289 # 'f' the file was found in the directory tree
289 # 'f' the file was found in the directory tree
290 # 'm' the file was only in the dirstate and not in the tree
290 # 'm' the file was only in the dirstate and not in the tree
291 # and st is the stat result if the file was found in the directory.
291 # and st is the stat result if the file was found in the directory.
292 #
292 #
293 # dc is an optional arg for the current dirstate. dc is not modified
293 # dc is an optional arg for the current dirstate. dc is not modified
294 # directly by this function, but might be modified by your statmatch call.
294 # directly by this function, but might be modified by your statmatch call.
295 #
295 #
296 def walkhelper(self, files, statmatch, dc):
296 def walkhelper(self, files, statmatch, dc):
297 # recursion free walker, faster than os.walk.
297 # recursion free walker, faster than os.walk.
298 def findfiles(s):
298 def findfiles(s):
299 work = [s]
299 work = [s]
300 while work:
300 while work:
301 top = work.pop()
301 top = work.pop()
302 names = os.listdir(top)
302 names = os.listdir(top)
303 names.sort()
303 names.sort()
304 # nd is the top of the repository dir tree
304 # nd is the top of the repository dir tree
305 nd = util.normpath(top[len(self.root) + 1:])
305 nd = util.normpath(top[len(self.root) + 1:])
306 if nd == '.': nd = ''
306 if nd == '.': nd = ''
307 for f in names:
307 for f in names:
308 np = os.path.join(nd, f)
308 np = os.path.join(nd, f)
309 if seen(np):
309 if seen(np):
310 continue
310 continue
311 p = os.path.join(top, f)
311 p = os.path.join(top, f)
312 # don't trip over symlinks
312 # don't trip over symlinks
313 st = os.lstat(p)
313 st = os.lstat(p)
314 if stat.S_ISDIR(st.st_mode):
314 if stat.S_ISDIR(st.st_mode):
315 ds = os.path.join(nd, f +'/')
315 ds = os.path.join(nd, f +'/')
316 if statmatch(ds, st):
316 if statmatch(ds, st):
317 work.append(p)
317 work.append(p)
318 if statmatch(np, st) and np in dc:
318 if statmatch(np, st) and np in dc:
319 yield 'm', util.pconvert(np), st
319 yield 'm', util.pconvert(np), st
320 elif statmatch(np, st):
320 elif statmatch(np, st):
321 if self.supported_type(np, st):
321 if self.supported_type(np, st):
322 yield 'f', util.pconvert(np), st
322 yield 'f', util.pconvert(np), st
323 elif np in dc:
323 elif np in dc:
324 yield 'm', util.pconvert(np), st
324 yield 'm', util.pconvert(np), st
325
325
326 known = {'.hg': 1}
326 known = {'.hg': 1}
327 def seen(fn):
327 def seen(fn):
328 if fn in known: return True
328 if fn in known: return True
329 known[fn] = 1
329 known[fn] = 1
330
330
331 # step one, find all files that match our criteria
331 # step one, find all files that match our criteria
332 files.sort()
332 files.sort()
333 for ff in util.unique(files):
333 for ff in util.unique(files):
334 f = self.wjoin(ff)
334 f = self.wjoin(ff)
335 try:
335 try:
336 st = os.lstat(f)
336 st = os.lstat(f)
337 except OSError, inst:
337 except OSError, inst:
338 if ff not in dc: self.ui.warn('%s: %s\n' % (
338 if ff not in dc: self.ui.warn('%s: %s\n' % (
339 util.pathto(self.getcwd(), ff),
339 util.pathto(self.getcwd(), ff),
340 inst.strerror))
340 inst.strerror))
341 continue
341 continue
342 if stat.S_ISDIR(st.st_mode):
342 if stat.S_ISDIR(st.st_mode):
343 cmp1 = (lambda x, y: cmp(x[1], y[1]))
343 cmp1 = (lambda x, y: cmp(x[1], y[1]))
344 sorted = [ x for x in findfiles(f) ]
344 sorted = [ x for x in findfiles(f) ]
345 sorted.sort(cmp1)
345 sorted.sort(cmp1)
346 for e in sorted:
346 for e in sorted:
347 yield e
347 yield e
348 else:
348 else:
349 ff = util.normpath(ff)
349 ff = util.normpath(ff)
350 if seen(ff):
350 if seen(ff):
351 continue
351 continue
352 self.blockignore = True
352 self.blockignore = True
353 if statmatch(ff, st):
353 if statmatch(ff, st):
354 if self.supported_type(ff, st, verbose=True):
354 if self.supported_type(ff, st, verbose=True):
355 yield 'f', ff, st
355 yield 'f', ff, st
356 elif ff in dc:
356 elif ff in dc:
357 yield 'm', ff, st
357 yield 'm', ff, st
358 self.blockignore = False
358 self.blockignore = False
359
359
360 # step two run through anything left in the dc hash and yield
360 # step two run through anything left in the dc hash and yield
361 # if we haven't already seen it
361 # if we haven't already seen it
362 ks = dc.keys()
362 ks = dc.keys()
363 ks.sort()
363 ks.sort()
364 for k in ks:
364 for k in ks:
365 if not seen(k) and (statmatch(k, None)):
365 if not seen(k) and (statmatch(k, None)):
366 yield 'm', k, None
366 yield 'm', k, None
367
367
368 def changes(self, files=None, match=util.always):
368 def changes(self, files=None, match=util.always):
369 lookup, modified, added, unknown = [], [], [], []
369 lookup, modified, added, unknown = [], [], [], []
370 removed, deleted = [], []
370 removed, deleted = [], []
371
371
372 for src, fn, st in self.statwalk(files, match):
372 for src, fn, st in self.statwalk(files, match):
373 try:
373 try:
374 type, mode, size, time = self[fn]
374 type, mode, size, time = self[fn]
375 except KeyError:
375 except KeyError:
376 unknown.append(fn)
376 unknown.append(fn)
377 continue
377 continue
378 if src == 'm':
378 if src == 'm':
379 nonexistent = True
379 nonexistent = True
380 if not st:
380 if not st:
381 try:
381 try:
382 f = self.wjoin(fn)
382 f = self.wjoin(fn)
383 st = os.lstat(f)
383 st = os.lstat(f)
384 except OSError, inst:
384 except OSError, inst:
385 if inst.errno != errno.ENOENT:
385 if inst.errno != errno.ENOENT:
386 raise
386 raise
387 st = None
387 st = None
388 # We need to re-check that it is a valid file
388 # We need to re-check that it is a valid file
389 if st and self.supported_type(fn, st):
389 if st and self.supported_type(fn, st):
390 nonexistent = False
390 nonexistent = False
391 # XXX: what to do with file no longer present in the fs
391 # XXX: what to do with file no longer present in the fs
392 # who are not removed in the dirstate ?
392 # who are not removed in the dirstate ?
393 if nonexistent and type in "nm":
393 if nonexistent and type in "nm":
394 deleted.append(fn)
394 deleted.append(fn)
395 continue
395 continue
396 # check the common case first
396 # check the common case first
397 if type == 'n':
397 if type == 'n':
398 if not st:
398 if not st:
399 st = os.stat(fn)
399 st = os.stat(fn)
400 if size != st.st_size or (mode ^ st.st_mode) & 0100:
400 if size != st.st_size or (mode ^ st.st_mode) & 0100:
401 modified.append(fn)
401 modified.append(fn)
402 elif time != st.st_mtime:
402 elif time != st.st_mtime:
403 lookup.append(fn)
403 lookup.append(fn)
404 elif type == 'm':
404 elif type == 'm':
405 modified.append(fn)
405 modified.append(fn)
406 elif type == 'a':
406 elif type == 'a':
407 added.append(fn)
407 added.append(fn)
408 elif type == 'r':
408 elif type == 'r':
409 removed.append(fn)
409 removed.append(fn)
410
410
411 return (lookup, modified, added, removed + deleted, unknown)
411 return (lookup, modified, added, removed + deleted, unknown)
@@ -1,1025 +1,1025 b''
1 # hgweb.py - web interface to a mercurial repository
1 # hgweb.py - web interface to a mercurial repository
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, cgi, sys
9 import os, cgi, sys
10 from demandload import demandload
10 from demandload import demandload
11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util")
12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util")
13 demandload(globals(), "mimetypes")
13 demandload(globals(), "mimetypes")
14 from node import *
14 from node import *
15 from i18n import gettext as _
15 from i18n import gettext as _
16
16
17 def templatepath():
17 def templatepath():
18 for f in "templates", "../templates":
18 for f in "templates", "../templates":
19 p = os.path.join(os.path.dirname(__file__), f)
19 p = os.path.join(os.path.dirname(__file__), f)
20 if os.path.isdir(p):
20 if os.path.isdir(p):
21 return p
21 return p
22
22
23 def age(x):
23 def age(x):
24 def plural(t, c):
24 def plural(t, c):
25 if c == 1:
25 if c == 1:
26 return t
26 return t
27 return t + "s"
27 return t + "s"
28 def fmt(t, c):
28 def fmt(t, c):
29 return "%d %s" % (c, plural(t, c))
29 return "%d %s" % (c, plural(t, c))
30
30
31 now = time.time()
31 now = time.time()
32 then = x[0]
32 then = x[0]
33 delta = max(1, int(now - then))
33 delta = max(1, int(now - then))
34
34
35 scales = [["second", 1],
35 scales = [["second", 1],
36 ["minute", 60],
36 ["minute", 60],
37 ["hour", 3600],
37 ["hour", 3600],
38 ["day", 3600 * 24],
38 ["day", 3600 * 24],
39 ["week", 3600 * 24 * 7],
39 ["week", 3600 * 24 * 7],
40 ["month", 3600 * 24 * 30],
40 ["month", 3600 * 24 * 30],
41 ["year", 3600 * 24 * 365]]
41 ["year", 3600 * 24 * 365]]
42
42
43 scales.reverse()
43 scales.reverse()
44
44
45 for t, s in scales:
45 for t, s in scales:
46 n = delta / s
46 n = delta / s
47 if n >= 2 or s == 1:
47 if n >= 2 or s == 1:
48 return fmt(t, n)
48 return fmt(t, n)
49
49
50 def nl2br(text):
50 def nl2br(text):
51 return text.replace('\n', '<br/>\n')
51 return text.replace('\n', '<br/>\n')
52
52
53 def obfuscate(text):
53 def obfuscate(text):
54 return ''.join(['&#%d;' % ord(c) for c in text])
54 return ''.join(['&#%d;' % ord(c) for c in text])
55
55
56 def up(p):
56 def up(p):
57 if p[0] != "/":
57 if p[0] != "/":
58 p = "/" + p
58 p = "/" + p
59 if p[-1] == "/":
59 if p[-1] == "/":
60 p = p[:-1]
60 p = p[:-1]
61 up = os.path.dirname(p)
61 up = os.path.dirname(p)
62 if up == "/":
62 if up == "/":
63 return "/"
63 return "/"
64 return up + "/"
64 return up + "/"
65
65
66 def get_mtime(repo_path):
66 def get_mtime(repo_path):
67 hg_path = os.path.join(repo_path, ".hg")
67 hg_path = os.path.join(repo_path, ".hg")
68 cl_path = os.path.join(hg_path, "00changelog.i")
68 cl_path = os.path.join(hg_path, "00changelog.i")
69 if os.path.exists(os.path.join(cl_path)):
69 if os.path.exists(os.path.join(cl_path)):
70 return os.stat(cl_path).st_mtime
70 return os.stat(cl_path).st_mtime
71 else:
71 else:
72 return os.stat(hg_path).st_mtime
72 return os.stat(hg_path).st_mtime
73
73
74 class hgrequest:
74 class hgrequest(object):
75 def __init__(self, inp=None, out=None, env=None):
75 def __init__(self, inp=None, out=None, env=None):
76 self.inp = inp or sys.stdin
76 self.inp = inp or sys.stdin
77 self.out = out or sys.stdout
77 self.out = out or sys.stdout
78 self.env = env or os.environ
78 self.env = env or os.environ
79 self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
79 self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
80
80
81 def write(self, *things):
81 def write(self, *things):
82 for thing in things:
82 for thing in things:
83 if hasattr(thing, "__iter__"):
83 if hasattr(thing, "__iter__"):
84 for part in thing:
84 for part in thing:
85 self.write(part)
85 self.write(part)
86 else:
86 else:
87 try:
87 try:
88 self.out.write(str(thing))
88 self.out.write(str(thing))
89 except socket.error, inst:
89 except socket.error, inst:
90 if inst[0] != errno.ECONNRESET:
90 if inst[0] != errno.ECONNRESET:
91 raise
91 raise
92
92
93 def header(self, headers=[('Content-type','text/html')]):
93 def header(self, headers=[('Content-type','text/html')]):
94 for header in headers:
94 for header in headers:
95 self.out.write("%s: %s\r\n" % header)
95 self.out.write("%s: %s\r\n" % header)
96 self.out.write("\r\n")
96 self.out.write("\r\n")
97
97
98 def httphdr(self, type, file="", size=0):
98 def httphdr(self, type, file="", size=0):
99
99
100 headers = [('Content-type', type)]
100 headers = [('Content-type', type)]
101 if file:
101 if file:
102 headers.append(('Content-disposition', 'attachment; filename=%s' % file))
102 headers.append(('Content-disposition', 'attachment; filename=%s' % file))
103 if size > 0:
103 if size > 0:
104 headers.append(('Content-length', str(size)))
104 headers.append(('Content-length', str(size)))
105 self.header(headers)
105 self.header(headers)
106
106
107 class templater:
107 class templater(object):
108 def __init__(self, mapfile, filters={}, defaults={}):
108 def __init__(self, mapfile, filters={}, defaults={}):
109 self.cache = {}
109 self.cache = {}
110 self.map = {}
110 self.map = {}
111 self.base = os.path.dirname(mapfile)
111 self.base = os.path.dirname(mapfile)
112 self.filters = filters
112 self.filters = filters
113 self.defaults = defaults
113 self.defaults = defaults
114
114
115 for l in file(mapfile):
115 for l in file(mapfile):
116 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
116 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
117 if m:
117 if m:
118 self.cache[m.group(1)] = m.group(2)
118 self.cache[m.group(1)] = m.group(2)
119 else:
119 else:
120 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
120 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
121 if m:
121 if m:
122 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
122 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
123 else:
123 else:
124 raise LookupError(_("unknown map entry '%s'") % l)
124 raise LookupError(_("unknown map entry '%s'") % l)
125
125
126 def __call__(self, t, **map):
126 def __call__(self, t, **map):
127 m = self.defaults.copy()
127 m = self.defaults.copy()
128 m.update(map)
128 m.update(map)
129 try:
129 try:
130 tmpl = self.cache[t]
130 tmpl = self.cache[t]
131 except KeyError:
131 except KeyError:
132 tmpl = self.cache[t] = file(self.map[t]).read()
132 tmpl = self.cache[t] = file(self.map[t]).read()
133 return self.template(tmpl, self.filters, **m)
133 return self.template(tmpl, self.filters, **m)
134
134
135 def template(self, tmpl, filters={}, **map):
135 def template(self, tmpl, filters={}, **map):
136 while tmpl:
136 while tmpl:
137 m = re.search(r"#([a-zA-Z0-9]+)((%[a-zA-Z0-9]+)*)((\|[a-zA-Z0-9]+)*)#", tmpl)
137 m = re.search(r"#([a-zA-Z0-9]+)((%[a-zA-Z0-9]+)*)((\|[a-zA-Z0-9]+)*)#", tmpl)
138 if m:
138 if m:
139 yield tmpl[:m.start(0)]
139 yield tmpl[:m.start(0)]
140 v = map.get(m.group(1), "")
140 v = map.get(m.group(1), "")
141 v = callable(v) and v(**map) or v
141 v = callable(v) and v(**map) or v
142
142
143 format = m.group(2)
143 format = m.group(2)
144 fl = m.group(4)
144 fl = m.group(4)
145
145
146 if format:
146 if format:
147 q = v.__iter__
147 q = v.__iter__
148 for i in q():
148 for i in q():
149 lm = map.copy()
149 lm = map.copy()
150 lm.update(i)
150 lm.update(i)
151 yield self(format[1:], **lm)
151 yield self(format[1:], **lm)
152
152
153 v = ""
153 v = ""
154
154
155 elif fl:
155 elif fl:
156 for f in fl.split("|")[1:]:
156 for f in fl.split("|")[1:]:
157 v = filters[f](v)
157 v = filters[f](v)
158
158
159 yield v
159 yield v
160 tmpl = tmpl[m.end(0):]
160 tmpl = tmpl[m.end(0):]
161 else:
161 else:
162 yield tmpl
162 yield tmpl
163 return
163 return
164
164
165 common_filters = {
165 common_filters = {
166 "escape": cgi.escape,
166 "escape": cgi.escape,
167 "strip": lambda x: x.strip(),
167 "strip": lambda x: x.strip(),
168 "age": age,
168 "age": age,
169 "date": lambda x: util.datestr(x),
169 "date": lambda x: util.datestr(x),
170 "addbreaks": nl2br,
170 "addbreaks": nl2br,
171 "obfuscate": obfuscate,
171 "obfuscate": obfuscate,
172 "short": (lambda x: x[:12]),
172 "short": (lambda x: x[:12]),
173 "firstline": (lambda x: x.splitlines(1)[0]),
173 "firstline": (lambda x: x.splitlines(1)[0]),
174 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
174 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
175 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
175 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
176 }
176 }
177
177
178 class hgweb:
178 class hgweb(object):
179 def __init__(self, repo, name=None):
179 def __init__(self, repo, name=None):
180 if type(repo) == type(""):
180 if type(repo) == type(""):
181 self.repo = hg.repository(ui.ui(), repo)
181 self.repo = hg.repository(ui.ui(), repo)
182 else:
182 else:
183 self.repo = repo
183 self.repo = repo
184
184
185 self.mtime = -1
185 self.mtime = -1
186 self.reponame = name
186 self.reponame = name
187 self.archives = 'zip', 'gz', 'bz2'
187 self.archives = 'zip', 'gz', 'bz2'
188
188
189 def refresh(self):
189 def refresh(self):
190 mtime = get_mtime(self.repo.root)
190 mtime = get_mtime(self.repo.root)
191 if mtime != self.mtime:
191 if mtime != self.mtime:
192 self.mtime = mtime
192 self.mtime = mtime
193 self.repo = hg.repository(self.repo.ui, self.repo.root)
193 self.repo = hg.repository(self.repo.ui, self.repo.root)
194 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
194 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
195 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
195 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
196 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
196 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
197
197
198 def archivelist(self, nodeid):
198 def archivelist(self, nodeid):
199 for i in self.archives:
199 for i in self.archives:
200 if self.repo.ui.configbool("web", "allow" + i, False):
200 if self.repo.ui.configbool("web", "allow" + i, False):
201 yield {"type" : i, "node" : nodeid}
201 yield {"type" : i, "node" : nodeid}
202
202
203 def listfiles(self, files, mf):
203 def listfiles(self, files, mf):
204 for f in files[:self.maxfiles]:
204 for f in files[:self.maxfiles]:
205 yield self.t("filenodelink", node=hex(mf[f]), file=f)
205 yield self.t("filenodelink", node=hex(mf[f]), file=f)
206 if len(files) > self.maxfiles:
206 if len(files) > self.maxfiles:
207 yield self.t("fileellipses")
207 yield self.t("fileellipses")
208
208
209 def listfilediffs(self, files, changeset):
209 def listfilediffs(self, files, changeset):
210 for f in files[:self.maxfiles]:
210 for f in files[:self.maxfiles]:
211 yield self.t("filedifflink", node=hex(changeset), file=f)
211 yield self.t("filedifflink", node=hex(changeset), file=f)
212 if len(files) > self.maxfiles:
212 if len(files) > self.maxfiles:
213 yield self.t("fileellipses")
213 yield self.t("fileellipses")
214
214
215 def parents(self, node, parents=[], rev=None, hide=False, **args):
215 def parents(self, node, parents=[], rev=None, hide=False, **args):
216 if not rev:
216 if not rev:
217 rev = lambda x: ""
217 rev = lambda x: ""
218 parents = [p for p in parents if p != nullid]
218 parents = [p for p in parents if p != nullid]
219 if hide and len(parents) == 1 and rev(parents[0]) == rev(node) - 1:
219 if hide and len(parents) == 1 and rev(parents[0]) == rev(node) - 1:
220 return
220 return
221 for p in parents:
221 for p in parents:
222 yield dict(node=hex(p), rev=rev(p), **args)
222 yield dict(node=hex(p), rev=rev(p), **args)
223
223
224 def showtag(self, t1, node=nullid, **args):
224 def showtag(self, t1, node=nullid, **args):
225 for t in self.repo.nodetags(node):
225 for t in self.repo.nodetags(node):
226 yield self.t(t1, tag=t, **args)
226 yield self.t(t1, tag=t, **args)
227
227
    def diff(self, node1, node2, files):
        """Yield rendered "diffblock" templates for the changes between
        changesets node1 and node2, restricted to *files* when non-empty.

        Entries of *files* ending in os.sep select whole directories.
        """
        def filterfiles(list, files):
            # Keep exact filename matches plus anything underneath a
            # directory entry (one ending in os.sep).
            l = [x for x in list if x in files]

            for f in files:
                if f[-1] != os.sep:
                    f += os.sep
                l += [x for x in list if x.startswith(f)]
            return l

        # One-element list acts as a mutable cell so the nested
        # generator can alternate row parity across blocks.
        parity = [0]
        def diffblock(diff, f, fn):
            yield self.t("diffblock",
                         lines=prettyprintlines(diff),
                         parity=parity[0],
                         file=f,
                         filenode=hex(fn or nullid))
            parity[0] = 1 - parity[0]

        def prettyprintlines(diff):
            # Wrap each diff line in the template matching its marker
            # character (+, -, @, or plain context).
            for l in diff.splitlines(1):
                if l.startswith('+'):
                    yield self.t("difflineplus", line=l)
                elif l.startswith('-'):
                    yield self.t("difflineminus", line=l)
                elif l.startswith('@'):
                    yield self.t("difflineat", line=l)
                else:
                    yield self.t("diffline", line=l)

        r = self.repo
        cl = r.changelog
        mf = r.manifest
        change1 = cl.read(node1)
        change2 = cl.read(node2)
        mmap1 = mf.read(change1[0])
        mmap2 = mf.read(change2[0])
        date1 = util.datestr(change1[2])
        date2 = util.datestr(change2[2])

        c, a, d, u = r.changes(node1, node2)
        if files:
            c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))

        # Changed, added and deleted files each get a unified diff
        # block; added files diff from None, deleted files diff to None.
        for f in c:
            to = r.file(f).read(mmap1[f])
            tn = r.file(f).read(mmap2[f])
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
        for f in a:
            to = None
            tn = r.file(f).read(mmap2[f])
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
        for f in d:
            to = r.file(f).read(mmap1[f])
            tn = None
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
284
284
    def changelog(self, pos):
        """Render the changelog page showing the window of revisions
        ending at *pos* (maxchanges entries per page)."""
        def changenav(**map):
            # Navigation offsets at exponentially growing distances:
            # 1, 3, 10, 30, 100, ... either side of pos.
            def seq(factor=1):
                yield 1 * factor
                yield 3 * factor
                #yield 5 * factor
                for f in seq(factor * 10):
                    yield f

            l = []
            for f in seq():
                # Skip offsets smaller than half a page; stop once the
                # offset exceeds the total revision count.
                if f < self.maxchanges / 2:
                    continue
                if f > count:
                    break
                r = "%d" % f
                if pos + f < count:
                    l.append(("+" + r, pos + f))
                if pos - f >= 0:
                    l.insert(0, ("-" + r, pos - f))

            yield {"rev": 0, "label": "(0)"}

            for label, rev in l:
                yield {"label": label, "rev": rev}

            yield {"label": "tip", "rev": "tip"}

        def changelist(**map):
            # Seed parity so the newest row always has parity 0.
            parity = (start - end) & 1
            cl = self.repo.changelog
            l = [] # build a list in forward order for efficiency
            for i in range(start, end):
                n = cl.node(i)
                changes = cl.read(n)
                hn = hex(n)

                # Insert at the front: entries are emitted newest-first.
                l.insert(0, {"parity": parity,
                             "author": changes[1],
                             "parent": self.parents(n, cl.parents(n), cl.rev,
                                                    hide=True),
                             "changelogtag": self.showtag("changelogtag",n),
                             "manifest": hex(changes[0]),
                             "desc": changes[4],
                             "date": changes[2],
                             "files": self.listfilediffs(changes[3], n),
                             "rev": i,
                             "node": hn})
                parity = 1 - parity

            for e in l:
                yield e

        cl = self.repo.changelog
        mf = cl.read(cl.tip())[0]
        count = cl.count()
        # Clamp the [start, end) window to the repository's history.
        start = max(0, pos - self.maxchanges + 1)
        end = min(count, start + self.maxchanges)
        pos = end - 1

        yield self.t('changelog',
                     changenav=changenav,
                     manifest=hex(mf),
                     rev=pos, changesets=count, entries=changelist)
349
349
    def search(self, query):
        """Render changesets matching *query*: every lower-cased query
        word must occur in the author, the description, or one of the
        first 20 changed file names.  Results are newest-first, capped
        at maxchanges entries."""

        def changelist(**map):
            cl = self.repo.changelog
            count = 0
            qw = query.lower().split()

            def revgen():
                # Walk history newest-first in batches of 100 revisions:
                # read each batch forward, then reverse it for emission.
                for i in range(cl.count() - 1, 0, -100):
                    l = []
                    for j in range(max(0, i - 100), i):
                        n = cl.node(j)
                        changes = cl.read(n)
                        l.append((n, j, changes))
                    l.reverse()
                    for e in l:
                        yield e

            for n, i, changes in revgen():
                miss = 0
                # AND semantics: a single non-matching word skips the
                # changeset.
                for q in qw:
                    if not (q in changes[1].lower() or
                            q in changes[4].lower() or
                            q in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

                count += 1
                hn = hex(n)

                yield self.t('searchentry',
                             parity=count & 1,
                             author=changes[1],
                             parent=self.parents(n, cl.parents(n), cl.rev),
                             changelogtag=self.showtag("changelogtag",n),
                             manifest=hex(changes[0]),
                             desc=changes[4],
                             date=changes[2],
                             files=self.listfilediffs(changes[3], n),
                             rev=i,
                             node=hn)

                if count >= self.maxchanges:
                    break

        cl = self.repo.changelog
        mf = cl.read(cl.tip())[0]

        yield self.t('search',
                     query=query,
                     manifest=hex(mf),
                     entries=changelist)
404
404
    def changeset(self, nodeid):
        """Render the summary page for changeset *nodeid* (any form
        accepted by repo.lookup)."""
        cl = self.repo.changelog
        n = self.repo.lookup(nodeid)
        nodeid = hex(n)
        changes = cl.read(n)
        p1 = cl.parents(n)[0]

        files = []
        mf = self.repo.manifest.read(changes[0])
        for f in changes[3]:
            # mf.get falls back to nullid for files the changeset
            # removed (no longer present in the manifest).
            files.append(self.t("filenodelink",
                                filenode=hex(mf.get(f, nullid)), file=f))

        def diff(**map):
            # Diff against the first parent only.
            yield self.diff(p1, n, None)

        yield self.t('changeset',
                     diff=diff,
                     rev=cl.rev(n),
                     node=nodeid,
                     parent=self.parents(n, cl.parents(n), cl.rev),
                     changesettag=self.showtag("changesettag",n),
                     manifest=hex(changes[0]),
                     author=changes[1],
                     desc=changes[4],
                     date=changes[2],
                     files=files,
                     archives=self.archivelist(nodeid))
433
433
    def filelog(self, f, filenode):
        """Render the revision history of file *f*, newest entry first."""
        cl = self.repo.changelog
        fl = self.repo.file(f)
        filenode = hex(fl.lookup(filenode))
        count = fl.count()

        def entries(**map):
            l = []
            # Seed parity so the newest (first emitted) row is parity 0.
            parity = (count - 1) & 1

            for i in range(count):
                n = fl.node(i)
                # linkrev maps the file revision to its changeset.
                lr = fl.linkrev(n)
                cn = cl.node(lr)
                cs = cl.read(cl.node(lr))

                # Build forward, insert at front: newest-first output.
                l.insert(0, {"parity": parity,
                             "filenode": hex(n),
                             "filerev": i,
                             "file": f,
                             "node": hex(cn),
                             "author": cs[1],
                             "date": cs[2],
                             "parent": self.parents(n, fl.parents(n),
                                                    fl.rev, file=f),
                             "desc": cs[4]})
                parity = 1 - parity

            for e in l:
                yield e

        yield self.t("filelog", file=f, filenode=filenode, entries=entries)
466
466
    def filerevision(self, f, node):
        """Render the contents of file *f* at file revision *node*,
        with line numbers and alternating row parity."""
        fl = self.repo.file(f)
        n = fl.lookup(node)
        node = hex(n)
        text = fl.read(n)
        changerev = fl.linkrev(n)
        cl = self.repo.changelog
        cn = cl.node(changerev)
        cs = cl.read(cn)
        mfn = cs[0]

        mt = mimetypes.guess_type(f)[0]
        rawtext = text
        # Binary content is replaced by a placeholder for inline display;
        # the raw bytes are still passed through as `raw`.
        if util.binary(text):
            text = "(binary:%s)" % mt

        def lines():
            for l, t in enumerate(text.splitlines(1)):
                yield {"line": t,
                       "linenumber": "% 6d" % (l + 1),
                       "parity": l & 1}

        yield self.t("filerevision",
                     file=f,
                     filenode=node,
                     path=up(f),
                     text=lines(),
                     raw=rawtext,
                     mimetype=mt,
                     rev=changerev,
                     node=hex(cn),
                     manifest=hex(mfn),
                     author=cs[1],
                     date=cs[2],
                     parent=self.parents(n, fl.parents(n), fl.rev, file=f),
                     permissions=self.repo.manifest.readflags(mfn)[f])
503
503
    def fileannotate(self, f, node):
        """Render per-line annotations (blame) for file *f* at file
        revision *node*."""
        # Memoize per-revision lookups across annotation lines:
        # bcache: revision -> short author name
        # ncache: revision -> changelog node
        bcache = {}
        ncache = {}
        fl = self.repo.file(f)
        n = fl.lookup(node)
        node = hex(n)
        changerev = fl.linkrev(n)

        cl = self.repo.changelog
        cn = cl.node(changerev)
        cs = cl.read(cn)
        mfn = cs[0]

        def annotate(**map):
            parity = 1
            last = None
            for r, l in fl.annotate(n):
                try:
                    cnode = ncache[r]
                except KeyError:
                    cnode = ncache[r] = self.repo.changelog.node(r)

                try:
                    name = bcache[r]
                except KeyError:
                    cl = self.repo.changelog.read(cnode)
                    bcache[r] = name = self.repo.ui.shortuser(cl[1])

                # Flip row parity whenever the originating changeset
                # changes, so runs from one changeset share a color.
                if last != cnode:
                    parity = 1 - parity
                    last = cnode

                yield {"parity": parity,
                       "node": hex(cnode),
                       "rev": r,
                       "author": name,
                       "file": f,
                       "line": l}

        yield self.t("fileannotate",
                     file=f,
                     filenode=node,
                     annotate=annotate,
                     path=up(f),
                     rev=changerev,
                     node=hex(cn),
                     manifest=hex(mfn),
                     author=cs[1],
                     date=cs[2],
                     parent=self.parents(n, fl.parents(n), fl.rev, file=f),
                     permissions=self.repo.manifest.readflags(mfn)[f])
555
555
    def manifest(self, mnode, path):
        """Render a directory listing of manifest *mnode* at *path*
        (a "/"-prefixed directory path within the repository)."""
        man = self.repo.manifest
        mn = man.lookup(mnode)
        mnode = hex(mn)
        mf = man.read(mn)
        rev = man.rev(mn)
        node = self.repo.changelog.node(rev)
        mff = man.readflags(mn)

        files = {}

        # Strip the leading "/" and bucket manifest entries into the
        # immediate children of path: plain files map their short name
        # to (full path, file node); subdirectories map "name/" to
        # (some full path, None) as a marker.
        p = path[1:]
        l = len(p)

        for f,n in mf.items():
            if f[:l] != p:
                continue
            remain = f[l:]
            if "/" in remain:
                short = remain[:remain.find("/") + 1] # bleah
                files[short] = (f, None)
            else:
                short = os.path.basename(remain)
                files[short] = (f, n)

        def filelist(**map):
            # Emit only real files (entries carrying a file node).
            parity = 0
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                if not fnode:
                    continue

                yield {"file": full,
                       "manifest": mnode,
                       "filenode": hex(fnode),
                       "parity": parity,
                       "basename": f,
                       "permissions": mff[full]}
                parity = 1 - parity

        def dirlist(**map):
            # Emit only subdirectories (marker entries with fnode None).
            parity = 0
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                if fnode:
                    continue

                yield {"parity": parity,
                       "path": os.path.join(path, f),
                       "manifest": mnode,
                       "basename": f[:-1]}
                parity = 1 - parity

        yield self.t("manifest",
                     manifest=mnode,
                     rev=rev,
                     node=hex(node),
                     path=path,
                     up=up(path),
                     fentries=filelist,
                     dentries=dirlist,
                     archives=self.archivelist(hex(node)))
622
622
623 def tags(self):
623 def tags(self):
624 cl = self.repo.changelog
624 cl = self.repo.changelog
625 mf = cl.read(cl.tip())[0]
625 mf = cl.read(cl.tip())[0]
626
626
627 i = self.repo.tagslist()
627 i = self.repo.tagslist()
628 i.reverse()
628 i.reverse()
629
629
630 def entries(**map):
630 def entries(**map):
631 parity = 0
631 parity = 0
632 for k,n in i:
632 for k,n in i:
633 yield {"parity": parity,
633 yield {"parity": parity,
634 "tag": k,
634 "tag": k,
635 "node": hex(n)}
635 "node": hex(n)}
636 parity = 1 - parity
636 parity = 1 - parity
637
637
638 yield self.t("tags",
638 yield self.t("tags",
639 manifest=hex(mf),
639 manifest=hex(mf),
640 entries=entries)
640 entries=entries)
641
641
642 def filediff(self, file, changeset):
642 def filediff(self, file, changeset):
643 cl = self.repo.changelog
643 cl = self.repo.changelog
644 n = self.repo.lookup(changeset)
644 n = self.repo.lookup(changeset)
645 changeset = hex(n)
645 changeset = hex(n)
646 p1 = cl.parents(n)[0]
646 p1 = cl.parents(n)[0]
647 cs = cl.read(n)
647 cs = cl.read(n)
648 mf = self.repo.manifest.read(cs[0])
648 mf = self.repo.manifest.read(cs[0])
649
649
650 def diff(**map):
650 def diff(**map):
651 yield self.diff(p1, n, file)
651 yield self.diff(p1, n, file)
652
652
653 yield self.t("filediff",
653 yield self.t("filediff",
654 file=file,
654 file=file,
655 filenode=hex(mf.get(file, nullid)),
655 filenode=hex(mf.get(file, nullid)),
656 node=changeset,
656 node=changeset,
657 rev=self.repo.changelog.rev(n),
657 rev=self.repo.changelog.rev(n),
658 parent=self.parents(n, cl.parents(n), cl.rev),
658 parent=self.parents(n, cl.parents(n), cl.rev),
659 diff=diff)
659 diff=diff)
660
660
    def archive(self, req, cnode, type):
        """Stream an archive of changeset *cnode* to the client.

        type is 'zip', 'gz' or 'bz2' ('gz'/'bz2' produce compressed
        tarballs written directly to the response stream).
        """
        cs = self.repo.changelog.read(cnode)
        mnode = cs[0]
        mf = self.repo.manifest.read(mnode)
        rev = self.repo.manifest.rev(mnode)
        # Sanitize the repository name for use in the archive filename.
        reponame = re.sub(r"\W+", "-", self.reponame)
        name = "%s-%s/" % (reponame, short(cnode))

        files = mf.keys()
        files.sort()

        if type == 'zip':
            # zipfile needs a seekable file, so build the archive in a
            # temp file and copy it out; the temp file is always removed.
            tmp = tempfile.mkstemp()[1]
            try:
                zf = zipfile.ZipFile(tmp, "w", zipfile.ZIP_DEFLATED)

                for f in files:
                    zf.writestr(name + f, self.repo.file(f).read(mf[f]))
                zf.close()

                f = open(tmp, 'r')
                req.httphdr('application/zip', name[:-1] + '.zip',
                            os.path.getsize(tmp))
                req.write(f.read())
                f.close()
            finally:
                os.unlink(tmp)

        else:
            # Tar archives can be streamed directly to the response.
            tf = tarfile.TarFile.open(mode='w|' + type, fileobj=req.out)
            mff = self.repo.manifest.readflags(mnode)
            mtime = int(time.time())

            if type == "gz":
                encoding = "gzip"
            else:
                encoding = "x-bzip2"
            req.header([('Content-type', 'application/x-tar'),
                        ('Content-disposition', 'attachment; filename=%s%s%s' %
                         (name[:-1], '.tar.', type)),
                        ('Content-encoding', encoding)])
            for fname in files:
                rcont = self.repo.file(fname).read(mf[fname])
                finfo = tarfile.TarInfo(name + fname)
                finfo.mtime = mtime
                finfo.size = len(rcont)
                # Executable bit taken from the manifest flags.
                finfo.mode = mff[fname] and 0755 or 0644
                tf.addfile(finfo, StringIO.StringIO(rcont))
            tf.close()
710
710
711 # add tags to things
711 # add tags to things
712 # tags -> list of changesets corresponding to tags
712 # tags -> list of changesets corresponding to tags
713 # find tag, changeset, file
713 # find tag, changeset, file
714
714
715 def run(self, req=hgrequest()):
715 def run(self, req=hgrequest()):
716 def header(**map):
716 def header(**map):
717 yield self.t("header", **map)
717 yield self.t("header", **map)
718
718
719 def footer(**map):
719 def footer(**map):
720 yield self.t("footer", **map)
720 yield self.t("footer", **map)
721
721
722 def expand_form(form):
722 def expand_form(form):
723 shortcuts = {
723 shortcuts = {
724 'cl': [('cmd', ['changelog']), ('rev', None)],
724 'cl': [('cmd', ['changelog']), ('rev', None)],
725 'cs': [('cmd', ['changeset']), ('node', None)],
725 'cs': [('cmd', ['changeset']), ('node', None)],
726 'f': [('cmd', ['file']), ('filenode', None)],
726 'f': [('cmd', ['file']), ('filenode', None)],
727 'fl': [('cmd', ['filelog']), ('filenode', None)],
727 'fl': [('cmd', ['filelog']), ('filenode', None)],
728 'fd': [('cmd', ['filediff']), ('node', None)],
728 'fd': [('cmd', ['filediff']), ('node', None)],
729 'fa': [('cmd', ['annotate']), ('filenode', None)],
729 'fa': [('cmd', ['annotate']), ('filenode', None)],
730 'mf': [('cmd', ['manifest']), ('manifest', None)],
730 'mf': [('cmd', ['manifest']), ('manifest', None)],
731 'ca': [('cmd', ['archive']), ('node', None)],
731 'ca': [('cmd', ['archive']), ('node', None)],
732 'tags': [('cmd', ['tags'])],
732 'tags': [('cmd', ['tags'])],
733 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
733 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
734 }
734 }
735
735
736 for k in shortcuts.iterkeys():
736 for k in shortcuts.iterkeys():
737 if form.has_key(k):
737 if form.has_key(k):
738 for name, value in shortcuts[k]:
738 for name, value in shortcuts[k]:
739 if value is None:
739 if value is None:
740 value = form[k]
740 value = form[k]
741 form[name] = value
741 form[name] = value
742 del form[k]
742 del form[k]
743
743
744 self.refresh()
744 self.refresh()
745
745
746 expand_form(req.form)
746 expand_form(req.form)
747
747
748 t = self.repo.ui.config("web", "templates", templatepath())
748 t = self.repo.ui.config("web", "templates", templatepath())
749 m = os.path.join(t, "map")
749 m = os.path.join(t, "map")
750 style = self.repo.ui.config("web", "style", "")
750 style = self.repo.ui.config("web", "style", "")
751 if req.form.has_key('style'):
751 if req.form.has_key('style'):
752 style = req.form['style'][0]
752 style = req.form['style'][0]
753 if style:
753 if style:
754 b = os.path.basename("map-" + style)
754 b = os.path.basename("map-" + style)
755 p = os.path.join(t, b)
755 p = os.path.join(t, b)
756 if os.path.isfile(p):
756 if os.path.isfile(p):
757 m = p
757 m = p
758
758
759 port = req.env["SERVER_PORT"]
759 port = req.env["SERVER_PORT"]
760 port = port != "80" and (":" + port) or ""
760 port = port != "80" and (":" + port) or ""
761 uri = req.env["REQUEST_URI"]
761 uri = req.env["REQUEST_URI"]
762 if "?" in uri:
762 if "?" in uri:
763 uri = uri.split("?")[0]
763 uri = uri.split("?")[0]
764 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
764 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
765 if not self.reponame:
765 if not self.reponame:
766 self.reponame = (self.repo.ui.config("web", "name")
766 self.reponame = (self.repo.ui.config("web", "name")
767 or uri.strip('/') or self.repo.root)
767 or uri.strip('/') or self.repo.root)
768
768
769 self.t = templater(m, common_filters,
769 self.t = templater(m, common_filters,
770 {"url": url,
770 {"url": url,
771 "repo": self.reponame,
771 "repo": self.reponame,
772 "header": header,
772 "header": header,
773 "footer": footer,
773 "footer": footer,
774 })
774 })
775
775
776 if not req.form.has_key('cmd'):
776 if not req.form.has_key('cmd'):
777 req.form['cmd'] = [self.t.cache['default'],]
777 req.form['cmd'] = [self.t.cache['default'],]
778
778
779 if req.form['cmd'][0] == 'changelog':
779 if req.form['cmd'][0] == 'changelog':
780 c = self.repo.changelog.count() - 1
780 c = self.repo.changelog.count() - 1
781 hi = c
781 hi = c
782 if req.form.has_key('rev'):
782 if req.form.has_key('rev'):
783 hi = req.form['rev'][0]
783 hi = req.form['rev'][0]
784 try:
784 try:
785 hi = self.repo.changelog.rev(self.repo.lookup(hi))
785 hi = self.repo.changelog.rev(self.repo.lookup(hi))
786 except hg.RepoError:
786 except hg.RepoError:
787 req.write(self.search(hi))
787 req.write(self.search(hi))
788 return
788 return
789
789
790 req.write(self.changelog(hi))
790 req.write(self.changelog(hi))
791
791
792 elif req.form['cmd'][0] == 'changeset':
792 elif req.form['cmd'][0] == 'changeset':
793 req.write(self.changeset(req.form['node'][0]))
793 req.write(self.changeset(req.form['node'][0]))
794
794
795 elif req.form['cmd'][0] == 'manifest':
795 elif req.form['cmd'][0] == 'manifest':
796 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
796 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
797
797
798 elif req.form['cmd'][0] == 'tags':
798 elif req.form['cmd'][0] == 'tags':
799 req.write(self.tags())
799 req.write(self.tags())
800
800
801 elif req.form['cmd'][0] == 'filediff':
801 elif req.form['cmd'][0] == 'filediff':
802 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
802 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
803
803
804 elif req.form['cmd'][0] == 'file':
804 elif req.form['cmd'][0] == 'file':
805 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
805 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
806
806
807 elif req.form['cmd'][0] == 'annotate':
807 elif req.form['cmd'][0] == 'annotate':
808 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
808 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
809
809
810 elif req.form['cmd'][0] == 'filelog':
810 elif req.form['cmd'][0] == 'filelog':
811 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
811 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
812
812
813 elif req.form['cmd'][0] == 'heads':
813 elif req.form['cmd'][0] == 'heads':
814 req.httphdr("application/mercurial-0.1")
814 req.httphdr("application/mercurial-0.1")
815 h = self.repo.heads()
815 h = self.repo.heads()
816 req.write(" ".join(map(hex, h)) + "\n")
816 req.write(" ".join(map(hex, h)) + "\n")
817
817
818 elif req.form['cmd'][0] == 'branches':
818 elif req.form['cmd'][0] == 'branches':
819 req.httphdr("application/mercurial-0.1")
819 req.httphdr("application/mercurial-0.1")
820 nodes = []
820 nodes = []
821 if req.form.has_key('nodes'):
821 if req.form.has_key('nodes'):
822 nodes = map(bin, req.form['nodes'][0].split(" "))
822 nodes = map(bin, req.form['nodes'][0].split(" "))
823 for b in self.repo.branches(nodes):
823 for b in self.repo.branches(nodes):
824 req.write(" ".join(map(hex, b)) + "\n")
824 req.write(" ".join(map(hex, b)) + "\n")
825
825
826 elif req.form['cmd'][0] == 'between':
826 elif req.form['cmd'][0] == 'between':
827 req.httphdr("application/mercurial-0.1")
827 req.httphdr("application/mercurial-0.1")
828 nodes = []
828 nodes = []
829 if req.form.has_key('pairs'):
829 if req.form.has_key('pairs'):
830 pairs = [map(bin, p.split("-"))
830 pairs = [map(bin, p.split("-"))
831 for p in req.form['pairs'][0].split(" ")]
831 for p in req.form['pairs'][0].split(" ")]
832 for b in self.repo.between(pairs):
832 for b in self.repo.between(pairs):
833 req.write(" ".join(map(hex, b)) + "\n")
833 req.write(" ".join(map(hex, b)) + "\n")
834
834
835 elif req.form['cmd'][0] == 'changegroup':
835 elif req.form['cmd'][0] == 'changegroup':
836 req.httphdr("application/mercurial-0.1")
836 req.httphdr("application/mercurial-0.1")
837 nodes = []
837 nodes = []
838 if not self.allowpull:
838 if not self.allowpull:
839 return
839 return
840
840
841 if req.form.has_key('roots'):
841 if req.form.has_key('roots'):
842 nodes = map(bin, req.form['roots'][0].split(" "))
842 nodes = map(bin, req.form['roots'][0].split(" "))
843
843
844 z = zlib.compressobj()
844 z = zlib.compressobj()
845 f = self.repo.changegroup(nodes)
845 f = self.repo.changegroup(nodes)
846 while 1:
846 while 1:
847 chunk = f.read(4096)
847 chunk = f.read(4096)
848 if not chunk:
848 if not chunk:
849 break
849 break
850 req.write(z.compress(chunk))
850 req.write(z.compress(chunk))
851
851
852 req.write(z.flush())
852 req.write(z.flush())
853
853
854 elif req.form['cmd'][0] == 'archive':
854 elif req.form['cmd'][0] == 'archive':
855 changeset = self.repo.lookup(req.form['node'][0])
855 changeset = self.repo.lookup(req.form['node'][0])
856 type = req.form['type'][0]
856 type = req.form['type'][0]
857 if (type in self.archives and
857 if (type in self.archives and
858 self.repo.ui.configbool("web", "allow" + type, False)):
858 self.repo.ui.configbool("web", "allow" + type, False)):
859 self.archive(req, changeset, type)
859 self.archive(req, changeset, type)
860 return
860 return
861
861
862 req.write(self.t("error"))
862 req.write(self.t("error"))
863
863
864 else:
864 else:
865 req.write(self.t("error"))
865 req.write(self.t("error"))
866
866
867 def create_server(repo):
867 def create_server(repo):
868
868
869 def openlog(opt, default):
869 def openlog(opt, default):
870 if opt and opt != '-':
870 if opt and opt != '-':
871 return open(opt, 'w')
871 return open(opt, 'w')
872 return default
872 return default
873
873
874 address = repo.ui.config("web", "address", "")
874 address = repo.ui.config("web", "address", "")
875 port = int(repo.ui.config("web", "port", 8000))
875 port = int(repo.ui.config("web", "port", 8000))
876 use_ipv6 = repo.ui.configbool("web", "ipv6")
876 use_ipv6 = repo.ui.configbool("web", "ipv6")
877 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
877 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
878 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
878 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
879
879
880 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
880 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
881 address_family = getattr(socket, 'AF_INET6', None)
881 address_family = getattr(socket, 'AF_INET6', None)
882
882
883 def __init__(self, *args, **kwargs):
883 def __init__(self, *args, **kwargs):
884 if self.address_family is None:
884 if self.address_family is None:
885 raise hg.RepoError(_('IPv6 not available on this system'))
885 raise hg.RepoError(_('IPv6 not available on this system'))
886 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
886 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
887
887
888 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
888 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
889 def log_error(self, format, *args):
889 def log_error(self, format, *args):
890 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
890 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
891 self.log_date_time_string(),
891 self.log_date_time_string(),
892 format % args))
892 format % args))
893
893
894 def log_message(self, format, *args):
894 def log_message(self, format, *args):
895 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
895 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
896 self.log_date_time_string(),
896 self.log_date_time_string(),
897 format % args))
897 format % args))
898
898
899 def do_POST(self):
899 def do_POST(self):
900 try:
900 try:
901 self.do_hgweb()
901 self.do_hgweb()
902 except socket.error, inst:
902 except socket.error, inst:
903 if inst[0] != errno.EPIPE:
903 if inst[0] != errno.EPIPE:
904 raise
904 raise
905
905
906 def do_GET(self):
906 def do_GET(self):
907 self.do_POST()
907 self.do_POST()
908
908
909 def do_hgweb(self):
909 def do_hgweb(self):
910 query = ""
910 query = ""
911 p = self.path.find("?")
911 p = self.path.find("?")
912 if p:
912 if p:
913 query = self.path[p + 1:]
913 query = self.path[p + 1:]
914 query = query.replace('+', ' ')
914 query = query.replace('+', ' ')
915
915
916 env = {}
916 env = {}
917 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
917 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
918 env['REQUEST_METHOD'] = self.command
918 env['REQUEST_METHOD'] = self.command
919 env['SERVER_NAME'] = self.server.server_name
919 env['SERVER_NAME'] = self.server.server_name
920 env['SERVER_PORT'] = str(self.server.server_port)
920 env['SERVER_PORT'] = str(self.server.server_port)
921 env['REQUEST_URI'] = "/"
921 env['REQUEST_URI'] = "/"
922 if query:
922 if query:
923 env['QUERY_STRING'] = query
923 env['QUERY_STRING'] = query
924 host = self.address_string()
924 host = self.address_string()
925 if host != self.client_address[0]:
925 if host != self.client_address[0]:
926 env['REMOTE_HOST'] = host
926 env['REMOTE_HOST'] = host
927 env['REMOTE_ADDR'] = self.client_address[0]
927 env['REMOTE_ADDR'] = self.client_address[0]
928
928
929 if self.headers.typeheader is None:
929 if self.headers.typeheader is None:
930 env['CONTENT_TYPE'] = self.headers.type
930 env['CONTENT_TYPE'] = self.headers.type
931 else:
931 else:
932 env['CONTENT_TYPE'] = self.headers.typeheader
932 env['CONTENT_TYPE'] = self.headers.typeheader
933 length = self.headers.getheader('content-length')
933 length = self.headers.getheader('content-length')
934 if length:
934 if length:
935 env['CONTENT_LENGTH'] = length
935 env['CONTENT_LENGTH'] = length
936 accept = []
936 accept = []
937 for line in self.headers.getallmatchingheaders('accept'):
937 for line in self.headers.getallmatchingheaders('accept'):
938 if line[:1] in "\t\n\r ":
938 if line[:1] in "\t\n\r ":
939 accept.append(line.strip())
939 accept.append(line.strip())
940 else:
940 else:
941 accept = accept + line[7:].split(',')
941 accept = accept + line[7:].split(',')
942 env['HTTP_ACCEPT'] = ','.join(accept)
942 env['HTTP_ACCEPT'] = ','.join(accept)
943
943
944 req = hgrequest(self.rfile, self.wfile, env)
944 req = hgrequest(self.rfile, self.wfile, env)
945 self.send_response(200, "Script output follows")
945 self.send_response(200, "Script output follows")
946 hg.run(req)
946 hg.run(req)
947
947
948 hg = hgweb(repo)
948 hg = hgweb(repo)
949 if use_ipv6:
949 if use_ipv6:
950 return IPv6HTTPServer((address, port), hgwebhandler)
950 return IPv6HTTPServer((address, port), hgwebhandler)
951 else:
951 else:
952 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
952 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
953
953
954 # This is a stopgap
954 # This is a stopgap
955 class hgwebdir:
955 class hgwebdir(object):
956 def __init__(self, config):
956 def __init__(self, config):
957 def cleannames(items):
957 def cleannames(items):
958 return [(name.strip('/'), path) for name, path in items]
958 return [(name.strip('/'), path) for name, path in items]
959
959
960 if type(config) == type([]):
960 if type(config) == type([]):
961 self.repos = cleannames(config)
961 self.repos = cleannames(config)
962 elif type(config) == type({}):
962 elif type(config) == type({}):
963 self.repos = cleannames(config.items())
963 self.repos = cleannames(config.items())
964 self.repos.sort()
964 self.repos.sort()
965 else:
965 else:
966 cp = ConfigParser.SafeConfigParser()
966 cp = ConfigParser.SafeConfigParser()
967 cp.read(config)
967 cp.read(config)
968 self.repos = cleannames(cp.items("paths"))
968 self.repos = cleannames(cp.items("paths"))
969 self.repos.sort()
969 self.repos.sort()
970
970
971 def run(self, req=hgrequest()):
971 def run(self, req=hgrequest()):
972 def header(**map):
972 def header(**map):
973 yield tmpl("header", **map)
973 yield tmpl("header", **map)
974
974
975 def footer(**map):
975 def footer(**map):
976 yield tmpl("footer", **map)
976 yield tmpl("footer", **map)
977
977
978 m = os.path.join(templatepath(), "map")
978 m = os.path.join(templatepath(), "map")
979 tmpl = templater(m, common_filters,
979 tmpl = templater(m, common_filters,
980 {"header": header, "footer": footer})
980 {"header": header, "footer": footer})
981
981
982 def entries(**map):
982 def entries(**map):
983 parity = 0
983 parity = 0
984 for name, path in self.repos:
984 for name, path in self.repos:
985 u = ui.ui()
985 u = ui.ui()
986 try:
986 try:
987 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
987 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
988 except IOError:
988 except IOError:
989 pass
989 pass
990 get = u.config
990 get = u.config
991
991
992 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
992 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
993 .replace("//", "/"))
993 .replace("//", "/"))
994
994
995 # update time with local timezone
995 # update time with local timezone
996 try:
996 try:
997 d = (get_mtime(path), util.makedate()[1])
997 d = (get_mtime(path), util.makedate()[1])
998 except OSError:
998 except OSError:
999 continue
999 continue
1000
1000
1001 yield dict(contact=(get("ui", "username") or # preferred
1001 yield dict(contact=(get("ui", "username") or # preferred
1002 get("web", "contact") or # deprecated
1002 get("web", "contact") or # deprecated
1003 get("web", "author", "unknown")), # also
1003 get("web", "author", "unknown")), # also
1004 name=get("web", "name", name),
1004 name=get("web", "name", name),
1005 url=url,
1005 url=url,
1006 parity=parity,
1006 parity=parity,
1007 shortdesc=get("web", "description", "unknown"),
1007 shortdesc=get("web", "description", "unknown"),
1008 lastupdate=d)
1008 lastupdate=d)
1009
1009
1010 parity = 1 - parity
1010 parity = 1 - parity
1011
1011
1012 virtual = req.env.get("PATH_INFO", "").strip('/')
1012 virtual = req.env.get("PATH_INFO", "").strip('/')
1013 if virtual:
1013 if virtual:
1014 real = dict(self.repos).get(virtual)
1014 real = dict(self.repos).get(virtual)
1015 if real:
1015 if real:
1016 try:
1016 try:
1017 hgweb(real).run(req)
1017 hgweb(real).run(req)
1018 except IOError, inst:
1018 except IOError, inst:
1019 req.write(tmpl("error", error=inst.strerror))
1019 req.write(tmpl("error", error=inst.strerror))
1020 except hg.RepoError, inst:
1020 except hg.RepoError, inst:
1021 req.write(tmpl("error", error=str(inst)))
1021 req.write(tmpl("error", error=str(inst)))
1022 else:
1022 else:
1023 req.write(tmpl("notfound", repo=virtual))
1023 req.write(tmpl("notfound", repo=virtual))
1024 else:
1024 else:
1025 req.write(tmpl("index", entries=entries))
1025 req.write(tmpl("index", entries=entries))
@@ -1,24 +1,24 b''
1 # httprangereader.py - just what it says
1 # httprangereader.py - just what it says
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import byterange, urllib2
8 import byterange, urllib2
9
9
10 class httprangereader:
10 class httprangereader(object):
11 def __init__(self, url):
11 def __init__(self, url):
12 self.url = url
12 self.url = url
13 self.pos = 0
13 self.pos = 0
14 def seek(self, pos):
14 def seek(self, pos):
15 self.pos = pos
15 self.pos = pos
16 def read(self, bytes=None):
16 def read(self, bytes=None):
17 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
17 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
18 urllib2.install_opener(opener)
18 urllib2.install_opener(opener)
19 req = urllib2.Request(self.url)
19 req = urllib2.Request(self.url)
20 end = ''
20 end = ''
21 if bytes: end = self.pos + bytes
21 if bytes: end = self.pos + bytes
22 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
22 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
23 f = urllib2.urlopen(req)
23 f = urllib2.urlopen(req)
24 return f.read()
24 return f.read()
@@ -1,1780 +1,1780 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository:
15 class localrepository(object):
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp: raise repo.RepoError(_("no repo found"))
22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 path = p
23 path = p
24 self.path = os.path.join(path, ".hg")
24 self.path = os.path.join(path, ".hg")
25
25
26 if not create and not os.path.isdir(self.path):
26 if not create and not os.path.isdir(self.path):
27 raise repo.RepoError(_("repository %s not found") % self.path)
27 raise repo.RepoError(_("repository %s not found") % self.path)
28
28
29 self.root = os.path.abspath(path)
29 self.root = os.path.abspath(path)
30 self.ui = ui
30 self.ui = ui
31 self.opener = util.opener(self.path)
31 self.opener = util.opener(self.path)
32 self.wopener = util.opener(self.root)
32 self.wopener = util.opener(self.root)
33 self.manifest = manifest.manifest(self.opener)
33 self.manifest = manifest.manifest(self.opener)
34 self.changelog = changelog.changelog(self.opener)
34 self.changelog = changelog.changelog(self.opener)
35 self.tagscache = None
35 self.tagscache = None
36 self.nodetagscache = None
36 self.nodetagscache = None
37 self.encodepats = None
37 self.encodepats = None
38 self.decodepats = None
38 self.decodepats = None
39
39
40 if create:
40 if create:
41 os.mkdir(self.path)
41 os.mkdir(self.path)
42 os.mkdir(self.join("data"))
42 os.mkdir(self.join("data"))
43
43
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 try:
45 try:
46 self.ui.readconfig(self.join("hgrc"))
46 self.ui.readconfig(self.join("hgrc"))
47 except IOError: pass
47 except IOError: pass
48
48
49 def hook(self, name, **args):
49 def hook(self, name, **args):
50 def runhook(name, cmd):
50 def runhook(name, cmd):
51 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
51 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
52 old = {}
52 old = {}
53 for k, v in args.items():
53 for k, v in args.items():
54 k = k.upper()
54 k = k.upper()
55 old[k] = os.environ.get(k, None)
55 old[k] = os.environ.get(k, None)
56 os.environ[k] = v
56 os.environ[k] = v
57
57
58 # Hooks run in the repository root
58 # Hooks run in the repository root
59 olddir = os.getcwd()
59 olddir = os.getcwd()
60 os.chdir(self.root)
60 os.chdir(self.root)
61 r = os.system(cmd)
61 r = os.system(cmd)
62 os.chdir(olddir)
62 os.chdir(olddir)
63
63
64 for k, v in old.items():
64 for k, v in old.items():
65 if v != None:
65 if v != None:
66 os.environ[k] = v
66 os.environ[k] = v
67 else:
67 else:
68 del os.environ[k]
68 del os.environ[k]
69
69
70 if r:
70 if r:
71 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
71 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
72 (name, r))
72 (name, r))
73 return False
73 return False
74 return True
74 return True
75
75
76 r = True
76 r = True
77 for hname, cmd in self.ui.configitems("hooks"):
77 for hname, cmd in self.ui.configitems("hooks"):
78 s = hname.split(".")
78 s = hname.split(".")
79 if s[0] == name and cmd:
79 if s[0] == name and cmd:
80 r = runhook(hname, cmd) and r
80 r = runhook(hname, cmd) and r
81 return r
81 return r
82
82
83 def tags(self):
83 def tags(self):
84 '''return a mapping of tag to node'''
84 '''return a mapping of tag to node'''
85 if not self.tagscache:
85 if not self.tagscache:
86 self.tagscache = {}
86 self.tagscache = {}
87 def addtag(self, k, n):
87 def addtag(self, k, n):
88 try:
88 try:
89 bin_n = bin(n)
89 bin_n = bin(n)
90 except TypeError:
90 except TypeError:
91 bin_n = ''
91 bin_n = ''
92 self.tagscache[k.strip()] = bin_n
92 self.tagscache[k.strip()] = bin_n
93
93
94 try:
94 try:
95 # read each head of the tags file, ending with the tip
95 # read each head of the tags file, ending with the tip
96 # and add each tag found to the map, with "newer" ones
96 # and add each tag found to the map, with "newer" ones
97 # taking precedence
97 # taking precedence
98 fl = self.file(".hgtags")
98 fl = self.file(".hgtags")
99 h = fl.heads()
99 h = fl.heads()
100 h.reverse()
100 h.reverse()
101 for r in h:
101 for r in h:
102 for l in fl.read(r).splitlines():
102 for l in fl.read(r).splitlines():
103 if l:
103 if l:
104 n, k = l.split(" ", 1)
104 n, k = l.split(" ", 1)
105 addtag(self, k, n)
105 addtag(self, k, n)
106 except KeyError:
106 except KeyError:
107 pass
107 pass
108
108
109 try:
109 try:
110 f = self.opener("localtags")
110 f = self.opener("localtags")
111 for l in f:
111 for l in f:
112 n, k = l.split(" ", 1)
112 n, k = l.split(" ", 1)
113 addtag(self, k, n)
113 addtag(self, k, n)
114 except IOError:
114 except IOError:
115 pass
115 pass
116
116
117 self.tagscache['tip'] = self.changelog.tip()
117 self.tagscache['tip'] = self.changelog.tip()
118
118
119 return self.tagscache
119 return self.tagscache
120
120
121 def tagslist(self):
121 def tagslist(self):
122 '''return a list of tags ordered by revision'''
122 '''return a list of tags ordered by revision'''
123 l = []
123 l = []
124 for t, n in self.tags().items():
124 for t, n in self.tags().items():
125 try:
125 try:
126 r = self.changelog.rev(n)
126 r = self.changelog.rev(n)
127 except:
127 except:
128 r = -2 # sort to the beginning of the list if unknown
128 r = -2 # sort to the beginning of the list if unknown
129 l.append((r,t,n))
129 l.append((r,t,n))
130 l.sort()
130 l.sort()
131 return [(t,n) for r,t,n in l]
131 return [(t,n) for r,t,n in l]
132
132
133 def nodetags(self, node):
133 def nodetags(self, node):
134 '''return the tags associated with a node'''
134 '''return the tags associated with a node'''
135 if not self.nodetagscache:
135 if not self.nodetagscache:
136 self.nodetagscache = {}
136 self.nodetagscache = {}
137 for t,n in self.tags().items():
137 for t,n in self.tags().items():
138 self.nodetagscache.setdefault(n,[]).append(t)
138 self.nodetagscache.setdefault(n,[]).append(t)
139 return self.nodetagscache.get(node, [])
139 return self.nodetagscache.get(node, [])
140
140
141 def lookup(self, key):
141 def lookup(self, key):
142 try:
142 try:
143 return self.tags()[key]
143 return self.tags()[key]
144 except KeyError:
144 except KeyError:
145 try:
145 try:
146 return self.changelog.lookup(key)
146 return self.changelog.lookup(key)
147 except:
147 except:
148 raise repo.RepoError(_("unknown revision '%s'") % key)
148 raise repo.RepoError(_("unknown revision '%s'") % key)
149
149
150 def dev(self):
150 def dev(self):
151 return os.stat(self.path).st_dev
151 return os.stat(self.path).st_dev
152
152
153 def local(self):
153 def local(self):
154 return True
154 return True
155
155
156 def join(self, f):
156 def join(self, f):
157 return os.path.join(self.path, f)
157 return os.path.join(self.path, f)
158
158
159 def wjoin(self, f):
159 def wjoin(self, f):
160 return os.path.join(self.root, f)
160 return os.path.join(self.root, f)
161
161
162 def file(self, f):
162 def file(self, f):
163 if f[0] == '/': f = f[1:]
163 if f[0] == '/': f = f[1:]
164 return filelog.filelog(self.opener, f)
164 return filelog.filelog(self.opener, f)
165
165
166 def getcwd(self):
166 def getcwd(self):
167 return self.dirstate.getcwd()
167 return self.dirstate.getcwd()
168
168
169 def wfile(self, f, mode='r'):
169 def wfile(self, f, mode='r'):
170 return self.wopener(f, mode)
170 return self.wopener(f, mode)
171
171
172 def wread(self, filename):
172 def wread(self, filename):
173 if self.encodepats == None:
173 if self.encodepats == None:
174 l = []
174 l = []
175 for pat, cmd in self.ui.configitems("encode"):
175 for pat, cmd in self.ui.configitems("encode"):
176 mf = util.matcher("", "/", [pat], [], [])[1]
176 mf = util.matcher("", "/", [pat], [], [])[1]
177 l.append((mf, cmd))
177 l.append((mf, cmd))
178 self.encodepats = l
178 self.encodepats = l
179
179
180 data = self.wopener(filename, 'r').read()
180 data = self.wopener(filename, 'r').read()
181
181
182 for mf, cmd in self.encodepats:
182 for mf, cmd in self.encodepats:
183 if mf(filename):
183 if mf(filename):
184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
185 data = util.filter(data, cmd)
185 data = util.filter(data, cmd)
186 break
186 break
187
187
188 return data
188 return data
189
189
190 def wwrite(self, filename, data, fd=None):
190 def wwrite(self, filename, data, fd=None):
191 if self.decodepats == None:
191 if self.decodepats == None:
192 l = []
192 l = []
193 for pat, cmd in self.ui.configitems("decode"):
193 for pat, cmd in self.ui.configitems("decode"):
194 mf = util.matcher("", "/", [pat], [], [])[1]
194 mf = util.matcher("", "/", [pat], [], [])[1]
195 l.append((mf, cmd))
195 l.append((mf, cmd))
196 self.decodepats = l
196 self.decodepats = l
197
197
198 for mf, cmd in self.decodepats:
198 for mf, cmd in self.decodepats:
199 if mf(filename):
199 if mf(filename):
200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
201 data = util.filter(data, cmd)
201 data = util.filter(data, cmd)
202 break
202 break
203
203
204 if fd:
204 if fd:
205 return fd.write(data)
205 return fd.write(data)
206 return self.wopener(filename, 'w').write(data)
206 return self.wopener(filename, 'w').write(data)
207
207
208 def transaction(self):
208 def transaction(self):
209 # save dirstate for undo
209 # save dirstate for undo
210 try:
210 try:
211 ds = self.opener("dirstate").read()
211 ds = self.opener("dirstate").read()
212 except IOError:
212 except IOError:
213 ds = ""
213 ds = ""
214 self.opener("journal.dirstate", "w").write(ds)
214 self.opener("journal.dirstate", "w").write(ds)
215
215
216 def after():
216 def after():
217 util.rename(self.join("journal"), self.join("undo"))
217 util.rename(self.join("journal"), self.join("undo"))
218 util.rename(self.join("journal.dirstate"),
218 util.rename(self.join("journal.dirstate"),
219 self.join("undo.dirstate"))
219 self.join("undo.dirstate"))
220
220
221 return transaction.transaction(self.ui.warn, self.opener,
221 return transaction.transaction(self.ui.warn, self.opener,
222 self.join("journal"), after)
222 self.join("journal"), after)
223
223
224 def recover(self):
224 def recover(self):
225 lock = self.lock()
225 lock = self.lock()
226 if os.path.exists(self.join("journal")):
226 if os.path.exists(self.join("journal")):
227 self.ui.status(_("rolling back interrupted transaction\n"))
227 self.ui.status(_("rolling back interrupted transaction\n"))
228 transaction.rollback(self.opener, self.join("journal"))
228 transaction.rollback(self.opener, self.join("journal"))
229 return True
229 return True
230 else:
230 else:
231 self.ui.warn(_("no interrupted transaction available\n"))
231 self.ui.warn(_("no interrupted transaction available\n"))
232 return False
232 return False
233
233
234 def undo(self):
234 def undo(self):
235 wlock = self.wlock()
235 wlock = self.wlock()
236 lock = self.lock()
236 lock = self.lock()
237 if os.path.exists(self.join("undo")):
237 if os.path.exists(self.join("undo")):
238 self.ui.status(_("rolling back last transaction\n"))
238 self.ui.status(_("rolling back last transaction\n"))
239 transaction.rollback(self.opener, self.join("undo"))
239 transaction.rollback(self.opener, self.join("undo"))
240 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
240 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
241 self.dirstate.read()
241 self.dirstate.read()
242 else:
242 else:
243 self.ui.warn(_("no undo information available\n"))
243 self.ui.warn(_("no undo information available\n"))
244
244
245 def lock(self, wait=1):
245 def lock(self, wait=1):
246 try:
246 try:
247 return lock.lock(self.join("lock"), 0)
247 return lock.lock(self.join("lock"), 0)
248 except lock.LockHeld, inst:
248 except lock.LockHeld, inst:
249 if wait:
249 if wait:
250 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
250 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
251 return lock.lock(self.join("lock"), wait)
251 return lock.lock(self.join("lock"), wait)
252 raise inst
252 raise inst
253
253
254 def wlock(self, wait=1):
254 def wlock(self, wait=1):
255 try:
255 try:
256 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
256 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
257 except lock.LockHeld, inst:
257 except lock.LockHeld, inst:
258 if not wait:
258 if not wait:
259 raise inst
259 raise inst
260 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
260 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
261 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
261 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
262 self.dirstate.read()
262 self.dirstate.read()
263 return wlock
263 return wlock
264
264
265 def rawcommit(self, files, text, user, date, p1=None, p2=None):
265 def rawcommit(self, files, text, user, date, p1=None, p2=None):
266 orig_parent = self.dirstate.parents()[0] or nullid
266 orig_parent = self.dirstate.parents()[0] or nullid
267 p1 = p1 or self.dirstate.parents()[0] or nullid
267 p1 = p1 or self.dirstate.parents()[0] or nullid
268 p2 = p2 or self.dirstate.parents()[1] or nullid
268 p2 = p2 or self.dirstate.parents()[1] or nullid
269 c1 = self.changelog.read(p1)
269 c1 = self.changelog.read(p1)
270 c2 = self.changelog.read(p2)
270 c2 = self.changelog.read(p2)
271 m1 = self.manifest.read(c1[0])
271 m1 = self.manifest.read(c1[0])
272 mf1 = self.manifest.readflags(c1[0])
272 mf1 = self.manifest.readflags(c1[0])
273 m2 = self.manifest.read(c2[0])
273 m2 = self.manifest.read(c2[0])
274 changed = []
274 changed = []
275
275
276 if orig_parent == p1:
276 if orig_parent == p1:
277 update_dirstate = 1
277 update_dirstate = 1
278 else:
278 else:
279 update_dirstate = 0
279 update_dirstate = 0
280
280
281 wlock = self.wlock()
281 wlock = self.wlock()
282 lock = self.lock()
282 lock = self.lock()
283 tr = self.transaction()
283 tr = self.transaction()
284 mm = m1.copy()
284 mm = m1.copy()
285 mfm = mf1.copy()
285 mfm = mf1.copy()
286 linkrev = self.changelog.count()
286 linkrev = self.changelog.count()
287 for f in files:
287 for f in files:
288 try:
288 try:
289 t = self.wread(f)
289 t = self.wread(f)
290 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
290 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
291 r = self.file(f)
291 r = self.file(f)
292 mfm[f] = tm
292 mfm[f] = tm
293
293
294 fp1 = m1.get(f, nullid)
294 fp1 = m1.get(f, nullid)
295 fp2 = m2.get(f, nullid)
295 fp2 = m2.get(f, nullid)
296
296
297 # is the same revision on two branches of a merge?
297 # is the same revision on two branches of a merge?
298 if fp2 == fp1:
298 if fp2 == fp1:
299 fp2 = nullid
299 fp2 = nullid
300
300
301 if fp2 != nullid:
301 if fp2 != nullid:
302 # is one parent an ancestor of the other?
302 # is one parent an ancestor of the other?
303 fpa = r.ancestor(fp1, fp2)
303 fpa = r.ancestor(fp1, fp2)
304 if fpa == fp1:
304 if fpa == fp1:
305 fp1, fp2 = fp2, nullid
305 fp1, fp2 = fp2, nullid
306 elif fpa == fp2:
306 elif fpa == fp2:
307 fp2 = nullid
307 fp2 = nullid
308
308
309 # is the file unmodified from the parent?
309 # is the file unmodified from the parent?
310 if t == r.read(fp1):
310 if t == r.read(fp1):
311 # record the proper existing parent in manifest
311 # record the proper existing parent in manifest
312 # no need to add a revision
312 # no need to add a revision
313 mm[f] = fp1
313 mm[f] = fp1
314 continue
314 continue
315
315
316 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
316 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
317 changed.append(f)
317 changed.append(f)
318 if update_dirstate:
318 if update_dirstate:
319 self.dirstate.update([f], "n")
319 self.dirstate.update([f], "n")
320 except IOError:
320 except IOError:
321 try:
321 try:
322 del mm[f]
322 del mm[f]
323 del mfm[f]
323 del mfm[f]
324 if update_dirstate:
324 if update_dirstate:
325 self.dirstate.forget([f])
325 self.dirstate.forget([f])
326 except:
326 except:
327 # deleted from p2?
327 # deleted from p2?
328 pass
328 pass
329
329
330 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
330 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
331 user = user or self.ui.username()
331 user = user or self.ui.username()
332 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
332 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
333 tr.close()
333 tr.close()
334 if update_dirstate:
334 if update_dirstate:
335 self.dirstate.setparents(n, nullid)
335 self.dirstate.setparents(n, nullid)
336
336
    def commit(self, files = None, text = "", user = None, date = None,
               match = util.always, force=False):
        """Commit working-directory changes as a new changeset.

        files - explicit list of files to commit; when empty/None the
                dirstate is scanned instead (filtered by 'match')
        text  - commit message; if empty the user is prompted via ui.edit
        user  - committer name; defaults to ui.username()
        date  - passed through to changelog.add
        force - commit even when nothing changed (non-merge case only)

        Returns the new changeset node, or None when nothing was committed
        (no changes, the precommit hook vetoed it, the edited message was
        empty, or the commit hook vetoed it).
        """
        commit = []
        remove = []
        changed = []

        # partition the requested files into commit/remove sets based on
        # their dirstate status
        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            (c, a, d, u) = self.changes(match=match)
            commit = c + a
            remove = d

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        # a merge (p2 != nullid) is always committed, even if "empty"
        if not commit and not remove and not force and p2 == nullid:
            self.ui.status(_("nothing changed\n"))
            return None

        if not self.hook("precommit"):
            return None

        wlock = self.wlock()
        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                # refresh the exec flag from the working copy as we go
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wread(f)
            except IOError:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise

            r = self.file(f)

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                # record copy source and the source's revision in filelog
                # metadata; copies carry null filelog parents
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
                fp1, fp2 = nullid, nullid
            else:
                fp1 = m1.get(f, nullid)
                fp2 = m2.get(f, nullid)

            # is the same revision on two branches of a merge?
            if fp2 == fp1:
                fp2 = nullid

            if fp2 != nullid:
                # is one parent an ancestor of the other?
                fpa = r.ancestor(fp1, fp2)
                if fpa == fp1:
                    fp1, fp2 = fp2, nullid
                elif fpa == fp2:
                    fp2 = nullid

            # is the file unmodified from the parent?
            if not meta and t == r.read(fp1):
                # record the proper existing parent in manifest
                # no need to add a revision
                new[f] = fp1
                continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            # build a template listing the changes and let the user edit it
            edittext = ""
            if p2 != nullid:
                edittext += "HG: branch merge\n"
            edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in changed])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            if not changed and not remove:
                edittext += "HG: no files changed\n"
            edittext = self.ui.edit(edittext)
            if not edittext.rstrip():
                # empty message aborts the commit
                return None
            text = edittext

        user = user or self.ui.username()
        n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        if not self.hook("commit", node=hex(n)):
            return None
        return n
462
462
463 def walk(self, node=None, files=[], match=util.always):
463 def walk(self, node=None, files=[], match=util.always):
464 if node:
464 if node:
465 for fn in self.manifest.read(self.changelog.read(node)[0]):
465 for fn in self.manifest.read(self.changelog.read(node)[0]):
466 if match(fn): yield 'm', fn
466 if match(fn): yield 'm', fn
467 else:
467 else:
468 for src, fn in self.dirstate.walk(files, match):
468 for src, fn in self.dirstate.walk(files, match):
469 yield src, fn
469 yield src, fn
470
470
    def changes(self, node1 = None, node2 = None, files = [],
                match = util.always):
        """Compare two revisions, or a revision and the working directory.

        node1 defaults to the working directory's first parent, node2 to
        the working directory itself.  Returns a tuple of sorted lists
        (changed, added, deleted, unknown); 'unknown' is only populated
        when the working directory is involved.
        """
        mf2, u = None, []

        # compare working-dir file content against a manifest entry
        def fcmp(fn, mf):
            t1 = self.wread(fn)
            t2 = self.file(fn).read(mf.get(fn, nullid))
            return cmp(t1, t2)

        # read a manifest, keeping only files accepted by 'match'
        def mfmatches(node):
            mf = dict(self.manifest.read(node))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # are we comparing the working directory?
        if not node2:
            try:
                wlock = self.wlock(wait=0)
            except lock.LockHeld:
                # no lock: we can still report, but must not touch dirstate
                wlock = None
            l, c, a, d, u = self.dirstate.changes(files, match)

            # are we comparing working dir against its parent?
            if not node1:
                if l:
                    # do a full compare of any files that might have changed
                    change = self.changelog.read(self.dirstate.parents()[0])
                    mf2 = mfmatches(change[0])
                    for f in l:
                        if fcmp(f, mf2):
                            c.append(f)
                        elif wlock is not None:
                            # contents match: refresh the cached stat info in
                            # the dirstate so the compare is skipped next time
                            self.dirstate.update([f], "n")

                for l in c, a, d, u:
                    l.sort()

                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node2:
            if not mf2:
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
            # working-dir files get an empty-string node as a "dirty" marker
            for f in a + c + l:
                mf2[f] = ""
            for f in d:
                if f in mf2: del mf2[f]
        else:
            change = self.changelog.read(node2)
            mf2 = mfmatches(change[0])

        # flush lists from dirstate before comparing manifests
        c, a = [], []

        change = self.changelog.read(node1)
        mf1 = mfmatches(change[0])

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    # "" means working-dir entry: fall back to content compare
                    if mf2[fn] != "" or fcmp(fn, mf1):
                        c.append(fn)
                del mf1[fn]
            else:
                a.append(fn)

        # whatever is left in mf1 exists only on the node1 side
        d = mf1.keys()

        for l in c, a, d, u:
            l.sort()

        return (c, a, d, u)
547
547
548 def add(self, list):
548 def add(self, list):
549 wlock = self.wlock()
549 wlock = self.wlock()
550 for f in list:
550 for f in list:
551 p = self.wjoin(f)
551 p = self.wjoin(f)
552 if not os.path.exists(p):
552 if not os.path.exists(p):
553 self.ui.warn(_("%s does not exist!\n") % f)
553 self.ui.warn(_("%s does not exist!\n") % f)
554 elif not os.path.isfile(p):
554 elif not os.path.isfile(p):
555 self.ui.warn(_("%s not added: only files supported currently\n") % f)
555 self.ui.warn(_("%s not added: only files supported currently\n") % f)
556 elif self.dirstate.state(f) in 'an':
556 elif self.dirstate.state(f) in 'an':
557 self.ui.warn(_("%s already tracked!\n") % f)
557 self.ui.warn(_("%s already tracked!\n") % f)
558 else:
558 else:
559 self.dirstate.update([f], "a")
559 self.dirstate.update([f], "a")
560
560
561 def forget(self, list):
561 def forget(self, list):
562 wlock = self.wlock()
562 wlock = self.wlock()
563 for f in list:
563 for f in list:
564 if self.dirstate.state(f) not in 'ai':
564 if self.dirstate.state(f) not in 'ai':
565 self.ui.warn(_("%s not added!\n") % f)
565 self.ui.warn(_("%s not added!\n") % f)
566 else:
566 else:
567 self.dirstate.forget([f])
567 self.dirstate.forget([f])
568
568
    def remove(self, list, unlink=False):
        """Schedule files for removal at the next commit.

        With unlink=True the files are also deleted from the working
        directory first (a file that is already gone is not an error).
        With unlink=False the caller must have deleted the files already,
        otherwise a warning is issued and the file is left tracked.
        """
        if unlink:
            for f in list:
                try:
                    util.unlink(self.wjoin(f))
                except OSError, inst:
                    # already missing is fine when we are unlinking anyway
                    if inst.errno != errno.ENOENT: raise
        wlock = self.wlock()
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                # still present: either unlink failed or the caller forgot
                # to delete it; refuse to mark it removed
                self.ui.warn(_("%s still exists!\n") % f)
            elif self.dirstate.state(f) == 'a':
                # added but never committed: just drop the pending add
                self.ui.warn(_("%s never committed!\n") % f)
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn(_("%s not tracked!\n") % f)
            else:
                self.dirstate.update([f], "r")
588
588
589 def undelete(self, list):
589 def undelete(self, list):
590 p = self.dirstate.parents()[0]
590 p = self.dirstate.parents()[0]
591 mn = self.changelog.read(p)[0]
591 mn = self.changelog.read(p)[0]
592 mf = self.manifest.readflags(mn)
592 mf = self.manifest.readflags(mn)
593 m = self.manifest.read(mn)
593 m = self.manifest.read(mn)
594 wlock = self.wlock()
594 wlock = self.wlock()
595 for f in list:
595 for f in list:
596 if self.dirstate.state(f) not in "r":
596 if self.dirstate.state(f) not in "r":
597 self.ui.warn("%s not removed!\n" % f)
597 self.ui.warn("%s not removed!\n" % f)
598 else:
598 else:
599 t = self.file(f).read(m[f])
599 t = self.file(f).read(m[f])
600 self.wwrite(f, t)
600 self.wwrite(f, t)
601 util.set_exec(self.wjoin(f), mf[f])
601 util.set_exec(self.wjoin(f), mf[f])
602 self.dirstate.update([f], "n")
602 self.dirstate.update([f], "n")
603
603
604 def copy(self, source, dest):
604 def copy(self, source, dest):
605 p = self.wjoin(dest)
605 p = self.wjoin(dest)
606 if not os.path.exists(p):
606 if not os.path.exists(p):
607 self.ui.warn(_("%s does not exist!\n") % dest)
607 self.ui.warn(_("%s does not exist!\n") % dest)
608 elif not os.path.isfile(p):
608 elif not os.path.isfile(p):
609 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
609 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
610 else:
610 else:
611 wlock = self.wlock()
611 wlock = self.wlock()
612 if self.dirstate.state(dest) == '?':
612 if self.dirstate.state(dest) == '?':
613 self.dirstate.update([dest], "a")
613 self.dirstate.update([dest], "a")
614 self.dirstate.copy(source, dest)
614 self.dirstate.copy(source, dest)
615
615
616 def heads(self, start=None):
616 def heads(self, start=None):
617 heads = self.changelog.heads(start)
617 heads = self.changelog.heads(start)
618 # sort the output in rev descending order
618 # sort the output in rev descending order
619 heads = [(-self.changelog.rev(h), h) for h in heads]
619 heads = [(-self.changelog.rev(h), h) for h in heads]
620 heads.sort()
620 heads.sort()
621 return [n for (r, n) in heads]
621 return [n for (r, n) in heads]
622
622
    # branchlookup returns a dict giving a list of branches for
    # each head. A branch is defined as the tag of a node or
    # the branch of the node's parents. If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph:  a->b->c->d->e
    #                       \         /
    #                        aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13. Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents. It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        """Map each head node to the list of branch tags visible from it.

        See the block comment above for the elimination rules.
        """
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                # resume a second-parent branch, keeping the tags already
                # found on the way down
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            # 'tip' is not a branch tag
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        # reached the requested branch: stop descending here
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    # queue the second parent for a later traversal pass
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                # all nodes reachable from 'node' through the branches
                # dict, memoized in viscache
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
728
728
729 def branches(self, nodes):
729 def branches(self, nodes):
730 if not nodes: nodes = [self.changelog.tip()]
730 if not nodes: nodes = [self.changelog.tip()]
731 b = []
731 b = []
732 for n in nodes:
732 for n in nodes:
733 t = n
733 t = n
734 while n:
734 while n:
735 p = self.changelog.parents(n)
735 p = self.changelog.parents(n)
736 if p[1] != nullid or p[0] == nullid:
736 if p[1] != nullid or p[0] == nullid:
737 b.append((t, n, p[0], p[1]))
737 b.append((t, n, p[0], p[1]))
738 break
738 break
739 n = p[0]
739 n = p[0]
740 return b
740 return b
741
741
    def between(self, pairs):
        """For each (top, bottom) pair, sample nodes along the first-parent
        chain from top down to bottom.

        Walking from top, the node at step i is recorded whenever i == f,
        with f doubling after each hit (1, 2, 4, 8, ...), so the sample
        gets exponentially sparser away from top.  Used by the discovery
        protocol's binary search (see findincoming).
        """
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r
760
760
    def findincoming(self, remote, base=None, heads=None):
        """Find the roots of the changesets the remote has and we don't.

        base  - optional dict, filled in (as a side effect) with nodes
                known to both sides
        heads - remote heads to examine; defaults to remote.heads()

        Returns a list of the earliest-unknown nodes to fetch, or None
        when the remote has nothing we lack.
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        if not heads:
            heads = remote.heads()

        # split remote heads into ones we already have (common) and not
        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return None

        rep = {}
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            # the segment root's parents are both known,
                            # so the whole segment is new
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                            base[n[2]] = 1 # latest known
                            continue

                    # queue the (unknown) parents for the next request,
                    # deduplicated via 'rep'
                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                # batch the branch queries ten nodes at a time
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        if b[0] in m:
                            self.ui.debug(_("found base node %s\n") % short(b[0]))
                            base[b[0]] = 1
                        elif b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # adjacent sample points: boundary located
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        # recurse into the narrowed (p, i) range
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            self.ui.warn(_("warning: pulling from an unrelated repository!\n"))

        self.ui.note(_("found new changesets starting at ") +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
880
880
    def findoutgoing(self, remote, base=None, heads=None):
        """Return the roots of the set of changesets the remote lacks.

        base - optional precomputed dict of common nodes (e.g. from an
               earlier findincoming); when None it is computed here.
        """
        if base == None:
            base = {}
            self.findincoming(remote, base, heads)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)

        # this is the set of all roots we have to push
        return subset
910
910
911 def pull(self, remote, heads = None):
911 def pull(self, remote, heads = None):
912 lock = self.lock()
912 lock = self.lock()
913
913
914 # if we have an empty repo, fetch everything
914 # if we have an empty repo, fetch everything
915 if self.changelog.tip() == nullid:
915 if self.changelog.tip() == nullid:
916 self.ui.status(_("requesting all changes\n"))
916 self.ui.status(_("requesting all changes\n"))
917 fetch = [nullid]
917 fetch = [nullid]
918 else:
918 else:
919 fetch = self.findincoming(remote)
919 fetch = self.findincoming(remote)
920
920
921 if not fetch:
921 if not fetch:
922 self.ui.status(_("no changes found\n"))
922 self.ui.status(_("no changes found\n"))
923 return 1
923 return 1
924
924
925 if heads is None:
925 if heads is None:
926 cg = remote.changegroup(fetch)
926 cg = remote.changegroup(fetch)
927 else:
927 else:
928 cg = remote.changegroupsubset(fetch, heads)
928 cg = remote.changegroupsubset(fetch, heads)
929 return self.addchangegroup(cg)
929 return self.addchangegroup(cg)
930
930
931 def push(self, remote, force=False):
931 def push(self, remote, force=False):
932 lock = remote.lock()
932 lock = remote.lock()
933
933
934 base = {}
934 base = {}
935 heads = remote.heads()
935 heads = remote.heads()
936 inc = self.findincoming(remote, base, heads)
936 inc = self.findincoming(remote, base, heads)
937 if not force and inc:
937 if not force and inc:
938 self.ui.warn(_("abort: unsynced remote changes!\n"))
938 self.ui.warn(_("abort: unsynced remote changes!\n"))
939 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
939 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
940 return 1
940 return 1
941
941
942 update = self.findoutgoing(remote, base)
942 update = self.findoutgoing(remote, base)
943 if not update:
943 if not update:
944 self.ui.status(_("no changes found\n"))
944 self.ui.status(_("no changes found\n"))
945 return 1
945 return 1
946 elif not force:
946 elif not force:
947 if len(heads) < len(self.changelog.heads()):
947 if len(heads) < len(self.changelog.heads()):
948 self.ui.warn(_("abort: push creates new remote branches!\n"))
948 self.ui.warn(_("abort: push creates new remote branches!\n"))
949 self.ui.status(_("(did you forget to merge?"
949 self.ui.status(_("(did you forget to merge?"
950 " use push -f to force)\n"))
950 " use push -f to force)\n"))
951 return 1
951 return 1
952
952
953 cg = self.changegroup(update)
953 cg = self.changegroup(update)
954 return remote.addchangegroup(cg)
954 return remote.addchangegroup(cg)
955
955
956 def changegroupsubset(self, bases, heads):
956 def changegroupsubset(self, bases, heads):
957 """This function generates a changegroup consisting of all the nodes
957 """This function generates a changegroup consisting of all the nodes
958 that are descendents of any of the bases, and ancestors of any of
958 that are descendents of any of the bases, and ancestors of any of
959 the heads.
959 the heads.
960
960
961 It is fairly complex as determining which filenodes and which
961 It is fairly complex as determining which filenodes and which
962 manifest nodes need to be included for the changeset to be complete
962 manifest nodes need to be included for the changeset to be complete
963 is non-trivial.
963 is non-trivial.
964
964
965 Another wrinkle is doing the reverse, figuring out which changeset in
965 Another wrinkle is doing the reverse, figuring out which changeset in
966 the changegroup a particular filenode or manifestnode belongs to."""
966 the changegroup a particular filenode or manifestnode belongs to."""
967
967
968 # Set up some initial variables
968 # Set up some initial variables
969 # Make it easy to refer to self.changelog
969 # Make it easy to refer to self.changelog
970 cl = self.changelog
970 cl = self.changelog
971 # msng is short for missing - compute the list of changesets in this
971 # msng is short for missing - compute the list of changesets in this
972 # changegroup.
972 # changegroup.
973 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
973 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
974 # Some bases may turn out to be superfluous, and some heads may be
974 # Some bases may turn out to be superfluous, and some heads may be
975 # too. nodesbetween will return the minimal set of bases and heads
975 # too. nodesbetween will return the minimal set of bases and heads
976 # necessary to re-create the changegroup.
976 # necessary to re-create the changegroup.
977
977
978 # Known heads are the list of heads that it is assumed the recipient
978 # Known heads are the list of heads that it is assumed the recipient
979 # of this changegroup will know about.
979 # of this changegroup will know about.
980 knownheads = {}
980 knownheads = {}
981 # We assume that all parents of bases are known heads.
981 # We assume that all parents of bases are known heads.
982 for n in bases:
982 for n in bases:
983 for p in cl.parents(n):
983 for p in cl.parents(n):
984 if p != nullid:
984 if p != nullid:
985 knownheads[p] = 1
985 knownheads[p] = 1
986 knownheads = knownheads.keys()
986 knownheads = knownheads.keys()
987 if knownheads:
987 if knownheads:
988 # Now that we know what heads are known, we can compute which
988 # Now that we know what heads are known, we can compute which
989 # changesets are known. The recipient must know about all
989 # changesets are known. The recipient must know about all
990 # changesets required to reach the known heads from the null
990 # changesets required to reach the known heads from the null
991 # changeset.
991 # changeset.
992 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
992 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
993 junk = None
993 junk = None
994 # Transform the list into an ersatz set.
994 # Transform the list into an ersatz set.
995 has_cl_set = dict.fromkeys(has_cl_set)
995 has_cl_set = dict.fromkeys(has_cl_set)
996 else:
996 else:
997 # If there were no known heads, the recipient cannot be assumed to
997 # If there were no known heads, the recipient cannot be assumed to
998 # know about any changesets.
998 # know about any changesets.
999 has_cl_set = {}
999 has_cl_set = {}
1000
1000
1001 # Make it easy to refer to self.manifest
1001 # Make it easy to refer to self.manifest
1002 mnfst = self.manifest
1002 mnfst = self.manifest
1003 # We don't know which manifests are missing yet
1003 # We don't know which manifests are missing yet
1004 msng_mnfst_set = {}
1004 msng_mnfst_set = {}
1005 # Nor do we know which filenodes are missing.
1005 # Nor do we know which filenodes are missing.
1006 msng_filenode_set = {}
1006 msng_filenode_set = {}
1007
1007
1008 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1008 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1009 junk = None
1009 junk = None
1010
1010
1011 # A changeset always belongs to itself, so the changenode lookup
1011 # A changeset always belongs to itself, so the changenode lookup
1012 # function for a changenode is identity.
1012 # function for a changenode is identity.
1013 def identity(x):
1013 def identity(x):
1014 return x
1014 return x
1015
1015
1016 # A function generating function. Sets up an environment for the
1016 # A function generating function. Sets up an environment for the
1017 # inner function.
1017 # inner function.
1018 def cmp_by_rev_func(revlog):
1018 def cmp_by_rev_func(revlog):
1019 # Compare two nodes by their revision number in the environment's
1019 # Compare two nodes by their revision number in the environment's
1020 # revision history. Since the revision number both represents the
1020 # revision history. Since the revision number both represents the
1021 # most efficient order to read the nodes in, and represents a
1021 # most efficient order to read the nodes in, and represents a
1022 # topological sorting of the nodes, this function is often useful.
1022 # topological sorting of the nodes, this function is often useful.
1023 def cmp_by_rev(a, b):
1023 def cmp_by_rev(a, b):
1024 return cmp(revlog.rev(a), revlog.rev(b))
1024 return cmp(revlog.rev(a), revlog.rev(b))
1025 return cmp_by_rev
1025 return cmp_by_rev
1026
1026
1027 # If we determine that a particular file or manifest node must be a
1027 # If we determine that a particular file or manifest node must be a
1028 # node that the recipient of the changegroup will already have, we can
1028 # node that the recipient of the changegroup will already have, we can
1029 # also assume the recipient will have all the parents. This function
1029 # also assume the recipient will have all the parents. This function
1030 # prunes them from the set of missing nodes.
1030 # prunes them from the set of missing nodes.
1031 def prune_parents(revlog, hasset, msngset):
1031 def prune_parents(revlog, hasset, msngset):
1032 haslst = hasset.keys()
1032 haslst = hasset.keys()
1033 haslst.sort(cmp_by_rev_func(revlog))
1033 haslst.sort(cmp_by_rev_func(revlog))
1034 for node in haslst:
1034 for node in haslst:
1035 parentlst = [p for p in revlog.parents(node) if p != nullid]
1035 parentlst = [p for p in revlog.parents(node) if p != nullid]
1036 while parentlst:
1036 while parentlst:
1037 n = parentlst.pop()
1037 n = parentlst.pop()
1038 if n not in hasset:
1038 if n not in hasset:
1039 hasset[n] = 1
1039 hasset[n] = 1
1040 p = [p for p in revlog.parents(n) if p != nullid]
1040 p = [p for p in revlog.parents(n) if p != nullid]
1041 parentlst.extend(p)
1041 parentlst.extend(p)
1042 for n in hasset:
1042 for n in hasset:
1043 msngset.pop(n, None)
1043 msngset.pop(n, None)
1044
1044
1045 # This is a function generating function used to set up an environment
1045 # This is a function generating function used to set up an environment
1046 # for the inner function to execute in.
1046 # for the inner function to execute in.
1047 def manifest_and_file_collector(changedfileset):
1047 def manifest_and_file_collector(changedfileset):
1048 # This is an information gathering function that gathers
1048 # This is an information gathering function that gathers
1049 # information from each changeset node that goes out as part of
1049 # information from each changeset node that goes out as part of
1050 # the changegroup. The information gathered is a list of which
1050 # the changegroup. The information gathered is a list of which
1051 # manifest nodes are potentially required (the recipient may
1051 # manifest nodes are potentially required (the recipient may
1052 # already have them) and total list of all files which were
1052 # already have them) and total list of all files which were
1053 # changed in any changeset in the changegroup.
1053 # changed in any changeset in the changegroup.
1054 #
1054 #
1055 # We also remember the first changenode we saw any manifest
1055 # We also remember the first changenode we saw any manifest
1056 # referenced by so we can later determine which changenode 'owns'
1056 # referenced by so we can later determine which changenode 'owns'
1057 # the manifest.
1057 # the manifest.
1058 def collect_manifests_and_files(clnode):
1058 def collect_manifests_and_files(clnode):
1059 c = cl.read(clnode)
1059 c = cl.read(clnode)
1060 for f in c[3]:
1060 for f in c[3]:
1061 # This is to make sure we only have one instance of each
1061 # This is to make sure we only have one instance of each
1062 # filename string for each filename.
1062 # filename string for each filename.
1063 changedfileset.setdefault(f, f)
1063 changedfileset.setdefault(f, f)
1064 msng_mnfst_set.setdefault(c[0], clnode)
1064 msng_mnfst_set.setdefault(c[0], clnode)
1065 return collect_manifests_and_files
1065 return collect_manifests_and_files
1066
1066
1067 # Figure out which manifest nodes (of the ones we think might be part
1067 # Figure out which manifest nodes (of the ones we think might be part
1068 # of the changegroup) the recipient must know about and remove them
1068 # of the changegroup) the recipient must know about and remove them
1069 # from the changegroup.
1069 # from the changegroup.
1070 def prune_manifests():
1070 def prune_manifests():
1071 has_mnfst_set = {}
1071 has_mnfst_set = {}
1072 for n in msng_mnfst_set:
1072 for n in msng_mnfst_set:
1073 # If a 'missing' manifest thinks it belongs to a changenode
1073 # If a 'missing' manifest thinks it belongs to a changenode
1074 # the recipient is assumed to have, obviously the recipient
1074 # the recipient is assumed to have, obviously the recipient
1075 # must have that manifest.
1075 # must have that manifest.
1076 linknode = cl.node(mnfst.linkrev(n))
1076 linknode = cl.node(mnfst.linkrev(n))
1077 if linknode in has_cl_set:
1077 if linknode in has_cl_set:
1078 has_mnfst_set[n] = 1
1078 has_mnfst_set[n] = 1
1079 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1079 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1080
1080
1081 # Use the information collected in collect_manifests_and_files to say
1081 # Use the information collected in collect_manifests_and_files to say
1082 # which changenode any manifestnode belongs to.
1082 # which changenode any manifestnode belongs to.
1083 def lookup_manifest_link(mnfstnode):
1083 def lookup_manifest_link(mnfstnode):
1084 return msng_mnfst_set[mnfstnode]
1084 return msng_mnfst_set[mnfstnode]
1085
1085
1086 # A function generating function that sets up the initial environment
1086 # A function generating function that sets up the initial environment
1087 # the inner function.
1087 # the inner function.
1088 def filenode_collector(changedfiles):
1088 def filenode_collector(changedfiles):
1089 next_rev = [0]
1089 next_rev = [0]
1090 # This gathers information from each manifestnode included in the
1090 # This gathers information from each manifestnode included in the
1091 # changegroup about which filenodes the manifest node references
1091 # changegroup about which filenodes the manifest node references
1092 # so we can include those in the changegroup too.
1092 # so we can include those in the changegroup too.
1093 #
1093 #
1094 # It also remembers which changenode each filenode belongs to. It
1094 # It also remembers which changenode each filenode belongs to. It
1095 # does this by assuming the a filenode belongs to the changenode
1095 # does this by assuming the a filenode belongs to the changenode
1096 # the first manifest that references it belongs to.
1096 # the first manifest that references it belongs to.
1097 def collect_msng_filenodes(mnfstnode):
1097 def collect_msng_filenodes(mnfstnode):
1098 r = mnfst.rev(mnfstnode)
1098 r = mnfst.rev(mnfstnode)
1099 if r == next_rev[0]:
1099 if r == next_rev[0]:
1100 # If the last rev we looked at was the one just previous,
1100 # If the last rev we looked at was the one just previous,
1101 # we only need to see a diff.
1101 # we only need to see a diff.
1102 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1102 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1103 # For each line in the delta
1103 # For each line in the delta
1104 for dline in delta.splitlines():
1104 for dline in delta.splitlines():
1105 # get the filename and filenode for that line
1105 # get the filename and filenode for that line
1106 f, fnode = dline.split('\0')
1106 f, fnode = dline.split('\0')
1107 fnode = bin(fnode[:40])
1107 fnode = bin(fnode[:40])
1108 f = changedfiles.get(f, None)
1108 f = changedfiles.get(f, None)
1109 # And if the file is in the list of files we care
1109 # And if the file is in the list of files we care
1110 # about.
1110 # about.
1111 if f is not None:
1111 if f is not None:
1112 # Get the changenode this manifest belongs to
1112 # Get the changenode this manifest belongs to
1113 clnode = msng_mnfst_set[mnfstnode]
1113 clnode = msng_mnfst_set[mnfstnode]
1114 # Create the set of filenodes for the file if
1114 # Create the set of filenodes for the file if
1115 # there isn't one already.
1115 # there isn't one already.
1116 ndset = msng_filenode_set.setdefault(f, {})
1116 ndset = msng_filenode_set.setdefault(f, {})
1117 # And set the filenode's changelog node to the
1117 # And set the filenode's changelog node to the
1118 # manifest's if it hasn't been set already.
1118 # manifest's if it hasn't been set already.
1119 ndset.setdefault(fnode, clnode)
1119 ndset.setdefault(fnode, clnode)
1120 else:
1120 else:
1121 # Otherwise we need a full manifest.
1121 # Otherwise we need a full manifest.
1122 m = mnfst.read(mnfstnode)
1122 m = mnfst.read(mnfstnode)
1123 # For every file in we care about.
1123 # For every file in we care about.
1124 for f in changedfiles:
1124 for f in changedfiles:
1125 fnode = m.get(f, None)
1125 fnode = m.get(f, None)
1126 # If it's in the manifest
1126 # If it's in the manifest
1127 if fnode is not None:
1127 if fnode is not None:
1128 # See comments above.
1128 # See comments above.
1129 clnode = msng_mnfst_set[mnfstnode]
1129 clnode = msng_mnfst_set[mnfstnode]
1130 ndset = msng_filenode_set.setdefault(f, {})
1130 ndset = msng_filenode_set.setdefault(f, {})
1131 ndset.setdefault(fnode, clnode)
1131 ndset.setdefault(fnode, clnode)
1132 # Remember the revision we hope to see next.
1132 # Remember the revision we hope to see next.
1133 next_rev[0] = r + 1
1133 next_rev[0] = r + 1
1134 return collect_msng_filenodes
1134 return collect_msng_filenodes
1135
1135
1136 # We have a list of filenodes we think we need for a file, lets remove
1136 # We have a list of filenodes we think we need for a file, lets remove
1137 # all those we now the recipient must have.
1137 # all those we now the recipient must have.
1138 def prune_filenodes(f, filerevlog):
1138 def prune_filenodes(f, filerevlog):
1139 msngset = msng_filenode_set[f]
1139 msngset = msng_filenode_set[f]
1140 hasset = {}
1140 hasset = {}
1141 # If a 'missing' filenode thinks it belongs to a changenode we
1141 # If a 'missing' filenode thinks it belongs to a changenode we
1142 # assume the recipient must have, then the recipient must have
1142 # assume the recipient must have, then the recipient must have
1143 # that filenode.
1143 # that filenode.
1144 for n in msngset:
1144 for n in msngset:
1145 clnode = cl.node(filerevlog.linkrev(n))
1145 clnode = cl.node(filerevlog.linkrev(n))
1146 if clnode in has_cl_set:
1146 if clnode in has_cl_set:
1147 hasset[n] = 1
1147 hasset[n] = 1
1148 prune_parents(filerevlog, hasset, msngset)
1148 prune_parents(filerevlog, hasset, msngset)
1149
1149
1150 # A function generator function that sets up the a context for the
1150 # A function generator function that sets up the a context for the
1151 # inner function.
1151 # inner function.
1152 def lookup_filenode_link_func(fname):
1152 def lookup_filenode_link_func(fname):
1153 msngset = msng_filenode_set[fname]
1153 msngset = msng_filenode_set[fname]
1154 # Lookup the changenode the filenode belongs to.
1154 # Lookup the changenode the filenode belongs to.
1155 def lookup_filenode_link(fnode):
1155 def lookup_filenode_link(fnode):
1156 return msngset[fnode]
1156 return msngset[fnode]
1157 return lookup_filenode_link
1157 return lookup_filenode_link
1158
1158
1159 # Now that we have all theses utility functions to help out and
1159 # Now that we have all theses utility functions to help out and
1160 # logically divide up the task, generate the group.
1160 # logically divide up the task, generate the group.
1161 def gengroup():
1161 def gengroup():
1162 # The set of changed files starts empty.
1162 # The set of changed files starts empty.
1163 changedfiles = {}
1163 changedfiles = {}
1164 # Create a changenode group generator that will call our functions
1164 # Create a changenode group generator that will call our functions
1165 # back to lookup the owning changenode and collect information.
1165 # back to lookup the owning changenode and collect information.
1166 group = cl.group(msng_cl_lst, identity,
1166 group = cl.group(msng_cl_lst, identity,
1167 manifest_and_file_collector(changedfiles))
1167 manifest_and_file_collector(changedfiles))
1168 for chnk in group:
1168 for chnk in group:
1169 yield chnk
1169 yield chnk
1170
1170
1171 # The list of manifests has been collected by the generator
1171 # The list of manifests has been collected by the generator
1172 # calling our functions back.
1172 # calling our functions back.
1173 prune_manifests()
1173 prune_manifests()
1174 msng_mnfst_lst = msng_mnfst_set.keys()
1174 msng_mnfst_lst = msng_mnfst_set.keys()
1175 # Sort the manifestnodes by revision number.
1175 # Sort the manifestnodes by revision number.
1176 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1176 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1177 # Create a generator for the manifestnodes that calls our lookup
1177 # Create a generator for the manifestnodes that calls our lookup
1178 # and data collection functions back.
1178 # and data collection functions back.
1179 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1179 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1180 filenode_collector(changedfiles))
1180 filenode_collector(changedfiles))
1181 for chnk in group:
1181 for chnk in group:
1182 yield chnk
1182 yield chnk
1183
1183
1184 # These are no longer needed, dereference and toss the memory for
1184 # These are no longer needed, dereference and toss the memory for
1185 # them.
1185 # them.
1186 msng_mnfst_lst = None
1186 msng_mnfst_lst = None
1187 msng_mnfst_set.clear()
1187 msng_mnfst_set.clear()
1188
1188
1189 changedfiles = changedfiles.keys()
1189 changedfiles = changedfiles.keys()
1190 changedfiles.sort()
1190 changedfiles.sort()
1191 # Go through all our files in order sorted by name.
1191 # Go through all our files in order sorted by name.
1192 for fname in changedfiles:
1192 for fname in changedfiles:
1193 filerevlog = self.file(fname)
1193 filerevlog = self.file(fname)
1194 # Toss out the filenodes that the recipient isn't really
1194 # Toss out the filenodes that the recipient isn't really
1195 # missing.
1195 # missing.
1196 prune_filenodes(fname, filerevlog)
1196 prune_filenodes(fname, filerevlog)
1197 msng_filenode_lst = msng_filenode_set[fname].keys()
1197 msng_filenode_lst = msng_filenode_set[fname].keys()
1198 # If any filenodes are left, generate the group for them,
1198 # If any filenodes are left, generate the group for them,
1199 # otherwise don't bother.
1199 # otherwise don't bother.
1200 if len(msng_filenode_lst) > 0:
1200 if len(msng_filenode_lst) > 0:
1201 yield struct.pack(">l", len(fname) + 4) + fname
1201 yield struct.pack(">l", len(fname) + 4) + fname
1202 # Sort the filenodes by their revision #
1202 # Sort the filenodes by their revision #
1203 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1203 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1204 # Create a group generator and only pass in a changenode
1204 # Create a group generator and only pass in a changenode
1205 # lookup function as we need to collect no information
1205 # lookup function as we need to collect no information
1206 # from filenodes.
1206 # from filenodes.
1207 group = filerevlog.group(msng_filenode_lst,
1207 group = filerevlog.group(msng_filenode_lst,
1208 lookup_filenode_link_func(fname))
1208 lookup_filenode_link_func(fname))
1209 for chnk in group:
1209 for chnk in group:
1210 yield chnk
1210 yield chnk
1211 # Don't need this anymore, toss it to free memory.
1211 # Don't need this anymore, toss it to free memory.
1212 del msng_filenode_set[fname]
1212 del msng_filenode_set[fname]
1213 # Signal that no more groups are left.
1213 # Signal that no more groups are left.
1214 yield struct.pack(">l", 0)
1214 yield struct.pack(">l", 0)
1215
1215
1216 return util.chunkbuffer(gengroup())
1216 return util.chunkbuffer(gengroup())
1217
1217
1218 def changegroup(self, basenodes):
1218 def changegroup(self, basenodes):
1219 """Generate a changegroup of all nodes that we have that a recipient
1219 """Generate a changegroup of all nodes that we have that a recipient
1220 doesn't.
1220 doesn't.
1221
1221
1222 This is much easier than the previous function as we can assume that
1222 This is much easier than the previous function as we can assume that
1223 the recipient has any changenode we aren't sending them."""
1223 the recipient has any changenode we aren't sending them."""
1224 cl = self.changelog
1224 cl = self.changelog
1225 nodes = cl.nodesbetween(basenodes, None)[0]
1225 nodes = cl.nodesbetween(basenodes, None)[0]
1226 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1226 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1227
1227
1228 def identity(x):
1228 def identity(x):
1229 return x
1229 return x
1230
1230
1231 def gennodelst(revlog):
1231 def gennodelst(revlog):
1232 for r in xrange(0, revlog.count()):
1232 for r in xrange(0, revlog.count()):
1233 n = revlog.node(r)
1233 n = revlog.node(r)
1234 if revlog.linkrev(n) in revset:
1234 if revlog.linkrev(n) in revset:
1235 yield n
1235 yield n
1236
1236
1237 def changed_file_collector(changedfileset):
1237 def changed_file_collector(changedfileset):
1238 def collect_changed_files(clnode):
1238 def collect_changed_files(clnode):
1239 c = cl.read(clnode)
1239 c = cl.read(clnode)
1240 for fname in c[3]:
1240 for fname in c[3]:
1241 changedfileset[fname] = 1
1241 changedfileset[fname] = 1
1242 return collect_changed_files
1242 return collect_changed_files
1243
1243
1244 def lookuprevlink_func(revlog):
1244 def lookuprevlink_func(revlog):
1245 def lookuprevlink(n):
1245 def lookuprevlink(n):
1246 return cl.node(revlog.linkrev(n))
1246 return cl.node(revlog.linkrev(n))
1247 return lookuprevlink
1247 return lookuprevlink
1248
1248
1249 def gengroup():
1249 def gengroup():
1250 # construct a list of all changed files
1250 # construct a list of all changed files
1251 changedfiles = {}
1251 changedfiles = {}
1252
1252
1253 for chnk in cl.group(nodes, identity,
1253 for chnk in cl.group(nodes, identity,
1254 changed_file_collector(changedfiles)):
1254 changed_file_collector(changedfiles)):
1255 yield chnk
1255 yield chnk
1256 changedfiles = changedfiles.keys()
1256 changedfiles = changedfiles.keys()
1257 changedfiles.sort()
1257 changedfiles.sort()
1258
1258
1259 mnfst = self.manifest
1259 mnfst = self.manifest
1260 nodeiter = gennodelst(mnfst)
1260 nodeiter = gennodelst(mnfst)
1261 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1261 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1262 yield chnk
1262 yield chnk
1263
1263
1264 for fname in changedfiles:
1264 for fname in changedfiles:
1265 filerevlog = self.file(fname)
1265 filerevlog = self.file(fname)
1266 nodeiter = gennodelst(filerevlog)
1266 nodeiter = gennodelst(filerevlog)
1267 nodeiter = list(nodeiter)
1267 nodeiter = list(nodeiter)
1268 if nodeiter:
1268 if nodeiter:
1269 yield struct.pack(">l", len(fname) + 4) + fname
1269 yield struct.pack(">l", len(fname) + 4) + fname
1270 lookup = lookuprevlink_func(filerevlog)
1270 lookup = lookuprevlink_func(filerevlog)
1271 for chnk in filerevlog.group(nodeiter, lookup):
1271 for chnk in filerevlog.group(nodeiter, lookup):
1272 yield chnk
1272 yield chnk
1273
1273
1274 yield struct.pack(">l", 0)
1274 yield struct.pack(">l", 0)
1275
1275
1276 return util.chunkbuffer(gengroup())
1276 return util.chunkbuffer(gengroup())
1277
1277
1278 def addchangegroup(self, source):
1278 def addchangegroup(self, source):
1279
1279
1280 def getchunk():
1280 def getchunk():
1281 d = source.read(4)
1281 d = source.read(4)
1282 if not d: return ""
1282 if not d: return ""
1283 l = struct.unpack(">l", d)[0]
1283 l = struct.unpack(">l", d)[0]
1284 if l <= 4: return ""
1284 if l <= 4: return ""
1285 d = source.read(l - 4)
1285 d = source.read(l - 4)
1286 if len(d) < l - 4:
1286 if len(d) < l - 4:
1287 raise repo.RepoError(_("premature EOF reading chunk"
1287 raise repo.RepoError(_("premature EOF reading chunk"
1288 " (got %d bytes, expected %d)")
1288 " (got %d bytes, expected %d)")
1289 % (len(d), l - 4))
1289 % (len(d), l - 4))
1290 return d
1290 return d
1291
1291
1292 def getgroup():
1292 def getgroup():
1293 while 1:
1293 while 1:
1294 c = getchunk()
1294 c = getchunk()
1295 if not c: break
1295 if not c: break
1296 yield c
1296 yield c
1297
1297
1298 def csmap(x):
1298 def csmap(x):
1299 self.ui.debug(_("add changeset %s\n") % short(x))
1299 self.ui.debug(_("add changeset %s\n") % short(x))
1300 return self.changelog.count()
1300 return self.changelog.count()
1301
1301
1302 def revmap(x):
1302 def revmap(x):
1303 return self.changelog.rev(x)
1303 return self.changelog.rev(x)
1304
1304
1305 if not source: return
1305 if not source: return
1306 changesets = files = revisions = 0
1306 changesets = files = revisions = 0
1307
1307
1308 tr = self.transaction()
1308 tr = self.transaction()
1309
1309
1310 oldheads = len(self.changelog.heads())
1310 oldheads = len(self.changelog.heads())
1311
1311
1312 # pull off the changeset group
1312 # pull off the changeset group
1313 self.ui.status(_("adding changesets\n"))
1313 self.ui.status(_("adding changesets\n"))
1314 co = self.changelog.tip()
1314 co = self.changelog.tip()
1315 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1315 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1316 cnr, cor = map(self.changelog.rev, (cn, co))
1316 cnr, cor = map(self.changelog.rev, (cn, co))
1317 if cn == nullid:
1317 if cn == nullid:
1318 cnr = cor
1318 cnr = cor
1319 changesets = cnr - cor
1319 changesets = cnr - cor
1320
1320
1321 # pull off the manifest group
1321 # pull off the manifest group
1322 self.ui.status(_("adding manifests\n"))
1322 self.ui.status(_("adding manifests\n"))
1323 mm = self.manifest.tip()
1323 mm = self.manifest.tip()
1324 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1324 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1325
1325
1326 # process the files
1326 # process the files
1327 self.ui.status(_("adding file changes\n"))
1327 self.ui.status(_("adding file changes\n"))
1328 while 1:
1328 while 1:
1329 f = getchunk()
1329 f = getchunk()
1330 if not f: break
1330 if not f: break
1331 self.ui.debug(_("adding %s revisions\n") % f)
1331 self.ui.debug(_("adding %s revisions\n") % f)
1332 fl = self.file(f)
1332 fl = self.file(f)
1333 o = fl.count()
1333 o = fl.count()
1334 n = fl.addgroup(getgroup(), revmap, tr)
1334 n = fl.addgroup(getgroup(), revmap, tr)
1335 revisions += fl.count() - o
1335 revisions += fl.count() - o
1336 files += 1
1336 files += 1
1337
1337
1338 newheads = len(self.changelog.heads())
1338 newheads = len(self.changelog.heads())
1339 heads = ""
1339 heads = ""
1340 if oldheads and newheads > oldheads:
1340 if oldheads and newheads > oldheads:
1341 heads = _(" (+%d heads)") % (newheads - oldheads)
1341 heads = _(" (+%d heads)") % (newheads - oldheads)
1342
1342
1343 self.ui.status(_("added %d changesets"
1343 self.ui.status(_("added %d changesets"
1344 " with %d changes to %d files%s\n")
1344 " with %d changes to %d files%s\n")
1345 % (changesets, revisions, files, heads))
1345 % (changesets, revisions, files, heads))
1346
1346
1347 tr.close()
1347 tr.close()
1348
1348
1349 if changesets > 0:
1349 if changesets > 0:
1350 if not self.hook("changegroup",
1350 if not self.hook("changegroup",
1351 node=hex(self.changelog.node(cor+1))):
1351 node=hex(self.changelog.node(cor+1))):
1352 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1352 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1353 return 1
1353 return 1
1354
1354
1355 for i in range(cor + 1, cnr + 1):
1355 for i in range(cor + 1, cnr + 1):
1356 self.hook("commit", node=hex(self.changelog.node(i)))
1356 self.hook("commit", node=hex(self.changelog.node(i)))
1357
1357
1358 return
1358 return
1359
1359
1360 def update(self, node, allow=False, force=False, choose=None,
1360 def update(self, node, allow=False, force=False, choose=None,
1361 moddirstate=True):
1361 moddirstate=True):
1362 pl = self.dirstate.parents()
1362 pl = self.dirstate.parents()
1363 if not force and pl[1] != nullid:
1363 if not force and pl[1] != nullid:
1364 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1364 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1365 return 1
1365 return 1
1366
1366
1367 p1, p2 = pl[0], node
1367 p1, p2 = pl[0], node
1368 pa = self.changelog.ancestor(p1, p2)
1368 pa = self.changelog.ancestor(p1, p2)
1369 m1n = self.changelog.read(p1)[0]
1369 m1n = self.changelog.read(p1)[0]
1370 m2n = self.changelog.read(p2)[0]
1370 m2n = self.changelog.read(p2)[0]
1371 man = self.manifest.ancestor(m1n, m2n)
1371 man = self.manifest.ancestor(m1n, m2n)
1372 m1 = self.manifest.read(m1n)
1372 m1 = self.manifest.read(m1n)
1373 mf1 = self.manifest.readflags(m1n)
1373 mf1 = self.manifest.readflags(m1n)
1374 m2 = self.manifest.read(m2n)
1374 m2 = self.manifest.read(m2n)
1375 mf2 = self.manifest.readflags(m2n)
1375 mf2 = self.manifest.readflags(m2n)
1376 ma = self.manifest.read(man)
1376 ma = self.manifest.read(man)
1377 mfa = self.manifest.readflags(man)
1377 mfa = self.manifest.readflags(man)
1378
1378
1379 (c, a, d, u) = self.changes()
1379 (c, a, d, u) = self.changes()
1380
1380
1381 # is this a jump, or a merge? i.e. is there a linear path
1381 # is this a jump, or a merge? i.e. is there a linear path
1382 # from p1 to p2?
1382 # from p1 to p2?
1383 linear_path = (pa == p1 or pa == p2)
1383 linear_path = (pa == p1 or pa == p2)
1384
1384
1385 # resolve the manifest to determine which files
1385 # resolve the manifest to determine which files
1386 # we care about merging
1386 # we care about merging
1387 self.ui.note(_("resolving manifests\n"))
1387 self.ui.note(_("resolving manifests\n"))
1388 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1388 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1389 (force, allow, moddirstate, linear_path))
1389 (force, allow, moddirstate, linear_path))
1390 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1390 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1391 (short(man), short(m1n), short(m2n)))
1391 (short(man), short(m1n), short(m2n)))
1392
1392
1393 merge = {}
1393 merge = {}
1394 get = {}
1394 get = {}
1395 remove = []
1395 remove = []
1396
1396
1397 # construct a working dir manifest
1397 # construct a working dir manifest
1398 mw = m1.copy()
1398 mw = m1.copy()
1399 mfw = mf1.copy()
1399 mfw = mf1.copy()
1400 umap = dict.fromkeys(u)
1400 umap = dict.fromkeys(u)
1401
1401
1402 for f in a + c + u:
1402 for f in a + c + u:
1403 mw[f] = ""
1403 mw[f] = ""
1404 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1404 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1405
1405
1406 if moddirstate:
1406 if moddirstate:
1407 wlock = self.wlock()
1407 wlock = self.wlock()
1408
1408
1409 for f in d:
1409 for f in d:
1410 if f in mw: del mw[f]
1410 if f in mw: del mw[f]
1411
1411
1412 # If we're jumping between revisions (as opposed to merging),
1412 # If we're jumping between revisions (as opposed to merging),
1413 # and if neither the working directory nor the target rev has
1413 # and if neither the working directory nor the target rev has
1414 # the file, then we need to remove it from the dirstate, to
1414 # the file, then we need to remove it from the dirstate, to
1415 # prevent the dirstate from listing the file when it is no
1415 # prevent the dirstate from listing the file when it is no
1416 # longer in the manifest.
1416 # longer in the manifest.
1417 if moddirstate and linear_path and f not in m2:
1417 if moddirstate and linear_path and f not in m2:
1418 self.dirstate.forget((f,))
1418 self.dirstate.forget((f,))
1419
1419
1420 # Compare manifests
1420 # Compare manifests
1421 for f, n in mw.iteritems():
1421 for f, n in mw.iteritems():
1422 if choose and not choose(f): continue
1422 if choose and not choose(f): continue
1423 if f in m2:
1423 if f in m2:
1424 s = 0
1424 s = 0
1425
1425
1426 # is the wfile new since m1, and match m2?
1426 # is the wfile new since m1, and match m2?
1427 if f not in m1:
1427 if f not in m1:
1428 t1 = self.wread(f)
1428 t1 = self.wread(f)
1429 t2 = self.file(f).read(m2[f])
1429 t2 = self.file(f).read(m2[f])
1430 if cmp(t1, t2) == 0:
1430 if cmp(t1, t2) == 0:
1431 n = m2[f]
1431 n = m2[f]
1432 del t1, t2
1432 del t1, t2
1433
1433
1434 # are files different?
1434 # are files different?
1435 if n != m2[f]:
1435 if n != m2[f]:
1436 a = ma.get(f, nullid)
1436 a = ma.get(f, nullid)
1437 # are both different from the ancestor?
1437 # are both different from the ancestor?
1438 if n != a and m2[f] != a:
1438 if n != a and m2[f] != a:
1439 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1439 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1440 # merge executable bits
1440 # merge executable bits
1441 # "if we changed or they changed, change in merge"
1441 # "if we changed or they changed, change in merge"
1442 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1442 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1443 mode = ((a^b) | (a^c)) ^ a
1443 mode = ((a^b) | (a^c)) ^ a
1444 merge[f] = (m1.get(f, nullid), m2[f], mode)
1444 merge[f] = (m1.get(f, nullid), m2[f], mode)
1445 s = 1
1445 s = 1
1446 # are we clobbering?
1446 # are we clobbering?
1447 # is remote's version newer?
1447 # is remote's version newer?
1448 # or are we going back in time?
1448 # or are we going back in time?
1449 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1449 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1450 self.ui.debug(_(" remote %s is newer, get\n") % f)
1450 self.ui.debug(_(" remote %s is newer, get\n") % f)
1451 get[f] = m2[f]
1451 get[f] = m2[f]
1452 s = 1
1452 s = 1
1453 elif f in umap:
1453 elif f in umap:
1454 # this unknown file is the same as the checkout
1454 # this unknown file is the same as the checkout
1455 get[f] = m2[f]
1455 get[f] = m2[f]
1456
1456
1457 if not s and mfw[f] != mf2[f]:
1457 if not s and mfw[f] != mf2[f]:
1458 if force:
1458 if force:
1459 self.ui.debug(_(" updating permissions for %s\n") % f)
1459 self.ui.debug(_(" updating permissions for %s\n") % f)
1460 util.set_exec(self.wjoin(f), mf2[f])
1460 util.set_exec(self.wjoin(f), mf2[f])
1461 else:
1461 else:
1462 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1462 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1463 mode = ((a^b) | (a^c)) ^ a
1463 mode = ((a^b) | (a^c)) ^ a
1464 if mode != b:
1464 if mode != b:
1465 self.ui.debug(_(" updating permissions for %s\n") % f)
1465 self.ui.debug(_(" updating permissions for %s\n") % f)
1466 util.set_exec(self.wjoin(f), mode)
1466 util.set_exec(self.wjoin(f), mode)
1467 del m2[f]
1467 del m2[f]
1468 elif f in ma:
1468 elif f in ma:
1469 if n != ma[f]:
1469 if n != ma[f]:
1470 r = _("d")
1470 r = _("d")
1471 if not force and (linear_path or allow):
1471 if not force and (linear_path or allow):
1472 r = self.ui.prompt(
1472 r = self.ui.prompt(
1473 (_(" local changed %s which remote deleted\n") % f) +
1473 (_(" local changed %s which remote deleted\n") % f) +
1474 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1474 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1475 if r == _("d"):
1475 if r == _("d"):
1476 remove.append(f)
1476 remove.append(f)
1477 else:
1477 else:
1478 self.ui.debug(_("other deleted %s\n") % f)
1478 self.ui.debug(_("other deleted %s\n") % f)
1479 remove.append(f) # other deleted it
1479 remove.append(f) # other deleted it
1480 else:
1480 else:
1481 # file is created on branch or in working directory
1481 # file is created on branch or in working directory
1482 if force and f not in umap:
1482 if force and f not in umap:
1483 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1483 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1484 remove.append(f)
1484 remove.append(f)
1485 elif n == m1.get(f, nullid): # same as parent
1485 elif n == m1.get(f, nullid): # same as parent
1486 if p2 == pa: # going backwards?
1486 if p2 == pa: # going backwards?
1487 self.ui.debug(_("remote deleted %s\n") % f)
1487 self.ui.debug(_("remote deleted %s\n") % f)
1488 remove.append(f)
1488 remove.append(f)
1489 else:
1489 else:
1490 self.ui.debug(_("local modified %s, keeping\n") % f)
1490 self.ui.debug(_("local modified %s, keeping\n") % f)
1491 else:
1491 else:
1492 self.ui.debug(_("working dir created %s, keeping\n") % f)
1492 self.ui.debug(_("working dir created %s, keeping\n") % f)
1493
1493
1494 for f, n in m2.iteritems():
1494 for f, n in m2.iteritems():
1495 if choose and not choose(f): continue
1495 if choose and not choose(f): continue
1496 if f[0] == "/": continue
1496 if f[0] == "/": continue
1497 if f in ma and n != ma[f]:
1497 if f in ma and n != ma[f]:
1498 r = _("k")
1498 r = _("k")
1499 if not force and (linear_path or allow):
1499 if not force and (linear_path or allow):
1500 r = self.ui.prompt(
1500 r = self.ui.prompt(
1501 (_("remote changed %s which local deleted\n") % f) +
1501 (_("remote changed %s which local deleted\n") % f) +
1502 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1502 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1503 if r == _("k"): get[f] = n
1503 if r == _("k"): get[f] = n
1504 elif f not in ma:
1504 elif f not in ma:
1505 self.ui.debug(_("remote created %s\n") % f)
1505 self.ui.debug(_("remote created %s\n") % f)
1506 get[f] = n
1506 get[f] = n
1507 else:
1507 else:
1508 if force or p2 == pa: # going backwards?
1508 if force or p2 == pa: # going backwards?
1509 self.ui.debug(_("local deleted %s, recreating\n") % f)
1509 self.ui.debug(_("local deleted %s, recreating\n") % f)
1510 get[f] = n
1510 get[f] = n
1511 else:
1511 else:
1512 self.ui.debug(_("local deleted %s\n") % f)
1512 self.ui.debug(_("local deleted %s\n") % f)
1513
1513
1514 del mw, m1, m2, ma
1514 del mw, m1, m2, ma
1515
1515
1516 if force:
1516 if force:
1517 for f in merge:
1517 for f in merge:
1518 get[f] = merge[f][1]
1518 get[f] = merge[f][1]
1519 merge = {}
1519 merge = {}
1520
1520
1521 if linear_path or force:
1521 if linear_path or force:
1522 # we don't need to do any magic, just jump to the new rev
1522 # we don't need to do any magic, just jump to the new rev
1523 branch_merge = False
1523 branch_merge = False
1524 p1, p2 = p2, nullid
1524 p1, p2 = p2, nullid
1525 else:
1525 else:
1526 if not allow:
1526 if not allow:
1527 self.ui.status(_("this update spans a branch"
1527 self.ui.status(_("this update spans a branch"
1528 " affecting the following files:\n"))
1528 " affecting the following files:\n"))
1529 fl = merge.keys() + get.keys()
1529 fl = merge.keys() + get.keys()
1530 fl.sort()
1530 fl.sort()
1531 for f in fl:
1531 for f in fl:
1532 cf = ""
1532 cf = ""
1533 if f in merge: cf = _(" (resolve)")
1533 if f in merge: cf = _(" (resolve)")
1534 self.ui.status(" %s%s\n" % (f, cf))
1534 self.ui.status(" %s%s\n" % (f, cf))
1535 self.ui.warn(_("aborting update spanning branches!\n"))
1535 self.ui.warn(_("aborting update spanning branches!\n"))
1536 self.ui.status(_("(use update -m to merge across branches"
1536 self.ui.status(_("(use update -m to merge across branches"
1537 " or -C to lose changes)\n"))
1537 " or -C to lose changes)\n"))
1538 return 1
1538 return 1
1539 branch_merge = True
1539 branch_merge = True
1540
1540
1541 # get the files we don't need to change
1541 # get the files we don't need to change
1542 files = get.keys()
1542 files = get.keys()
1543 files.sort()
1543 files.sort()
1544 for f in files:
1544 for f in files:
1545 if f[0] == "/": continue
1545 if f[0] == "/": continue
1546 self.ui.note(_("getting %s\n") % f)
1546 self.ui.note(_("getting %s\n") % f)
1547 t = self.file(f).read(get[f])
1547 t = self.file(f).read(get[f])
1548 self.wwrite(f, t)
1548 self.wwrite(f, t)
1549 util.set_exec(self.wjoin(f), mf2[f])
1549 util.set_exec(self.wjoin(f), mf2[f])
1550 if moddirstate:
1550 if moddirstate:
1551 if branch_merge:
1551 if branch_merge:
1552 self.dirstate.update([f], 'n', st_mtime=-1)
1552 self.dirstate.update([f], 'n', st_mtime=-1)
1553 else:
1553 else:
1554 self.dirstate.update([f], 'n')
1554 self.dirstate.update([f], 'n')
1555
1555
1556 # merge the tricky bits
1556 # merge the tricky bits
1557 files = merge.keys()
1557 files = merge.keys()
1558 files.sort()
1558 files.sort()
1559 for f in files:
1559 for f in files:
1560 self.ui.status(_("merging %s\n") % f)
1560 self.ui.status(_("merging %s\n") % f)
1561 my, other, flag = merge[f]
1561 my, other, flag = merge[f]
1562 self.merge3(f, my, other)
1562 self.merge3(f, my, other)
1563 util.set_exec(self.wjoin(f), flag)
1563 util.set_exec(self.wjoin(f), flag)
1564 if moddirstate:
1564 if moddirstate:
1565 if branch_merge:
1565 if branch_merge:
1566 # We've done a branch merge, mark this file as merged
1566 # We've done a branch merge, mark this file as merged
1567 # so that we properly record the merger later
1567 # so that we properly record the merger later
1568 self.dirstate.update([f], 'm')
1568 self.dirstate.update([f], 'm')
1569 else:
1569 else:
1570 # We've update-merged a locally modified file, so
1570 # We've update-merged a locally modified file, so
1571 # we set the dirstate to emulate a normal checkout
1571 # we set the dirstate to emulate a normal checkout
1572 # of that file some time in the past. Thus our
1572 # of that file some time in the past. Thus our
1573 # merge will appear as a normal local file
1573 # merge will appear as a normal local file
1574 # modification.
1574 # modification.
1575 f_len = len(self.file(f).read(other))
1575 f_len = len(self.file(f).read(other))
1576 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1576 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1577
1577
1578 remove.sort()
1578 remove.sort()
1579 for f in remove:
1579 for f in remove:
1580 self.ui.note(_("removing %s\n") % f)
1580 self.ui.note(_("removing %s\n") % f)
1581 try:
1581 try:
1582 util.unlink(self.wjoin(f))
1582 util.unlink(self.wjoin(f))
1583 except OSError, inst:
1583 except OSError, inst:
1584 if inst.errno != errno.ENOENT:
1584 if inst.errno != errno.ENOENT:
1585 self.ui.warn(_("update failed to remove %s: %s!\n") %
1585 self.ui.warn(_("update failed to remove %s: %s!\n") %
1586 (f, inst.strerror))
1586 (f, inst.strerror))
1587 if moddirstate:
1587 if moddirstate:
1588 if branch_merge:
1588 if branch_merge:
1589 self.dirstate.update(remove, 'r')
1589 self.dirstate.update(remove, 'r')
1590 else:
1590 else:
1591 self.dirstate.forget(remove)
1591 self.dirstate.forget(remove)
1592
1592
1593 if moddirstate:
1593 if moddirstate:
1594 self.dirstate.setparents(p1, p2)
1594 self.dirstate.setparents(p1, p2)
1595
1595
1596 def merge3(self, fn, my, other):
1596 def merge3(self, fn, my, other):
1597 """perform a 3-way merge in the working directory"""
1597 """perform a 3-way merge in the working directory"""
1598
1598
1599 def temp(prefix, node):
1599 def temp(prefix, node):
1600 pre = "%s~%s." % (os.path.basename(fn), prefix)
1600 pre = "%s~%s." % (os.path.basename(fn), prefix)
1601 (fd, name) = tempfile.mkstemp("", pre)
1601 (fd, name) = tempfile.mkstemp("", pre)
1602 f = os.fdopen(fd, "wb")
1602 f = os.fdopen(fd, "wb")
1603 self.wwrite(fn, fl.read(node), f)
1603 self.wwrite(fn, fl.read(node), f)
1604 f.close()
1604 f.close()
1605 return name
1605 return name
1606
1606
1607 fl = self.file(fn)
1607 fl = self.file(fn)
1608 base = fl.ancestor(my, other)
1608 base = fl.ancestor(my, other)
1609 a = self.wjoin(fn)
1609 a = self.wjoin(fn)
1610 b = temp("base", base)
1610 b = temp("base", base)
1611 c = temp("other", other)
1611 c = temp("other", other)
1612
1612
1613 self.ui.note(_("resolving %s\n") % fn)
1613 self.ui.note(_("resolving %s\n") % fn)
1614 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1614 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1615 (fn, short(my), short(other), short(base)))
1615 (fn, short(my), short(other), short(base)))
1616
1616
1617 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1617 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1618 or "hgmerge")
1618 or "hgmerge")
1619 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1619 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1620 if r:
1620 if r:
1621 self.ui.warn(_("merging %s failed!\n") % fn)
1621 self.ui.warn(_("merging %s failed!\n") % fn)
1622
1622
1623 os.unlink(b)
1623 os.unlink(b)
1624 os.unlink(c)
1624 os.unlink(c)
1625
1625
1626 def verify(self):
1626 def verify(self):
1627 filelinkrevs = {}
1627 filelinkrevs = {}
1628 filenodes = {}
1628 filenodes = {}
1629 changesets = revisions = files = 0
1629 changesets = revisions = files = 0
1630 errors = [0]
1630 errors = [0]
1631 neededmanifests = {}
1631 neededmanifests = {}
1632
1632
1633 def err(msg):
1633 def err(msg):
1634 self.ui.warn(msg + "\n")
1634 self.ui.warn(msg + "\n")
1635 errors[0] += 1
1635 errors[0] += 1
1636
1636
1637 seen = {}
1637 seen = {}
1638 self.ui.status(_("checking changesets\n"))
1638 self.ui.status(_("checking changesets\n"))
1639 d = self.changelog.checksize()
1639 d = self.changelog.checksize()
1640 if d:
1640 if d:
1641 err(_("changeset data short %d bytes") % d)
1641 err(_("changeset data short %d bytes") % d)
1642 for i in range(self.changelog.count()):
1642 for i in range(self.changelog.count()):
1643 changesets += 1
1643 changesets += 1
1644 n = self.changelog.node(i)
1644 n = self.changelog.node(i)
1645 l = self.changelog.linkrev(n)
1645 l = self.changelog.linkrev(n)
1646 if l != i:
1646 if l != i:
1647 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1647 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1648 if n in seen:
1648 if n in seen:
1649 err(_("duplicate changeset at revision %d") % i)
1649 err(_("duplicate changeset at revision %d") % i)
1650 seen[n] = 1
1650 seen[n] = 1
1651
1651
1652 for p in self.changelog.parents(n):
1652 for p in self.changelog.parents(n):
1653 if p not in self.changelog.nodemap:
1653 if p not in self.changelog.nodemap:
1654 err(_("changeset %s has unknown parent %s") %
1654 err(_("changeset %s has unknown parent %s") %
1655 (short(n), short(p)))
1655 (short(n), short(p)))
1656 try:
1656 try:
1657 changes = self.changelog.read(n)
1657 changes = self.changelog.read(n)
1658 except KeyboardInterrupt:
1658 except KeyboardInterrupt:
1659 self.ui.warn(_("interrupted"))
1659 self.ui.warn(_("interrupted"))
1660 raise
1660 raise
1661 except Exception, inst:
1661 except Exception, inst:
1662 err(_("unpacking changeset %s: %s") % (short(n), inst))
1662 err(_("unpacking changeset %s: %s") % (short(n), inst))
1663
1663
1664 neededmanifests[changes[0]] = n
1664 neededmanifests[changes[0]] = n
1665
1665
1666 for f in changes[3]:
1666 for f in changes[3]:
1667 filelinkrevs.setdefault(f, []).append(i)
1667 filelinkrevs.setdefault(f, []).append(i)
1668
1668
1669 seen = {}
1669 seen = {}
1670 self.ui.status(_("checking manifests\n"))
1670 self.ui.status(_("checking manifests\n"))
1671 d = self.manifest.checksize()
1671 d = self.manifest.checksize()
1672 if d:
1672 if d:
1673 err(_("manifest data short %d bytes") % d)
1673 err(_("manifest data short %d bytes") % d)
1674 for i in range(self.manifest.count()):
1674 for i in range(self.manifest.count()):
1675 n = self.manifest.node(i)
1675 n = self.manifest.node(i)
1676 l = self.manifest.linkrev(n)
1676 l = self.manifest.linkrev(n)
1677
1677
1678 if l < 0 or l >= self.changelog.count():
1678 if l < 0 or l >= self.changelog.count():
1679 err(_("bad manifest link (%d) at revision %d") % (l, i))
1679 err(_("bad manifest link (%d) at revision %d") % (l, i))
1680
1680
1681 if n in neededmanifests:
1681 if n in neededmanifests:
1682 del neededmanifests[n]
1682 del neededmanifests[n]
1683
1683
1684 if n in seen:
1684 if n in seen:
1685 err(_("duplicate manifest at revision %d") % i)
1685 err(_("duplicate manifest at revision %d") % i)
1686
1686
1687 seen[n] = 1
1687 seen[n] = 1
1688
1688
1689 for p in self.manifest.parents(n):
1689 for p in self.manifest.parents(n):
1690 if p not in self.manifest.nodemap:
1690 if p not in self.manifest.nodemap:
1691 err(_("manifest %s has unknown parent %s") %
1691 err(_("manifest %s has unknown parent %s") %
1692 (short(n), short(p)))
1692 (short(n), short(p)))
1693
1693
1694 try:
1694 try:
1695 delta = mdiff.patchtext(self.manifest.delta(n))
1695 delta = mdiff.patchtext(self.manifest.delta(n))
1696 except KeyboardInterrupt:
1696 except KeyboardInterrupt:
1697 self.ui.warn(_("interrupted"))
1697 self.ui.warn(_("interrupted"))
1698 raise
1698 raise
1699 except Exception, inst:
1699 except Exception, inst:
1700 err(_("unpacking manifest %s: %s") % (short(n), inst))
1700 err(_("unpacking manifest %s: %s") % (short(n), inst))
1701
1701
1702 ff = [ l.split('\0') for l in delta.splitlines() ]
1702 ff = [ l.split('\0') for l in delta.splitlines() ]
1703 for f, fn in ff:
1703 for f, fn in ff:
1704 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1704 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1705
1705
1706 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1706 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1707
1707
1708 for m,c in neededmanifests.items():
1708 for m,c in neededmanifests.items():
1709 err(_("Changeset %s refers to unknown manifest %s") %
1709 err(_("Changeset %s refers to unknown manifest %s") %
1710 (short(m), short(c)))
1710 (short(m), short(c)))
1711 del neededmanifests
1711 del neededmanifests
1712
1712
1713 for f in filenodes:
1713 for f in filenodes:
1714 if f not in filelinkrevs:
1714 if f not in filelinkrevs:
1715 err(_("file %s in manifest but not in changesets") % f)
1715 err(_("file %s in manifest but not in changesets") % f)
1716
1716
1717 for f in filelinkrevs:
1717 for f in filelinkrevs:
1718 if f not in filenodes:
1718 if f not in filenodes:
1719 err(_("file %s in changeset but not in manifest") % f)
1719 err(_("file %s in changeset but not in manifest") % f)
1720
1720
1721 self.ui.status(_("checking files\n"))
1721 self.ui.status(_("checking files\n"))
1722 ff = filenodes.keys()
1722 ff = filenodes.keys()
1723 ff.sort()
1723 ff.sort()
1724 for f in ff:
1724 for f in ff:
1725 if f == "/dev/null": continue
1725 if f == "/dev/null": continue
1726 files += 1
1726 files += 1
1727 fl = self.file(f)
1727 fl = self.file(f)
1728 d = fl.checksize()
1728 d = fl.checksize()
1729 if d:
1729 if d:
1730 err(_("%s file data short %d bytes") % (f, d))
1730 err(_("%s file data short %d bytes") % (f, d))
1731
1731
1732 nodes = { nullid: 1 }
1732 nodes = { nullid: 1 }
1733 seen = {}
1733 seen = {}
1734 for i in range(fl.count()):
1734 for i in range(fl.count()):
1735 revisions += 1
1735 revisions += 1
1736 n = fl.node(i)
1736 n = fl.node(i)
1737
1737
1738 if n in seen:
1738 if n in seen:
1739 err(_("%s: duplicate revision %d") % (f, i))
1739 err(_("%s: duplicate revision %d") % (f, i))
1740 if n not in filenodes[f]:
1740 if n not in filenodes[f]:
1741 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1741 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1742 else:
1742 else:
1743 del filenodes[f][n]
1743 del filenodes[f][n]
1744
1744
1745 flr = fl.linkrev(n)
1745 flr = fl.linkrev(n)
1746 if flr not in filelinkrevs[f]:
1746 if flr not in filelinkrevs[f]:
1747 err(_("%s:%s points to unexpected changeset %d")
1747 err(_("%s:%s points to unexpected changeset %d")
1748 % (f, short(n), flr))
1748 % (f, short(n), flr))
1749 else:
1749 else:
1750 filelinkrevs[f].remove(flr)
1750 filelinkrevs[f].remove(flr)
1751
1751
1752 # verify contents
1752 # verify contents
1753 try:
1753 try:
1754 t = fl.read(n)
1754 t = fl.read(n)
1755 except KeyboardInterrupt:
1755 except KeyboardInterrupt:
1756 self.ui.warn(_("interrupted"))
1756 self.ui.warn(_("interrupted"))
1757 raise
1757 raise
1758 except Exception, inst:
1758 except Exception, inst:
1759 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1759 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1760
1760
1761 # verify parents
1761 # verify parents
1762 (p1, p2) = fl.parents(n)
1762 (p1, p2) = fl.parents(n)
1763 if p1 not in nodes:
1763 if p1 not in nodes:
1764 err(_("file %s:%s unknown parent 1 %s") %
1764 err(_("file %s:%s unknown parent 1 %s") %
1765 (f, short(n), short(p1)))
1765 (f, short(n), short(p1)))
1766 if p2 not in nodes:
1766 if p2 not in nodes:
1767 err(_("file %s:%s unknown parent 2 %s") %
1767 err(_("file %s:%s unknown parent 2 %s") %
1768 (f, short(n), short(p1)))
1768 (f, short(n), short(p1)))
1769 nodes[n] = 1
1769 nodes[n] = 1
1770
1770
1771 # cross-check
1771 # cross-check
1772 for node in filenodes[f]:
1772 for node in filenodes[f]:
1773 err(_("node %s in manifests not in %s") % (hex(node), f))
1773 err(_("node %s in manifests not in %s") % (hex(node), f))
1774
1774
1775 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1775 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1776 (files, changesets, revisions))
1776 (files, changesets, revisions))
1777
1777
1778 if errors[0]:
1778 if errors[0]:
1779 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1779 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1780 return 1
1780 return 1
@@ -1,52 +1,52 b''
1 # lock.py - simple locking scheme for mercurial
1 # lock.py - simple locking scheme for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, time
8 import os, time
9 import util
9 import util
10
10
11 class LockHeld(Exception):
11 class LockHeld(Exception):
12 pass
12 pass
13
13
14 class lock:
14 class lock(object):
15 def __init__(self, file, wait=1, releasefn=None):
15 def __init__(self, file, wait=1, releasefn=None):
16 self.f = file
16 self.f = file
17 self.held = 0
17 self.held = 0
18 self.wait = wait
18 self.wait = wait
19 self.releasefn = releasefn
19 self.releasefn = releasefn
20 self.lock()
20 self.lock()
21
21
22 def __del__(self):
22 def __del__(self):
23 self.release()
23 self.release()
24
24
25 def lock(self):
25 def lock(self):
26 while 1:
26 while 1:
27 try:
27 try:
28 self.trylock()
28 self.trylock()
29 return 1
29 return 1
30 except LockHeld, inst:
30 except LockHeld, inst:
31 if self.wait:
31 if self.wait:
32 time.sleep(1)
32 time.sleep(1)
33 continue
33 continue
34 raise inst
34 raise inst
35
35
36 def trylock(self):
36 def trylock(self):
37 pid = os.getpid()
37 pid = os.getpid()
38 try:
38 try:
39 util.makelock(str(pid), self.f)
39 util.makelock(str(pid), self.f)
40 self.held = 1
40 self.held = 1
41 except (OSError, IOError):
41 except (OSError, IOError):
42 raise LockHeld(util.readlock(self.f))
42 raise LockHeld(util.readlock(self.f))
43
43
44 def release(self):
44 def release(self):
45 if self.held:
45 if self.held:
46 self.held = 0
46 self.held = 0
47 if self.releasefn:
47 if self.releasefn:
48 self.releasefn()
48 self.releasefn()
49 try:
49 try:
50 os.unlink(self.f)
50 os.unlink(self.f)
51 except: pass
51 except: pass
52
52
@@ -1,20 +1,20 b''
1 # remoterepo - remote repositort proxy classes for mercurial
1 # remoterepo - remote repositort proxy classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 class remoterepository:
8 class remoterepository(object):
9 def local(self):
9 def local(self):
10 return False
10 return False
11
11
12 class remotelock:
12 class remotelock(object):
13 def __init__(self, repo):
13 def __init__(self, repo):
14 self.repo = repo
14 self.repo = repo
15 def release(self):
15 def release(self):
16 self.repo.unlock()
16 self.repo.unlock()
17 self.repo = None
17 self.repo = None
18 def __del__(self):
18 def __del__(self):
19 if self.repo:
19 if self.repo:
20 self.release()
20 self.release()
@@ -1,893 +1,893 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
17
17
18 def hash(text, p1, p2):
18 def hash(text, p1, p2):
19 """generate a hash from the given text and its parent hashes
19 """generate a hash from the given text and its parent hashes
20
20
21 This hash combines both the current file contents and its history
21 This hash combines both the current file contents and its history
22 in a manner that makes it easy to distinguish nodes with the same
22 in a manner that makes it easy to distinguish nodes with the same
23 content in the revision graph.
23 content in the revision graph.
24 """
24 """
25 l = [p1, p2]
25 l = [p1, p2]
26 l.sort()
26 l.sort()
27 s = sha.new(l[0])
27 s = sha.new(l[0])
28 s.update(l[1])
28 s.update(l[1])
29 s.update(text)
29 s.update(text)
30 return s.digest()
30 return s.digest()
31
31
32 def compress(text):
32 def compress(text):
33 """ generate a possibly-compressed representation of text """
33 """ generate a possibly-compressed representation of text """
34 if not text: return ("", text)
34 if not text: return ("", text)
35 if len(text) < 44:
35 if len(text) < 44:
36 if text[0] == '\0': return ("", text)
36 if text[0] == '\0': return ("", text)
37 return ('u', text)
37 return ('u', text)
38 bin = zlib.compress(text)
38 bin = zlib.compress(text)
39 if len(bin) > len(text):
39 if len(bin) > len(text):
40 if text[0] == '\0': return ("", text)
40 if text[0] == '\0': return ("", text)
41 return ('u', text)
41 return ('u', text)
42 return ("", bin)
42 return ("", bin)
43
43
44 def decompress(bin):
44 def decompress(bin):
45 """ decompress the given input """
45 """ decompress the given input """
46 if not bin: return bin
46 if not bin: return bin
47 t = bin[0]
47 t = bin[0]
48 if t == '\0': return bin
48 if t == '\0': return bin
49 if t == 'x': return zlib.decompress(bin)
49 if t == 'x': return zlib.decompress(bin)
50 if t == 'u': return bin[1:]
50 if t == 'u': return bin[1:]
51 raise RevlogError(_("unknown compression type %s") % t)
51 raise RevlogError(_("unknown compression type %s") % t)
52
52
53 indexformat = ">4l20s20s20s"
53 indexformat = ">4l20s20s20s"
54
54
55 class lazyparser:
55 class lazyparser(object):
56 """
56 """
57 this class avoids the need to parse the entirety of large indices
57 this class avoids the need to parse the entirety of large indices
58
58
59 By default we parse and load 1000 entries at a time.
59 By default we parse and load 1000 entries at a time.
60
60
61 If no position is specified, we load the whole index, and replace
61 If no position is specified, we load the whole index, and replace
62 the lazy objects in revlog with the underlying objects for
62 the lazy objects in revlog with the underlying objects for
63 efficiency in cases where we look at most of the nodes.
63 efficiency in cases where we look at most of the nodes.
64 """
64 """
65 def __init__(self, data, revlog):
65 def __init__(self, data, revlog):
66 self.data = data
66 self.data = data
67 self.s = struct.calcsize(indexformat)
67 self.s = struct.calcsize(indexformat)
68 self.l = len(data)/self.s
68 self.l = len(data)/self.s
69 self.index = [None] * self.l
69 self.index = [None] * self.l
70 self.map = {nullid: -1}
70 self.map = {nullid: -1}
71 self.all = 0
71 self.all = 0
72 self.revlog = revlog
72 self.revlog = revlog
73
73
74 def trunc(self, pos):
74 def trunc(self, pos):
75 self.l = pos/self.s
75 self.l = pos/self.s
76
76
77 def load(self, pos=None):
77 def load(self, pos=None):
78 if self.all: return
78 if self.all: return
79 if pos is not None:
79 if pos is not None:
80 block = pos / 1000
80 block = pos / 1000
81 i = block * 1000
81 i = block * 1000
82 end = min(self.l, i + 1000)
82 end = min(self.l, i + 1000)
83 else:
83 else:
84 self.all = 1
84 self.all = 1
85 i = 0
85 i = 0
86 end = self.l
86 end = self.l
87 self.revlog.index = self.index
87 self.revlog.index = self.index
88 self.revlog.nodemap = self.map
88 self.revlog.nodemap = self.map
89
89
90 while i < end:
90 while i < end:
91 d = self.data[i * self.s: (i + 1) * self.s]
91 d = self.data[i * self.s: (i + 1) * self.s]
92 e = struct.unpack(indexformat, d)
92 e = struct.unpack(indexformat, d)
93 self.index[i] = e
93 self.index[i] = e
94 self.map[e[6]] = i
94 self.map[e[6]] = i
95 i += 1
95 i += 1
96
96
97 class lazyindex:
97 class lazyindex(object):
98 """a lazy version of the index array"""
98 """a lazy version of the index array"""
99 def __init__(self, parser):
99 def __init__(self, parser):
100 self.p = parser
100 self.p = parser
101 def __len__(self):
101 def __len__(self):
102 return len(self.p.index)
102 return len(self.p.index)
103 def load(self, pos):
103 def load(self, pos):
104 if pos < 0:
104 if pos < 0:
105 pos += len(self.p.index)
105 pos += len(self.p.index)
106 self.p.load(pos)
106 self.p.load(pos)
107 return self.p.index[pos]
107 return self.p.index[pos]
108 def __getitem__(self, pos):
108 def __getitem__(self, pos):
109 return self.p.index[pos] or self.load(pos)
109 return self.p.index[pos] or self.load(pos)
110 def __delitem__(self, pos):
110 def __delitem__(self, pos):
111 del self.p.index[pos]
111 del self.p.index[pos]
112 def append(self, e):
112 def append(self, e):
113 self.p.index.append(e)
113 self.p.index.append(e)
114 def trunc(self, pos):
114 def trunc(self, pos):
115 self.p.trunc(pos)
115 self.p.trunc(pos)
116
116
117 class lazymap:
117 class lazymap(object):
118 """a lazy version of the node map"""
118 """a lazy version of the node map"""
119 def __init__(self, parser):
119 def __init__(self, parser):
120 self.p = parser
120 self.p = parser
121 def load(self, key):
121 def load(self, key):
122 if self.p.all: return
122 if self.p.all: return
123 n = self.p.data.find(key)
123 n = self.p.data.find(key)
124 if n < 0:
124 if n < 0:
125 raise KeyError(key)
125 raise KeyError(key)
126 pos = n / self.p.s
126 pos = n / self.p.s
127 self.p.load(pos)
127 self.p.load(pos)
128 def __contains__(self, key):
128 def __contains__(self, key):
129 self.p.load()
129 self.p.load()
130 return key in self.p.map
130 return key in self.p.map
131 def __iter__(self):
131 def __iter__(self):
132 yield nullid
132 yield nullid
133 for i in xrange(self.p.l):
133 for i in xrange(self.p.l):
134 try:
134 try:
135 yield self.p.index[i][6]
135 yield self.p.index[i][6]
136 except:
136 except:
137 self.p.load(i)
137 self.p.load(i)
138 yield self.p.index[i][6]
138 yield self.p.index[i][6]
139 def __getitem__(self, key):
139 def __getitem__(self, key):
140 try:
140 try:
141 return self.p.map[key]
141 return self.p.map[key]
142 except KeyError:
142 except KeyError:
143 try:
143 try:
144 self.load(key)
144 self.load(key)
145 return self.p.map[key]
145 return self.p.map[key]
146 except KeyError:
146 except KeyError:
147 raise KeyError("node " + hex(key))
147 raise KeyError("node " + hex(key))
148 def __setitem__(self, key, val):
148 def __setitem__(self, key, val):
149 self.p.map[key] = val
149 self.p.map[key] = val
150 def __delitem__(self, key):
150 def __delitem__(self, key):
151 del self.p.map[key]
151 del self.p.map[key]
152
152
153 class RevlogError(Exception): pass
153 class RevlogError(Exception): pass
154
154
155 class revlog:
155 class revlog(object):
156 """
156 """
157 the underlying revision storage object
157 the underlying revision storage object
158
158
159 A revlog consists of two parts, an index and the revision data.
159 A revlog consists of two parts, an index and the revision data.
160
160
161 The index is a file with a fixed record size containing
161 The index is a file with a fixed record size containing
162 information on each revision, includings its nodeid (hash), the
162 information on each revision, includings its nodeid (hash), the
163 nodeids of its parents, the position and offset of its data within
163 nodeids of its parents, the position and offset of its data within
164 the data file, and the revision it's based on. Finally, each entry
164 the data file, and the revision it's based on. Finally, each entry
165 contains a linkrev entry that can serve as a pointer to external
165 contains a linkrev entry that can serve as a pointer to external
166 data.
166 data.
167
167
168 The revision data itself is a linear collection of data chunks.
168 The revision data itself is a linear collection of data chunks.
169 Each chunk represents a revision and is usually represented as a
169 Each chunk represents a revision and is usually represented as a
170 delta against the previous chunk. To bound lookup time, runs of
170 delta against the previous chunk. To bound lookup time, runs of
171 deltas are limited to about 2 times the length of the original
171 deltas are limited to about 2 times the length of the original
172 version data. This makes retrieval of a version proportional to
172 version data. This makes retrieval of a version proportional to
173 its size, or O(1) relative to the number of revisions.
173 its size, or O(1) relative to the number of revisions.
174
174
175 Both pieces of the revlog are written to in an append-only
175 Both pieces of the revlog are written to in an append-only
176 fashion, which means we never need to rewrite a file to insert or
176 fashion, which means we never need to rewrite a file to insert or
177 remove data, and can use some simple techniques to avoid the need
177 remove data, and can use some simple techniques to avoid the need
178 for locking while reading.
178 for locking while reading.
179 """
179 """
180 def __init__(self, opener, indexfile, datafile):
180 def __init__(self, opener, indexfile, datafile):
181 """
181 """
182 create a revlog object
182 create a revlog object
183
183
184 opener is a function that abstracts the file opening operation
184 opener is a function that abstracts the file opening operation
185 and can be used to implement COW semantics or the like.
185 and can be used to implement COW semantics or the like.
186 """
186 """
187 self.indexfile = indexfile
187 self.indexfile = indexfile
188 self.datafile = datafile
188 self.datafile = datafile
189 self.opener = opener
189 self.opener = opener
190 self.cache = None
190 self.cache = None
191
191
192 try:
192 try:
193 i = self.opener(self.indexfile).read()
193 i = self.opener(self.indexfile).read()
194 except IOError, inst:
194 except IOError, inst:
195 if inst.errno != errno.ENOENT:
195 if inst.errno != errno.ENOENT:
196 raise
196 raise
197 i = ""
197 i = ""
198
198
199 if len(i) > 10000:
199 if len(i) > 10000:
200 # big index, let's parse it on demand
200 # big index, let's parse it on demand
201 parser = lazyparser(i, self)
201 parser = lazyparser(i, self)
202 self.index = lazyindex(parser)
202 self.index = lazyindex(parser)
203 self.nodemap = lazymap(parser)
203 self.nodemap = lazymap(parser)
204 else:
204 else:
205 s = struct.calcsize(indexformat)
205 s = struct.calcsize(indexformat)
206 l = len(i) / s
206 l = len(i) / s
207 self.index = [None] * l
207 self.index = [None] * l
208 m = [None] * l
208 m = [None] * l
209
209
210 n = 0
210 n = 0
211 for f in xrange(0, len(i), s):
211 for f in xrange(0, len(i), s):
212 # offset, size, base, linkrev, p1, p2, nodeid
212 # offset, size, base, linkrev, p1, p2, nodeid
213 e = struct.unpack(indexformat, i[f:f + s])
213 e = struct.unpack(indexformat, i[f:f + s])
214 m[n] = (e[6], n)
214 m[n] = (e[6], n)
215 self.index[n] = e
215 self.index[n] = e
216 n += 1
216 n += 1
217
217
218 self.nodemap = dict(m)
218 self.nodemap = dict(m)
219 self.nodemap[nullid] = -1
219 self.nodemap[nullid] = -1
220
220
221 def tip(self): return self.node(len(self.index) - 1)
221 def tip(self): return self.node(len(self.index) - 1)
222 def count(self): return len(self.index)
222 def count(self): return len(self.index)
223 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
223 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
224 def rev(self, node):
224 def rev(self, node):
225 try:
225 try:
226 return self.nodemap[node]
226 return self.nodemap[node]
227 except KeyError:
227 except KeyError:
228 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
228 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
229 def linkrev(self, node): return self.index[self.rev(node)][3]
229 def linkrev(self, node): return self.index[self.rev(node)][3]
230 def parents(self, node):
230 def parents(self, node):
231 if node == nullid: return (nullid, nullid)
231 if node == nullid: return (nullid, nullid)
232 return self.index[self.rev(node)][4:6]
232 return self.index[self.rev(node)][4:6]
233
233
234 def start(self, rev): return self.index[rev][0]
234 def start(self, rev): return self.index[rev][0]
235 def length(self, rev): return self.index[rev][1]
235 def length(self, rev): return self.index[rev][1]
236 def end(self, rev): return self.start(rev) + self.length(rev)
236 def end(self, rev): return self.start(rev) + self.length(rev)
237 def base(self, rev): return self.index[rev][2]
237 def base(self, rev): return self.index[rev][2]
238
238
239 def reachable(self, rev, stop=None):
239 def reachable(self, rev, stop=None):
240 reachable = {}
240 reachable = {}
241 visit = [rev]
241 visit = [rev]
242 reachable[rev] = 1
242 reachable[rev] = 1
243 if stop:
243 if stop:
244 stopn = self.rev(stop)
244 stopn = self.rev(stop)
245 else:
245 else:
246 stopn = 0
246 stopn = 0
247 while visit:
247 while visit:
248 n = visit.pop(0)
248 n = visit.pop(0)
249 if n == stop:
249 if n == stop:
250 continue
250 continue
251 if n == nullid:
251 if n == nullid:
252 continue
252 continue
253 for p in self.parents(n):
253 for p in self.parents(n):
254 if self.rev(p) < stopn:
254 if self.rev(p) < stopn:
255 continue
255 continue
256 if p not in reachable:
256 if p not in reachable:
257 reachable[p] = 1
257 reachable[p] = 1
258 visit.append(p)
258 visit.append(p)
259 return reachable
259 return reachable
260
260
261 def nodesbetween(self, roots=None, heads=None):
261 def nodesbetween(self, roots=None, heads=None):
262 """Return a tuple containing three elements. Elements 1 and 2 contain
262 """Return a tuple containing three elements. Elements 1 and 2 contain
263 a final list bases and heads after all the unreachable ones have been
263 a final list bases and heads after all the unreachable ones have been
264 pruned. Element 0 contains a topologically sorted list of all
264 pruned. Element 0 contains a topologically sorted list of all
265
265
266 nodes that satisfy these constraints:
266 nodes that satisfy these constraints:
267 1. All nodes must be descended from a node in roots (the nodes on
267 1. All nodes must be descended from a node in roots (the nodes on
268 roots are considered descended from themselves).
268 roots are considered descended from themselves).
269 2. All nodes must also be ancestors of a node in heads (the nodes in
269 2. All nodes must also be ancestors of a node in heads (the nodes in
270 heads are considered to be their own ancestors).
270 heads are considered to be their own ancestors).
271
271
272 If roots is unspecified, nullid is assumed as the only root.
272 If roots is unspecified, nullid is assumed as the only root.
273 If heads is unspecified, it is taken to be the output of the
273 If heads is unspecified, it is taken to be the output of the
274 heads method (i.e. a list of all nodes in the repository that
274 heads method (i.e. a list of all nodes in the repository that
275 have no children)."""
275 have no children)."""
276 nonodes = ([], [], [])
276 nonodes = ([], [], [])
277 if roots is not None:
277 if roots is not None:
278 roots = list(roots)
278 roots = list(roots)
279 if not roots:
279 if not roots:
280 return nonodes
280 return nonodes
281 lowestrev = min([self.rev(n) for n in roots])
281 lowestrev = min([self.rev(n) for n in roots])
282 else:
282 else:
283 roots = [nullid] # Everybody's a descendent of nullid
283 roots = [nullid] # Everybody's a descendent of nullid
284 lowestrev = -1
284 lowestrev = -1
285 if (lowestrev == -1) and (heads is None):
285 if (lowestrev == -1) and (heads is None):
286 # We want _all_ the nodes!
286 # We want _all_ the nodes!
287 return ([self.node(r) for r in xrange(0, self.count())],
287 return ([self.node(r) for r in xrange(0, self.count())],
288 [nullid], list(self.heads()))
288 [nullid], list(self.heads()))
289 if heads is None:
289 if heads is None:
290 # All nodes are ancestors, so the latest ancestor is the last
290 # All nodes are ancestors, so the latest ancestor is the last
291 # node.
291 # node.
292 highestrev = self.count() - 1
292 highestrev = self.count() - 1
293 # Set ancestors to None to signal that every node is an ancestor.
293 # Set ancestors to None to signal that every node is an ancestor.
294 ancestors = None
294 ancestors = None
295 # Set heads to an empty dictionary for later discovery of heads
295 # Set heads to an empty dictionary for later discovery of heads
296 heads = {}
296 heads = {}
297 else:
297 else:
298 heads = list(heads)
298 heads = list(heads)
299 if not heads:
299 if not heads:
300 return nonodes
300 return nonodes
301 ancestors = {}
301 ancestors = {}
302 # Start at the top and keep marking parents until we're done.
302 # Start at the top and keep marking parents until we're done.
303 nodestotag = heads[:]
303 nodestotag = heads[:]
304 # Turn heads into a dictionary so we can remove 'fake' heads.
304 # Turn heads into a dictionary so we can remove 'fake' heads.
305 # Also, later we will be using it to filter out the heads we can't
305 # Also, later we will be using it to filter out the heads we can't
306 # find from roots.
306 # find from roots.
307 heads = dict.fromkeys(heads, 0)
307 heads = dict.fromkeys(heads, 0)
308 # Remember where the top was so we can use it as a limit later.
308 # Remember where the top was so we can use it as a limit later.
309 highestrev = max([self.rev(n) for n in nodestotag])
309 highestrev = max([self.rev(n) for n in nodestotag])
310 while nodestotag:
310 while nodestotag:
311 # grab a node to tag
311 # grab a node to tag
312 n = nodestotag.pop()
312 n = nodestotag.pop()
313 # Never tag nullid
313 # Never tag nullid
314 if n == nullid:
314 if n == nullid:
315 continue
315 continue
316 # A node's revision number represents its place in a
316 # A node's revision number represents its place in a
317 # topologically sorted list of nodes.
317 # topologically sorted list of nodes.
318 r = self.rev(n)
318 r = self.rev(n)
319 if r >= lowestrev:
319 if r >= lowestrev:
320 if n not in ancestors:
320 if n not in ancestors:
321 # If we are possibly a descendent of one of the roots
321 # If we are possibly a descendent of one of the roots
322 # and we haven't already been marked as an ancestor
322 # and we haven't already been marked as an ancestor
323 ancestors[n] = 1 # Mark as ancestor
323 ancestors[n] = 1 # Mark as ancestor
324 # Add non-nullid parents to list of nodes to tag.
324 # Add non-nullid parents to list of nodes to tag.
325 nodestotag.extend([p for p in self.parents(n) if
325 nodestotag.extend([p for p in self.parents(n) if
326 p != nullid])
326 p != nullid])
327 elif n in heads: # We've seen it before, is it a fake head?
327 elif n in heads: # We've seen it before, is it a fake head?
328 # So it is, real heads should not be the ancestors of
328 # So it is, real heads should not be the ancestors of
329 # any other heads.
329 # any other heads.
330 heads.pop(n)
330 heads.pop(n)
331 if not ancestors:
331 if not ancestors:
332 return nonodes
332 return nonodes
333 # Now that we have our set of ancestors, we want to remove any
333 # Now that we have our set of ancestors, we want to remove any
334 # roots that are not ancestors.
334 # roots that are not ancestors.
335
335
336 # If one of the roots was nullid, everything is included anyway.
336 # If one of the roots was nullid, everything is included anyway.
337 if lowestrev > -1:
337 if lowestrev > -1:
338 # But, since we weren't, let's recompute the lowest rev to not
338 # But, since we weren't, let's recompute the lowest rev to not
339 # include roots that aren't ancestors.
339 # include roots that aren't ancestors.
340
340
341 # Filter out roots that aren't ancestors of heads
341 # Filter out roots that aren't ancestors of heads
342 roots = [n for n in roots if n in ancestors]
342 roots = [n for n in roots if n in ancestors]
343 # Recompute the lowest revision
343 # Recompute the lowest revision
344 if roots:
344 if roots:
345 lowestrev = min([self.rev(n) for n in roots])
345 lowestrev = min([self.rev(n) for n in roots])
346 else:
346 else:
347 # No more roots? Return empty list
347 # No more roots? Return empty list
348 return nonodes
348 return nonodes
349 else:
349 else:
350 # We are descending from nullid, and don't need to care about
350 # We are descending from nullid, and don't need to care about
351 # any other roots.
351 # any other roots.
352 lowestrev = -1
352 lowestrev = -1
353 roots = [nullid]
353 roots = [nullid]
354 # Transform our roots list into a 'set' (i.e. a dictionary where the
354 # Transform our roots list into a 'set' (i.e. a dictionary where the
355 # values don't matter.
355 # values don't matter.
356 descendents = dict.fromkeys(roots, 1)
356 descendents = dict.fromkeys(roots, 1)
357 # Also, keep the original roots so we can filter out roots that aren't
357 # Also, keep the original roots so we can filter out roots that aren't
358 # 'real' roots (i.e. are descended from other roots).
358 # 'real' roots (i.e. are descended from other roots).
359 roots = descendents.copy()
359 roots = descendents.copy()
360 # Our topologically sorted list of output nodes.
360 # Our topologically sorted list of output nodes.
361 orderedout = []
361 orderedout = []
362 # Don't start at nullid since we don't want nullid in our output list,
362 # Don't start at nullid since we don't want nullid in our output list,
363 # and if nullid shows up in descedents, empty parents will look like
363 # and if nullid shows up in descedents, empty parents will look like
364 # they're descendents.
364 # they're descendents.
365 for r in xrange(max(lowestrev, 0), highestrev + 1):
365 for r in xrange(max(lowestrev, 0), highestrev + 1):
366 n = self.node(r)
366 n = self.node(r)
367 isdescendent = False
367 isdescendent = False
368 if lowestrev == -1: # Everybody is a descendent of nullid
368 if lowestrev == -1: # Everybody is a descendent of nullid
369 isdescendent = True
369 isdescendent = True
370 elif n in descendents:
370 elif n in descendents:
371 # n is already a descendent
371 # n is already a descendent
372 isdescendent = True
372 isdescendent = True
373 # This check only needs to be done here because all the roots
373 # This check only needs to be done here because all the roots
374 # will start being marked is descendents before the loop.
374 # will start being marked is descendents before the loop.
375 if n in roots:
375 if n in roots:
376 # If n was a root, check if it's a 'real' root.
376 # If n was a root, check if it's a 'real' root.
377 p = tuple(self.parents(n))
377 p = tuple(self.parents(n))
378 # If any of its parents are descendents, it's not a root.
378 # If any of its parents are descendents, it's not a root.
379 if (p[0] in descendents) or (p[1] in descendents):
379 if (p[0] in descendents) or (p[1] in descendents):
380 roots.pop(n)
380 roots.pop(n)
381 else:
381 else:
382 p = tuple(self.parents(n))
382 p = tuple(self.parents(n))
383 # A node is a descendent if either of its parents are
383 # A node is a descendent if either of its parents are
384 # descendents. (We seeded the dependents list with the roots
384 # descendents. (We seeded the dependents list with the roots
385 # up there, remember?)
385 # up there, remember?)
386 if (p[0] in descendents) or (p[1] in descendents):
386 if (p[0] in descendents) or (p[1] in descendents):
387 descendents[n] = 1
387 descendents[n] = 1
388 isdescendent = True
388 isdescendent = True
389 if isdescendent and ((ancestors is None) or (n in ancestors)):
389 if isdescendent and ((ancestors is None) or (n in ancestors)):
390 # Only include nodes that are both descendents and ancestors.
390 # Only include nodes that are both descendents and ancestors.
391 orderedout.append(n)
391 orderedout.append(n)
392 if (ancestors is not None) and (n in heads):
392 if (ancestors is not None) and (n in heads):
393 # We're trying to figure out which heads are reachable
393 # We're trying to figure out which heads are reachable
394 # from roots.
394 # from roots.
395 # Mark this head as having been reached
395 # Mark this head as having been reached
396 heads[n] = 1
396 heads[n] = 1
397 elif ancestors is None:
397 elif ancestors is None:
398 # Otherwise, we're trying to discover the heads.
398 # Otherwise, we're trying to discover the heads.
399 # Assume this is a head because if it isn't, the next step
399 # Assume this is a head because if it isn't, the next step
400 # will eventually remove it.
400 # will eventually remove it.
401 heads[n] = 1
401 heads[n] = 1
402 # But, obviously its parents aren't.
402 # But, obviously its parents aren't.
403 for p in self.parents(n):
403 for p in self.parents(n):
404 heads.pop(p, None)
404 heads.pop(p, None)
405 heads = [n for n in heads.iterkeys() if heads[n] != 0]
405 heads = [n for n in heads.iterkeys() if heads[n] != 0]
406 roots = roots.keys()
406 roots = roots.keys()
407 assert orderedout
407 assert orderedout
408 assert roots
408 assert roots
409 assert heads
409 assert heads
410 return (orderedout, roots, heads)
410 return (orderedout, roots, heads)
411
411
412 def heads(self, start=None):
412 def heads(self, start=None):
413 """return the list of all nodes that have no children
413 """return the list of all nodes that have no children
414
414
415 if start is specified, only heads that are descendants of
415 if start is specified, only heads that are descendants of
416 start will be returned
416 start will be returned
417
417
418 """
418 """
419 if start is None:
419 if start is None:
420 start = nullid
420 start = nullid
421 reachable = {start: 1}
421 reachable = {start: 1}
422 heads = {start: 1}
422 heads = {start: 1}
423 startrev = self.rev(start)
423 startrev = self.rev(start)
424
424
425 for r in xrange(startrev + 1, self.count()):
425 for r in xrange(startrev + 1, self.count()):
426 n = self.node(r)
426 n = self.node(r)
427 for pn in self.parents(n):
427 for pn in self.parents(n):
428 if pn in reachable:
428 if pn in reachable:
429 reachable[n] = 1
429 reachable[n] = 1
430 heads[n] = 1
430 heads[n] = 1
431 if pn in heads:
431 if pn in heads:
432 del heads[pn]
432 del heads[pn]
433 return heads.keys()
433 return heads.keys()
434
434
435 def children(self, node):
435 def children(self, node):
436 """find the children of a given node"""
436 """find the children of a given node"""
437 c = []
437 c = []
438 p = self.rev(node)
438 p = self.rev(node)
439 for r in range(p + 1, self.count()):
439 for r in range(p + 1, self.count()):
440 n = self.node(r)
440 n = self.node(r)
441 for pn in self.parents(n):
441 for pn in self.parents(n):
442 if pn == node:
442 if pn == node:
443 c.append(n)
443 c.append(n)
444 continue
444 continue
445 elif pn == nullid:
445 elif pn == nullid:
446 continue
446 continue
447 return c
447 return c
448
448
449 def lookup(self, id):
449 def lookup(self, id):
450 """locate a node based on revision number or subset of hex nodeid"""
450 """locate a node based on revision number or subset of hex nodeid"""
451 try:
451 try:
452 rev = int(id)
452 rev = int(id)
453 if str(rev) != id: raise ValueError
453 if str(rev) != id: raise ValueError
454 if rev < 0: rev = self.count() + rev
454 if rev < 0: rev = self.count() + rev
455 if rev < 0 or rev >= self.count(): raise ValueError
455 if rev < 0 or rev >= self.count(): raise ValueError
456 return self.node(rev)
456 return self.node(rev)
457 except (ValueError, OverflowError):
457 except (ValueError, OverflowError):
458 c = []
458 c = []
459 for n in self.nodemap:
459 for n in self.nodemap:
460 if hex(n).startswith(id):
460 if hex(n).startswith(id):
461 c.append(n)
461 c.append(n)
462 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
462 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
463 if len(c) < 1: raise RevlogError(_("No match found"))
463 if len(c) < 1: raise RevlogError(_("No match found"))
464 return c[0]
464 return c[0]
465
465
466 return None
466 return None
467
467
468 def diff(self, a, b):
468 def diff(self, a, b):
469 """return a delta between two revisions"""
469 """return a delta between two revisions"""
470 return mdiff.textdiff(a, b)
470 return mdiff.textdiff(a, b)
471
471
472 def patches(self, t, pl):
472 def patches(self, t, pl):
473 """apply a list of patches to a string"""
473 """apply a list of patches to a string"""
474 return mdiff.patches(t, pl)
474 return mdiff.patches(t, pl)
475
475
476 def delta(self, node):
476 def delta(self, node):
477 """return or calculate a delta between a node and its predecessor"""
477 """return or calculate a delta between a node and its predecessor"""
478 r = self.rev(node)
478 r = self.rev(node)
479 b = self.base(r)
479 b = self.base(r)
480 if r == b:
480 if r == b:
481 return self.diff(self.revision(self.node(r - 1)),
481 return self.diff(self.revision(self.node(r - 1)),
482 self.revision(node))
482 self.revision(node))
483 else:
483 else:
484 f = self.opener(self.datafile)
484 f = self.opener(self.datafile)
485 f.seek(self.start(r))
485 f.seek(self.start(r))
486 data = f.read(self.length(r))
486 data = f.read(self.length(r))
487 return decompress(data)
487 return decompress(data)
488
488
489 def revision(self, node):
489 def revision(self, node):
490 """return an uncompressed revision of a given"""
490 """return an uncompressed revision of a given"""
491 if node == nullid: return ""
491 if node == nullid: return ""
492 if self.cache and self.cache[0] == node: return self.cache[2]
492 if self.cache and self.cache[0] == node: return self.cache[2]
493
493
494 # look up what we need to read
494 # look up what we need to read
495 text = None
495 text = None
496 rev = self.rev(node)
496 rev = self.rev(node)
497 start, length, base, link, p1, p2, node = self.index[rev]
497 start, length, base, link, p1, p2, node = self.index[rev]
498 end = start + length
498 end = start + length
499 if base != rev: start = self.start(base)
499 if base != rev: start = self.start(base)
500
500
501 # do we have useful data cached?
501 # do we have useful data cached?
502 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
502 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
503 base = self.cache[1]
503 base = self.cache[1]
504 start = self.start(base + 1)
504 start = self.start(base + 1)
505 text = self.cache[2]
505 text = self.cache[2]
506 last = 0
506 last = 0
507
507
508 f = self.opener(self.datafile)
508 f = self.opener(self.datafile)
509 f.seek(start)
509 f.seek(start)
510 data = f.read(end - start)
510 data = f.read(end - start)
511
511
512 if text is None:
512 if text is None:
513 last = self.length(base)
513 last = self.length(base)
514 text = decompress(data[:last])
514 text = decompress(data[:last])
515
515
516 bins = []
516 bins = []
517 for r in xrange(base + 1, rev + 1):
517 for r in xrange(base + 1, rev + 1):
518 s = self.length(r)
518 s = self.length(r)
519 bins.append(decompress(data[last:last + s]))
519 bins.append(decompress(data[last:last + s]))
520 last = last + s
520 last = last + s
521
521
522 text = mdiff.patches(text, bins)
522 text = mdiff.patches(text, bins)
523
523
524 if node != hash(text, p1, p2):
524 if node != hash(text, p1, p2):
525 raise RevlogError(_("integrity check failed on %s:%d")
525 raise RevlogError(_("integrity check failed on %s:%d")
526 % (self.datafile, rev))
526 % (self.datafile, rev))
527
527
528 self.cache = (node, rev, text)
528 self.cache = (node, rev, text)
529 return text
529 return text
530
530
531 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
531 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
532 """add a revision to the log
532 """add a revision to the log
533
533
534 text - the revision data to add
534 text - the revision data to add
535 transaction - the transaction object used for rollback
535 transaction - the transaction object used for rollback
536 link - the linkrev data to add
536 link - the linkrev data to add
537 p1, p2 - the parent nodeids of the revision
537 p1, p2 - the parent nodeids of the revision
538 d - an optional precomputed delta
538 d - an optional precomputed delta
539 """
539 """
540 if text is None: text = ""
540 if text is None: text = ""
541 if p1 is None: p1 = self.tip()
541 if p1 is None: p1 = self.tip()
542 if p2 is None: p2 = nullid
542 if p2 is None: p2 = nullid
543
543
544 node = hash(text, p1, p2)
544 node = hash(text, p1, p2)
545
545
546 if node in self.nodemap:
546 if node in self.nodemap:
547 return node
547 return node
548
548
549 n = self.count()
549 n = self.count()
550 t = n - 1
550 t = n - 1
551
551
552 if n:
552 if n:
553 base = self.base(t)
553 base = self.base(t)
554 start = self.start(base)
554 start = self.start(base)
555 end = self.end(t)
555 end = self.end(t)
556 if not d:
556 if not d:
557 prev = self.revision(self.tip())
557 prev = self.revision(self.tip())
558 d = self.diff(prev, str(text))
558 d = self.diff(prev, str(text))
559 data = compress(d)
559 data = compress(d)
560 l = len(data[1]) + len(data[0])
560 l = len(data[1]) + len(data[0])
561 dist = end - start + l
561 dist = end - start + l
562
562
563 # full versions are inserted when the needed deltas
563 # full versions are inserted when the needed deltas
564 # become comparable to the uncompressed text
564 # become comparable to the uncompressed text
565 if not n or dist > len(text) * 2:
565 if not n or dist > len(text) * 2:
566 data = compress(text)
566 data = compress(text)
567 l = len(data[1]) + len(data[0])
567 l = len(data[1]) + len(data[0])
568 base = n
568 base = n
569 else:
569 else:
570 base = self.base(t)
570 base = self.base(t)
571
571
572 offset = 0
572 offset = 0
573 if t >= 0:
573 if t >= 0:
574 offset = self.end(t)
574 offset = self.end(t)
575
575
576 e = (offset, l, base, link, p1, p2, node)
576 e = (offset, l, base, link, p1, p2, node)
577
577
578 self.index.append(e)
578 self.index.append(e)
579 self.nodemap[node] = n
579 self.nodemap[node] = n
580 entry = struct.pack(indexformat, *e)
580 entry = struct.pack(indexformat, *e)
581
581
582 transaction.add(self.datafile, e[0])
582 transaction.add(self.datafile, e[0])
583 f = self.opener(self.datafile, "a")
583 f = self.opener(self.datafile, "a")
584 if data[0]:
584 if data[0]:
585 f.write(data[0])
585 f.write(data[0])
586 f.write(data[1])
586 f.write(data[1])
587 transaction.add(self.indexfile, n * len(entry))
587 transaction.add(self.indexfile, n * len(entry))
588 self.opener(self.indexfile, "a").write(entry)
588 self.opener(self.indexfile, "a").write(entry)
589
589
590 self.cache = (node, n, text)
590 self.cache = (node, n, text)
591 return node
591 return node
592
592
593 def ancestor(self, a, b):
593 def ancestor(self, a, b):
594 """calculate the least common ancestor of nodes a and b"""
594 """calculate the least common ancestor of nodes a and b"""
595 # calculate the distance of every node from root
595 # calculate the distance of every node from root
596 dist = {nullid: 0}
596 dist = {nullid: 0}
597 for i in xrange(self.count()):
597 for i in xrange(self.count()):
598 n = self.node(i)
598 n = self.node(i)
599 p1, p2 = self.parents(n)
599 p1, p2 = self.parents(n)
600 dist[n] = max(dist[p1], dist[p2]) + 1
600 dist[n] = max(dist[p1], dist[p2]) + 1
601
601
602 # traverse ancestors in order of decreasing distance from root
602 # traverse ancestors in order of decreasing distance from root
603 def ancestors(node):
603 def ancestors(node):
604 # we store negative distances because heap returns smallest member
604 # we store negative distances because heap returns smallest member
605 h = [(-dist[node], node)]
605 h = [(-dist[node], node)]
606 seen = {}
606 seen = {}
607 earliest = self.count()
607 earliest = self.count()
608 while h:
608 while h:
609 d, n = heapq.heappop(h)
609 d, n = heapq.heappop(h)
610 if n not in seen:
610 if n not in seen:
611 seen[n] = 1
611 seen[n] = 1
612 r = self.rev(n)
612 r = self.rev(n)
613 yield (-d, n)
613 yield (-d, n)
614 for p in self.parents(n):
614 for p in self.parents(n):
615 heapq.heappush(h, (-dist[p], p))
615 heapq.heappush(h, (-dist[p], p))
616
616
617 def generations(node):
617 def generations(node):
618 sg, s = None, {}
618 sg, s = None, {}
619 for g,n in ancestors(node):
619 for g,n in ancestors(node):
620 if g != sg:
620 if g != sg:
621 if sg:
621 if sg:
622 yield sg, s
622 yield sg, s
623 sg, s = g, {n:1}
623 sg, s = g, {n:1}
624 else:
624 else:
625 s[n] = 1
625 s[n] = 1
626 yield sg, s
626 yield sg, s
627
627
628 x = generations(a)
628 x = generations(a)
629 y = generations(b)
629 y = generations(b)
630 gx = x.next()
630 gx = x.next()
631 gy = y.next()
631 gy = y.next()
632
632
633 # increment each ancestor list until it is closer to root than
633 # increment each ancestor list until it is closer to root than
634 # the other, or they match
634 # the other, or they match
635 while 1:
635 while 1:
636 #print "ancestor gen %s %s" % (gx[0], gy[0])
636 #print "ancestor gen %s %s" % (gx[0], gy[0])
637 if gx[0] == gy[0]:
637 if gx[0] == gy[0]:
638 # find the intersection
638 # find the intersection
639 i = [ n for n in gx[1] if n in gy[1] ]
639 i = [ n for n in gx[1] if n in gy[1] ]
640 if i:
640 if i:
641 return i[0]
641 return i[0]
642 else:
642 else:
643 #print "next"
643 #print "next"
644 gy = y.next()
644 gy = y.next()
645 gx = x.next()
645 gx = x.next()
646 elif gx[0] < gy[0]:
646 elif gx[0] < gy[0]:
647 #print "next y"
647 #print "next y"
648 gy = y.next()
648 gy = y.next()
649 else:
649 else:
650 #print "next x"
650 #print "next x"
651 gx = x.next()
651 gx = x.next()
652
652
    def group(self, nodelist, lookup, infocollect = None):
        """calculate a delta group

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. the first delta is
        parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        have this parent as it has all history before these
        changesets. parent is parent[0]

        lookup(node) must return the changeset node this revision is
        linked to; it becomes part of the per-chunk metadata.
        infocollect, if not None, is called with each node before its
        chunk is emitted.

        Yields a length-prefixed stream: for each node a ">l" size
        word, an 80-byte metadata block (node, p1, p2, link) and the
        delta body; a zero size word terminates the stream.
        """
        revs = [self.rev(n) for n in nodelist]
        # used as a set of revision numbers we must read from disk
        needed = dict.fromkeys(revs, 1)

        # if we don't have any revisions touched by these changesets, bail
        if not revs:
            yield struct.pack(">l", 0)
            return

        # add the parent of the first rev: the receiver already has it,
        # and the first emitted delta is computed against it
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # for each delta that isn't contiguous in the log, we need to
        # reconstruct the base, reconstruct the result, and then
        # calculate the delta. We also need to do this where we've
        # stored a full version and not a delta
        for i in xrange(0, len(revs) - 1):
            a, b = revs[i], revs[i + 1]
            if a + 1 != b or self.base(b) == b:
                for j in xrange(self.base(a), a + 1):
                    needed[j] = 1
                for j in xrange(self.base(b), b + 1):
                    needed[j] = 1

        # calculate spans to retrieve from datafile
        # (coalesce adjacent on-disk chunks into a single read)
        needed = needed.keys()
        needed.sort()
        spans = []
        oo = -1   # offset of the span being built
        ol = 0    # accumulated length of the span being built
        for n in needed:
            if n < 0: continue   # rev -1 (null parent) has no data on disk
            o = self.start(n)
            l = self.length(n)
            if oo + ol == o: # can we merge with the previous?
                nl = spans[-1][2]
                nl.append((n, l))
                ol += l
                spans[-1] = (oo, ol, nl)
            else:
                oo = o
                ol = l
                spans.append((oo, ol, [(n, l)]))

        # read spans in, divide up chunks
        chunks = {}   # rev number -> decompressed on-disk chunk
        for span in spans:
            # we reopen the file for each span to make http happy for now
            f = self.opener(self.datafile)
            f.seek(span[0])
            data = f.read(span[1])

            # divide up the span
            pos = 0
            for r, l in span[2]:
                chunks[r] = decompress(data[pos: pos + l])
                pos += l

        # helper to reconstruct intermediate versions: applies the
        # stored deltas base+1..rev on top of text
        def construct(text, base, rev):
            bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
            return mdiff.patches(text, bins)

        # build deltas
        # NOTE(review): this accumulator is never used — chunks are
        # yielded directly below
        deltas = []
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            n = self.node(b)

            if infocollect is not None:
                infocollect(n)

            # do we need to construct a new delta?
            if a + 1 != b or self.base(b) == b:
                # reconstruct the full text of a (the delta base)
                if a >= 0:
                    base = self.base(a)
                    ta = chunks[self.base(a)]
                    ta = construct(ta, base, a)
                else:
                    ta = ""

                base = self.base(b)
                # a's text can serve as the reconstruction base for b
                # when a lies on b's delta chain
                if a > base:
                    base = a
                    tb = ta
                else:
                    tb = chunks[self.base(b)]
                tb = construct(tb, base, b)
                d = self.diff(ta, tb)
            else:
                # contiguous in the log: the stored delta is exactly
                # the one we want
                d = chunks[b]

            p = self.parents(n)
            meta = n + p[0] + p[1] + lookup(n)
            l = struct.pack(">l", len(meta) + len(d) + 4)
            yield l
            yield meta
            yield d

        yield struct.pack(">l", 0)
762
762
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        """
        add a delta group

        given a set of deltas, add them to the revision log. the
        first delta is against its parent, which should be in our
        log, the rest are against the previous delta.

        revs is an iterable of chunks as produced by group(): 80 bytes
        of metadata (node, p1, p2, changeset node) followed by the
        delta body. linkmapper maps a changeset node to its link
        revision. Returns the node of the last chunk processed.

        NOTE(review): the unique flag is currently ignored — the
        duplicate check below is commented out.
        """

        #track the base of the current delta log
        r = self.count()
        t = r - 1        # current tip revision number (-1 if empty)
        node = nullid

        base = prev = -1
        start = end = measure = 0
        if r:
            start = self.start(self.base(t))
            end = self.end(t)
            measure = self.length(self.base(t))
            base = self.base(t)
            prev = self.tip()

        # journal the current file sizes so a failure can roll back
        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        chain = None
        for chunk in revs:
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                # if unique:
                #     raise RevlogError(_("already have %s") % hex(node[:4]))
                chain = node
                continue
            delta = chunk[80:]

            # both parents must already be known to this revlog
            for p in (p1, p2):
                if not p in self.nodemap:
                    raise RevlogError(_("unknown parent %s") % short(p1))

            if not chain:
                # retrieve the parent revision of the delta chain
                chain = p1
                if not chain in self.nodemap:
                    raise RevlogError(_("unknown base %s") % short(chain[:4]))

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                tempd = compress(delta)
                cdelta = tempd[0] + tempd[1]

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                # rebuild the full text and store it via the normal path
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise RevlogError(_("consistency error adding group"))
                measure = len(text)
            else:
                # delta chain intact and still small: append the
                # compressed delta as-is
                e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            # advance tip bookkeeping to the revision just added
            t, r, chain, prev = r, r + 1, node, node
            start = self.start(self.base(t))
            end = self.end(t)

        dfh.close()
        ifh.close()
        return node
848
848
    def strip(self, rev, minlink):
        """Remove revision *rev* and everything after it from the log.

        minlink is the oldest link revision we are allowed to strip;
        rev is advanced past any revision whose link field is older
        than minlink, so those revisions are preserved. Truncates both
        the data and index files on disk, then drops the corresponding
        in-memory index and nodemap entries.
        """
        if self.count() == 0 or rev >= self.count():
            return

        # When stripping away a revision, we need to make sure it
        # does not actually belong to an older changeset.
        # The minlink parameter defines the oldest revision
        # we're allowed to strip away.
        while minlink > self.index[rev][3]:   # index field 3 is the link rev
            rev += 1
            if rev >= self.count():
                return

        # first truncate the files on disk
        end = self.start(rev)
        self.opener(self.datafile, "a").truncate(end)
        end = rev * struct.calcsize(indexformat)
        self.opener(self.indexfile, "a").truncate(end)

        # then reset internal state in memory to forget those revisions
        self.cache = None
        for p in self.index[rev:]:
            del self.nodemap[p[6]]   # index field 6 is the node hash
        del self.index[rev:]

        # truncating the lazyindex also truncates the lazymap.
        if isinstance(self.index, lazyindex):
            self.index.trunc(end)
877
877
878
878
    def checksize(self):
        """Return expected minus actual size of the data file.

        0 means consistent; a positive value indicates truncation,
        a negative one trailing garbage. A missing data file is
        reported as 0 (consistent); other IOErrors propagate.
        """
        expected = 0
        if self.count():
            # end of the last revision == expected data-file length
            expected = self.end(self.count() - 1)
        try:
            f = self.opener(self.datafile)
            f.seek(0, 2)   # seek to EOF to measure the real size
            actual = f.tell()
            return expected - actual
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # no data file at all: treat as consistent
                return 0
            raise
892
892
893
893
@@ -1,78 +1,78 b''
1 # transaction.py - simple journalling scheme for mercurial
1 # transaction.py - simple journalling scheme for mercurial
2 #
2 #
3 # This transaction scheme is intended to gracefully handle program
3 # This transaction scheme is intended to gracefully handle program
4 # errors and interruptions. More serious failures like system crashes
4 # errors and interruptions. More serious failures like system crashes
5 # can be recovered with an fsck-like tool. As the whole repository is
5 # can be recovered with an fsck-like tool. As the whole repository is
6 # effectively log-structured, this should amount to simply truncating
6 # effectively log-structured, this should amount to simply truncating
7 # anything that isn't referenced in the changelog.
7 # anything that isn't referenced in the changelog.
8 #
8 #
9 # Copyright 2005 Matt Mackall <mpm@selenic.com>
9 # Copyright 2005 Matt Mackall <mpm@selenic.com>
10 #
10 #
11 # This software may be used and distributed according to the terms
11 # This software may be used and distributed according to the terms
12 # of the GNU General Public License, incorporated herein by reference.
12 # of the GNU General Public License, incorporated herein by reference.
13
13
14 import os
14 import os
15 from i18n import gettext as _
15 from i18n import gettext as _
16
16
class transaction(object):
    """Journalled group of file truncation points.

    Before a file is appended to, add() records its pre-transaction
    length in an on-disk journal. close() commits by discarding the
    journal; abort() (also invoked from __del__ on an uncommitted
    instance) rolls every recorded file back to its saved offset.
    """
    def __init__(self, report, opener, journal, after=None):
        # Leave journal unset until the journal file really exists, so
        # __del__ stays harmless if construction fails early.
        self.journal = None

        # a leftover journal means an earlier transaction crashed
        if os.path.exists(journal):
            raise AssertionError(_("journal already exists - run hg recover"))

        self.report = report
        self.opener = opener
        self.after = after
        self.entries = []
        self.map = {}
        self.journal = journal

        self.file = open(self.journal, "w")

    def __del__(self):
        # journal still set at GC time => close() never ran: undo it
        if not self.journal:
            return
        if self.entries:
            self.abort()
        self.file.close()
        try:
            os.unlink(self.journal)
        except:
            pass

    def add(self, file, offset):
        """Record that *file* may be truncated back to *offset*.

        Only the first call per file is recorded."""
        if file in self.map:
            return
        entry = (file, offset)
        self.entries.append(entry)
        self.map[file] = 1
        # persist enough data in the journal to redo the truncate
        self.file.write("%s\0%d\n" % entry)
        self.file.flush()

    def close(self):
        """Commit: drop the journal and run the after-hook, if any."""
        self.file.close()
        self.entries = []
        if self.after:
            self.after()
        else:
            os.unlink(self.journal)
        self.journal = None

    def abort(self):
        """Roll every journalled file back to its recorded offset."""
        if not self.entries:
            return

        self.report(_("transaction abort!\n"))

        for name, offset in self.entries:
            try:
                self.opener(name, "a").truncate(offset)
            except:
                # best effort: report and keep rolling back the rest
                self.report(_("failed to truncate %s\n") % name)

        self.entries = []

        self.report(_("rollback completed\n"))
72
72
def rollback(opener, file):
    """Replay a journal written by transaction: truncate each recorded
    file back to its saved offset, then remove the journal itself."""
    for line in open(file).readlines():
        name, offset = line.split('\0')
        # int() tolerates the trailing newline left by the journal format
        opener(name, "a").truncate(int(offset))
    os.unlink(file)
78
78
@@ -1,155 +1,155 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, ConfigParser
8 import os, ConfigParser
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "re socket sys util")
11 demandload(globals(), "re socket sys util")
12
12
class ui(object):
    """User-interface helper: layered configuration access plus
    verbosity-filtered output, prompting and editor invocation."""

    def __init__(self, verbose=False, debug=False, quiet=False,
                 interactive=True):
        # in-memory overrides set via setconfig(); consulted before cdata
        self.overlay = {}
        self.cdata = ConfigParser.SafeConfigParser()
        self.readconfig(util.rcpath)

        # seed the flags from config files...
        self.quiet = self.configbool("ui", "quiet")
        self.verbose = self.configbool("ui", "verbose")
        self.debugflag = self.configbool("ui", "debug")
        self.interactive = self.configbool("ui", "interactive", True)

        # ...then fold in the constructor arguments
        self.updateopts(verbose, debug, quiet, interactive)

    def updateopts(self, verbose=False, debug=False, quiet=False,
                   interactive=True):
        """Fold command-line style flags into the current settings.

        debug implies verbose; verbose or debug defeats quiet;
        interactive can only be switched off, never back on.
        """
        self.quiet = (self.quiet or quiet) and not verbose and not debug
        self.verbose = (self.verbose or verbose) or debug
        self.debugflag = (self.debugflag or debug)
        self.interactive = (self.interactive and interactive)

    def readconfig(self, fn):
        """Read a config file (or list of files) into self.cdata."""
        if isinstance(fn, basestring):
            fn = [fn]
        for f in fn:
            try:
                self.cdata.read(f)
            except ConfigParser.ParsingError, inst:
                raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))

    def setconfig(self, section, name, val):
        """Override a config value in memory; wins over any file."""
        self.overlay[(section, name)] = val

    def config(self, section, name, default=None):
        """Return a config value; overlay wins over files, else default."""
        if self.overlay.has_key((section, name)):
            return self.overlay[(section, name)]
        if self.cdata.has_option(section, name):
            return self.cdata.get(section, name)
        return default

    def configbool(self, section, name, default=False):
        """Like config(), but file values are coerced to booleans.

        NOTE(review): overlay values are returned as-is, without
        boolean coercion — confirm callers only setconfig() booleans.
        """
        if self.overlay.has_key((section, name)):
            return self.overlay[(section, name)]
        if self.cdata.has_option(section, name):
            return self.cdata.getboolean(section, name)
        return default

    def configitems(self, section):
        """Return the (name, value) pairs of a section, or []."""
        if self.cdata.has_section(section):
            return self.cdata.items(section)
        return []

    def walkconfig(self):
        """Yield (section, name, value) for every effective setting.

        Overlay entries come first and shadow file entries; newlines
        in file values are escaped as literal backslash-n.
        """
        seen = {}
        for (section, name), value in self.overlay.iteritems():
            yield section, name, value
            seen[section, name] = 1
        for section in self.cdata.sections():
            for name, value in self.cdata.items(section):
                if (section, name) in seen: continue
                yield section, name, value.replace('\n', '\\n')
                seen[section, name] = 1

    def extensions(self):
        """Return the configured [extensions] entries."""
        return self.configitems("extensions")

    def username(self):
        """Best-guess committer identity: $HGUSER, then ui.username,
        then $EMAIL, then login-name@fqdn."""
        return (os.environ.get("HGUSER") or
                self.config("ui", "username") or
                os.environ.get("EMAIL") or
                (os.environ.get("LOGNAME",
                                os.environ.get("USERNAME", "unknown"))
                 + '@' + socket.getfqdn()))

    def shortuser(self, user):
        """Return a short representation of a user name or email address."""
        if not self.verbose:
            # strip the domain part of an email address...
            f = user.find('@')
            if f >= 0:
                user = user[:f]
            # ...and any "Real Name <" prefix
            f = user.find('<')
            if f >= 0:
                user = user[f+1:]
        return user

    def expandpath(self, loc, root=""):
        """Map a symbolic [paths] name to its configured path.

        Non-URL paths are joined to *root*; unknown names pass through
        unchanged.
        """
        paths = {}
        for name, path in self.configitems("paths"):
            m = path.find("://")
            if m == -1:
                # not a URL: interpret relative to the repository root
                path = os.path.join(root, path)
            paths[name] = path

        return paths.get(loc, loc)

    def write(self, *args):
        # unconditional output to stdout
        for a in args:
            sys.stdout.write(str(a))

    def write_err(self, *args):
        # flush stdout first so interleaved output stays ordered
        sys.stdout.flush()
        for a in args:
            sys.stderr.write(str(a))

    def readline(self):
        # read one line from stdin, stripping the trailing newline
        return sys.stdin.readline()[:-1]
    def prompt(self, msg, pat, default="y"):
        """Prompt until the reply matches regex *pat*; return *default*
        immediately when not interactive."""
        if not self.interactive: return default
        while 1:
            self.write(msg, " ")
            r = self.readline()
            if re.match(pat, r):
                return r
            else:
                self.write(_("unrecognized response\n"))
    def status(self, *msg):
        # suppressed in quiet mode
        if not self.quiet: self.write(*msg)
    def warn(self, *msg):
        # warnings always go to stderr
        self.write_err(*msg)
    def note(self, *msg):
        # shown only in verbose mode
        if self.verbose: self.write(*msg)
    def debug(self, *msg):
        # shown only in debug mode
        if self.debugflag: self.write(*msg)
    def edit(self, text):
        """Spawn the user's editor ($HGEDITOR, ui.editor or $EDITOR,
        falling back to vi) on *text* in a temp file and return the
        edited result with "HG:" lines removed."""
        import tempfile
        (fd, name) = tempfile.mkstemp("hg")
        f = os.fdopen(fd, "w")
        f.write(text)
        f.close()

        editor = (os.environ.get("HGEDITOR") or
                  self.config("ui", "editor") or
                  os.environ.get("EDITOR", "vi"))

        # expose the committer identity to editor hooks
        os.environ["HGUSER"] = self.username()
        util.system("%s %s" % (editor, name), errprefix=_("edit failed"))

        t = open(name).read()
        t = re.sub("(?m)^HG:.*\n", "", t)

        os.unlink(name)

        return t
General Comments 0
You need to be logged in to leave comments. Login now