##// END OF EJS Templates
merge with crew.
Vadim Gelfer -
r1757:23012d48 merge default
parent child Browse files
Show More
@@ -1,287 +1,287 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # This software may be used and distributed according to the terms
3 # This software may be used and distributed according to the terms
4 # of the GNU General Public License, incorporated herein by reference.
4 # of the GNU General Public License, incorporated herein by reference.
5
5
6 from mercurial.demandload import demandload
6 from mercurial.demandload import demandload
7 demandload(globals(), "os sys sets")
7 demandload(globals(), "os sys sets")
8 from mercurial import hg
8 from mercurial import hg
9
9
# Version of the bisect extension itself (not of Mercurial).
versionstr = "0.0.3"
def lookup_rev(ui, repo, rev=None):
    """returns rev or the checked-out revision if rev is None

    When rev is None the single dirstate parent is used; if the working
    directory has zero or two parents (uncommitted merge), the user is
    told to commit or revert and the process exits with status 1.
    """
    if rev is not None:
        return repo.lookup(rev)
    parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
    if len(parents) != 1:
        ui.warn("unexpected number of parents\n")
        ui.warn("please commit or revert\n")
        sys.exit(1)
    return parents.pop()
22
22
def check_clean(ui, repo):
    """Exit with status 1 unless the working directory is clean.

    Only modifications, additions and removals count as dirty;
    deleted and unknown files are tolerated.
    """
    modified, added, removed, deleted, unknown = repo.changes()
    if not (modified or added or removed):
        return
    ui.warn("Repository is not clean, please commit or revert\n")
    sys.exit(1)
28
28
class bisect(object):
    """dichotomic search in the DAG of changesets"""
    def __init__(self, ui, repo):
        # State is persisted in .hg/bisect/{good,bad}; reload it if a
        # bisection is already in progress.
        self.repo = repo
        self.path = os.path.join(repo.join(""), "bisect")
        self.ui = ui
        self.goodrevs = []   # binary nodes known to be good
        self.badrev = None   # single binary node known to be bad
        self.good_dirty = 0
        self.bad_dirty = 0
        self.good_path = os.path.join(self.path, "good")
        self.bad_path = os.path.join(self.path, "bad")

        s = self.good_path
        if os.path.exists(s):
            self.goodrevs = self.repo.opener(s).read().splitlines()
            self.goodrevs = [hg.bin(x) for x in self.goodrevs]
        s = self.bad_path
        if os.path.exists(s):
            r = self.repo.opener(s).read().splitlines()
            if r:
                self.badrev = hg.bin(r.pop(0))

    def __del__(self):
        # Write the state back on destruction, but only while a
        # bisection is active (the directory exists).
        if not os.path.isdir(self.path):
            return
        f = self.repo.opener(self.good_path, "w")
        f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
        if len(self.goodrevs) > 0:
            f.write("\n")
        f = self.repo.opener(self.bad_path, "w")
        if self.badrev:
            f.write(hg.hex(self.badrev) + "\n")

    def init(self):
        """start a new bisection"""
        if os.path.isdir(self.path):
            self.ui.warn("bisect directory already exists\n")
            return 1
        os.mkdir(self.path)
        check_clean(self.ui, self.repo)
        return 0

    def reset(self):
        """finish a bisection"""
        if os.path.isdir(self.path):
            sl = [self.bad_path, self.good_path]
            for s in sl:
                if os.path.exists(s):
                    os.unlink(s)
            os.rmdir(self.path)
        # Not sure about this
        #self.ui.write("Going back to tip\n")
        #self.repo.update(self.repo.changelog.tip())
        return 1

    def num_ancestors(self, head=None, stop=None):
        """
        returns a dict with the mapping:
        node -> number of ancestors (self included)
        for all nodes who are ancestor of head and
        not in stop.
        """
        if head is None:
            head = self.badrev
        return self.__ancestors_and_nb_ancestors(head, stop)[1]

    def ancestors(self, head=None, stop=None):
        """
        returns the set of the ancestors of head (self included)
        who are not in stop.
        """
        if head is None:
            head = self.badrev
        return self.__ancestors_and_nb_ancestors(head, stop)[0]

    def __ancestors_and_nb_ancestors(self, head, stop=None):
        """
        if stop is None then ancestors of goodrevs are used as
        lower limit.

        returns (anc, n_child) where anc is the set of the ancestors of head
        and n_child is a dictionary with the following mapping:
        node -> number of ancestors (self included)
        """
        cl = self.repo.changelog
        if not stop:
            # Default lower limit: everything reachable from the good revs.
            stop = sets.Set([])
            for g in reversed(self.goodrevs):
                if g in stop:
                    continue
                stop.update(cl.reachable(g))
        def num_children(a):
            """
            returns a dictionary with the following mapping
            node -> [number of children, empty set]
            """
            d = {a: [0, sets.Set([])]}
            for i in xrange(cl.rev(a)+1):
                n = cl.node(i)
                if n not in d:
                    d[n] = [0, sets.Set([])]
                parents = [p for p in cl.parents(n) if p != hg.nullid]
                for p in parents:
                    d[p][0] += 1
            return d

        if head in stop:
            self.ui.warn("Inconsistent state, %s is good and bad\n"
                         % hg.hex(head))
            sys.exit(1)
        # Sweep the changelog in topological (index) order, accumulating
        # each node's ancestor set; a node's entry is collapsed to a
        # plain count once all of its children have been processed.
        n_child = num_children(head)
        for i in xrange(cl.rev(head)+1):
            n = cl.node(i)
            parents = [p for p in cl.parents(n) if p != hg.nullid]
            for p in parents:
                n_child[p][0] -= 1
                if n not in stop:
                    n_child[n][1].union_update(n_child[p][1])
                if n_child[p][0] == 0:
                    n_child[p] = len(n_child[p][1])
            if n not in stop:
                n_child[n][1].add(n)
            if n_child[n][0] == 0:
                if n == head:
                    anc = n_child[n][1]
                n_child[n] = len(n_child[n][1])
        return anc, n_child

    def next(self):
        """Pick the revision that best halves the remaining search space."""
        if not self.badrev:
            self.ui.warn("You should give at least one bad\n")
            sys.exit(1)
        if not self.goodrevs:
            self.ui.warn("No good revision given\n")
            self.ui.warn("Assuming the first revision is good\n")
        ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(self.badrev)
        tot = len(ancestors)
        if tot == 1:
            # Only one candidate left: it must be the bad revision itself.
            if ancestors.pop() != self.badrev:
                self.ui.warn("Could not find the first bad revision\n")
                sys.exit(1)
            self.ui.write(
                "The first bad revision is : %s\n" % hg.hex(self.badrev))
            sys.exit(0)
        self.ui.write("%d revisions left\n" % tot)
        best_rev = None
        best_len = -1
        # Choose the node whose ancestor count is closest to tot/2.
        for n in ancestors:
            l = num_ancestors[n]
            l = min(l, tot - l)
            if l > best_len:
                best_len = l
                best_rev = n
        return best_rev

    def autonext(self):
        """find and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = self.next()
        self.ui.write("Now testing %s\n" % hg.hex(rev))
        return self.repo.update(rev, force=True)

    def good(self, rev):
        """Record rev as a known-good revision."""
        self.goodrevs.append(rev)

    def autogood(self, rev=None):
        """mark revision as good and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = lookup_rev(self.ui, self.repo, rev)
        self.good(rev)
        if self.badrev:
            self.autonext()

    def bad(self, rev):
        """Record rev as the known-bad revision."""
        self.badrev = rev

    def autobad(self, rev=None):
        """mark revision as bad and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = lookup_rev(self.ui, self.repo, rev)
        self.bad(rev)
        if self.goodrevs:
            self.autonext()
213
213
214 # should we put it in the class ?
214 # should we put it in the class ?
def test(ui, repo, rev):
    """test the bisection code

    Simulates a full bisection session: 'rev' plays the role of the
    first bad changeset, and each candidate proposed by bisect.next()
    is classified good/bad by ancestry instead of by running a test.
    Returns 0 on success, 1 if bisection converged on the wrong node.
    """
    b = bisect(ui, repo)
    rev = repo.lookup(rev)
    ui.write("testing with rev %s\n" % hg.hex(rev))
    anc = b.ancestors()
    while len(anc) > 1:
        if rev not in anc:
            ui.warn("failure while bisecting\n")
            sys.exit(1)
        ui.write("it worked :)\n")
        new_rev = b.next()
        ui.write("choosing if good or bad\n")
        # If the target is an ancestor of the candidate, the candidate
        # is (still) bad; otherwise it is good.
        if rev in b.ancestors(head=new_rev):
            b.bad(new_rev)
            ui.write("it is bad\n")
        else:
            b.good(new_rev)
            ui.write("it is good\n")
        anc = b.ancestors()
        repo.update(new_rev, force=True)
    for v in anc:
        if v != rev:
            ui.warn("fail to found cset! :(\n")
            return 1
    ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
    ui.write("Everything is ok :)\n")
    return 0
243
243
def bisect_run(ui, repo, cmd=None, *args):
    """bisect extension: dichotomic search in the DAG of changesets
    for subcommands see "hg bisect help\"
    """
    def help_(cmd=None, *args):
        """show help for a given bisect subcommand or all subcommands"""
        cmdtable = bisectcmdtable
        if cmd:
            doc = cmdtable[cmd][0].__doc__
            synopsis = cmdtable[cmd][2]
            ui.write(synopsis + "\n")
            ui.write("\n" + doc + "\n")
            return
        ui.write("list of subcommands for the bisect extension\n\n")
        cmds = sorted(cmdtable.keys())
        m = max([len(c) for c in cmds])
        for cmd in cmds:
            # First docstring line is the subcommand's short description.
            doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
            ui.write(" %-*s %s\n" % (m, cmd, doc))

    b = bisect(ui, repo)
    # subcommand -> (handler, max number of arguments, synopsis)
    bisectcmdtable = {
        "init": (b.init, 0, "hg bisect init"),
        "bad": (b.autobad, 1, "hg bisect bad [<rev>]"),
        "good": (b.autogood, 1, "hg bisect good [<rev>]"),
        "next": (b.autonext, 0, "hg bisect next"),
        "reset": (b.reset, 0, "hg bisect reset"),
        "help": (help_, 1, "hg bisect help [<subcommand>]"),
    }

    if cmd not in bisectcmdtable:
        ui.warn("bisect: Unknown sub-command\n")
        return help_()
    if len(args) > bisectcmdtable[cmd][1]:
        ui.warn("bisect: Too many arguments\n")
        return help_()
    return bisectcmdtable[cmd][0](*args)
282
282
# Command table hooking this extension into Mercurial's dispatcher.
cmdtable = {
    "bisect": (bisect_run, [],
               "hg bisect [help|init|reset|next|good|bad]"),
    #"bisect-test": (test, [], "hg bisect-test rev"),
}
@@ -1,2868 +1,2884 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
15
15
# Dispatch errors raised while resolving a command name.

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""

class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
20
20
def filterfiles(filters, files):
    """Return the files selected by the given filter paths.

    A file matches when it equals a filter exactly or lives under a
    filter treated as a directory prefix.  Exact matches come first,
    then prefix matches grouped per filter, preserving input order.
    """
    matched = [f for f in files if f in filters]

    for prefix in filters:
        if prefix and prefix[-1] != "/":
            prefix += "/"
        matched += [f for f in files if f.startswith(prefix)]
    return matched
29
29
def relpath(repo, args):
    """Rebase path arguments onto the repo's current working directory.

    When the cwd is the repository root (empty), args pass through
    unchanged; otherwise each path is joined to cwd and normalized.
    """
    cwd = repo.getcwd()
    if not cwd:
        return args
    return [util.normpath(os.path.join(cwd, arg)) for arg in args]
35
35
def matchpats(repo, pats=None, opts=None, head=''):
    """Build a walk matcher from command-line patterns and options.

    Returns the (files, matchfn, anypats) triple from util.cmdmatcher.
    With no patterns, relative include/exclude options are anchored at
    the current working directory and matching runs from the repo root.

    Note: the defaults were the mutable literals [] and {}, which Python
    shares between calls — mutating opts below corrupted later calls.
    None sentinels keep the interface identical without that hazard.
    """
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)
44
44
def makewalk(repo, pats, opts, node=None, head=''):
    """Prepare a repository walk for the given patterns.

    Returns (files, matchfn, generator); the generator yields
    (src, filename, cwd-relative path, exact-match?) tuples.
    """
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    exact = dict(zip(files, files))
    def traverse():
        for src, fn in repo.walk(node=node, files=files, match=matchfn):
            yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
    return files, matchfn, traverse()
52
52
def walk(repo, pats, opts, node=None, head=''):
    """Yield the walk tuples produced by makewalk's generator."""
    files, matchfn, stream = makewalk(repo, pats, opts, node, head)
    for item in stream:
        yield item
57
57
def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    files, matchfn, anypats = matchpats(repo, pats, opts)

    if repo.changelog.count() == 0:
        return [], False, matchfn

    # Revisions requested on the command line, default tip down to 0.
    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}           # rev -> flag: rev touches a matched file
    slowpath = anypats    # patterns force scanning every changeset
    window = 300          # revisions handled per chunk
    fncache = {}          # rev -> matched file names changed in rev

    # Memoized changelog reads keyed by numeric revision.
    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog):
            # Yield linkrevs of the filelog, newest first, windowed.
            for i in xrange(filelog.count() - 1, -1, -window):
                revs = []
                for j in xrange(max(0, i - window), i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file_ in files:
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i in xrange(repo.changelog.count() - 1, -1, -window):
                for j in xrange(max(0, i - window), i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        # Emit window / add / iter events per the contract documented
        # in the function docstring above.
        for i in xrange(0, len(revs), window):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn
158
158
# Separator character in revision range specifications, e.g. "1:tip".
revrangesep = ':'
160
160
def revrange(ui, repo, revs, revlog=None):
    """Yield revision as strings from a list of revision specifications."""
    if revlog is None:
        revlog = repo.changelog
    revcount = revlog.count()
    def fix(val, defval):
        # Resolve one range endpoint: empty -> defval; a plain integer
        # -> bounds-checked (negatives count back from the end);
        # otherwise a symbolic name looked up in the changelog first,
        # then in 'revlog'.  Aborts on an unresolvable identifier.
        if not val:
            return defval
        try:
            num = int(val)
            if str(num) != val:
                raise ValueError
            if num < 0:
                num += revcount
            if num < 0:
                num = 0
            elif num >= revcount:
                raise ValueError
        except ValueError:
            try:
                num = repo.changelog.rev(repo.lookup(val))
            except KeyError:
                try:
                    num = revlog.rev(revlog.lookup(val))
                except KeyError:
                    raise util.Abort(_('invalid revision identifier %s'), val)
        return num
    # 'seen' suppresses duplicates across overlapping specifications.
    seen = {}
    for spec in revs:
        if spec.find(revrangesep) >= 0:
            start, end = spec.split(revrangesep, 1)
            start = fix(start, 0)
            end = fix(end, revcount - 1)
            # Walk downwards when the range is given as high:low.
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = fix(spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)
206
206
def make_filename(repo, r, pat, node=None,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in the output file name pattern 'pat'.

    Escapes (availability depends on which optional args are given):
    %% literal percent, %b repo root basename, %H/%h/%R node hashes and
    revision number, %r zero-padded revision, %N total count, %n padded
    sequence number, %s/%d/%p basename/dirname/full 'pathname'.
    Aborts on an escape not valid in this context.
    """
    # Escapes that only make sense when a changeset node is supplied.
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(r.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # Pad the sequence number to the width of the total count.
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        # Scan the pattern, replacing each %X with its expansion.
        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        # Unknown escape character for this call's context.
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])
250
250
def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return an open file-like object for pat.

    '-' or an empty pat means stdout (write modes) or stdin (read
    modes); an object that already supports the needed write/read
    method is returned unchanged; otherwise pat is expanded with
    make_filename() and opened with the given mode.
    """
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
                              pathname),
                mode)
262
262
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    """Write a unified diff between two changesets to fp.

    node2=None diffs node1 (or the working dir's first parent)
    against the working directory.  changes may be a precomputed
    repo.changes() tuple; files restricts the diff to those paths.
    NOTE(review): opts={} is a mutable default, but it is only read
    here, never mutated, so it is harmless.
    """
    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        # diff against the working directory
        date2 = util.datestr()
        if not node1:
            node1 = repo.dirstate.parents()[0]
        def read(f):
            return repo.wread(f)

    # r carries the revision labels for the diff header (None when quiet)
    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    # command-line options override the configured diff options
    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    for f in modified:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
    for f in added:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
    for f in removed:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
318
318
def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change

    Memoizes the shortened user name per revision in revcache so
    repeated lookups for the same rev avoid re-shortening.
    """
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user
325
325
def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
    """show a single changeset or file revision

    Either rev or changenode may be given; the other is derived.
    Output detail depends on ui.quiet/verbose/debugflag.  brinfo,
    if given, maps changenodes to branch name lists.
    """
    log = repo.changelog
    if changenode is None:
        changenode = log.node(rev)
    elif not rev:
        rev = log.rev(changenode)

    if ui.quiet:
        ui.write("%d:%s\n" % (rev, short(changenode)))
        return

    changes = log.read(changenode)
    date = util.datestr(changes[2])

    # hide the null parent unless debugging
    parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
               for p in log.parents(changenode)
               if ui.debugflag or p != nullid]
    # a single parent that is simply the previous rev is implied
    if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
        parents = []

    if ui.verbose:
        ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
    else:
        ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

    for tag in repo.nodetags(changenode):
        ui.status(_("tag: %s\n") % tag)
    for parent in parents:
        ui.write(_("parent: %d:%s\n") % parent)

    if brinfo and changenode in brinfo:
        br = brinfo[changenode]
        ui.write(_("branch: %s\n") % " ".join(br))

    ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
                                       hex(changes[0])))
    ui.status(_("user: %s\n") % changes[1])
    ui.status(_("date: %s\n") % date)

    if ui.debugflag:
        # split the file list into unchanged/added/removed groups
        files = repo.changes(log.parents(changenode)[0], changenode)
        for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
            if value:
                ui.note("%-12s %s\n" % (key, " ".join(value)))
    else:
        ui.note(_("files: %s\n") % " ".join(changes[3]))

    description = changes[4].strip()
    if description:
        if ui.verbose:
            ui.status(_("description:\n"))
            ui.status(description)
            ui.status("\n\n")
        else:
            ui.status(_("summary: %s\n") % description.splitlines()[0])
    ui.status("\n")
383
383
def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # copyright notice is status-level so -q suppresses it
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
394
394
def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands

    cmd=None or 'shortlist' produces a command listing ('shortlist'
    restricts it to commands marked with a leading '^' in the table);
    any other cmd shows that command's synopsis, docstring, aliases
    and options.
    """
    option_lists = []
    if cmd and cmd != 'shortlist':
        # help for one specific command
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # first line of the docstring only
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        # h maps command name -> summary line, cmds -> full alias spec
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|")[0]
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        # align descriptions on the widest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
490
489
491 # Commands start here, listed alphabetically
490 # Commands start here, listed alphabetically
492
491
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            # explicitly named: add regardless of dirstate, report only -v
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            # matched by pattern: only add genuinely unknown files
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    repo.add(names)
513
512
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.
    """
    # thin command wrapper; the work happens in addremove_lock
    return addremove_lock(ui, repo, pats, opts)
523
522
def addremove_lock(ui, repo, pats, opts, wlock=None):
    """Add unknown files and remove vanished files matching pats.

    An optional pre-acquired working-dir lock (wlock) is passed
    through to repo.add/repo.remove.
    """
    add, remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # unknown regular file -> schedule for add
        if src == 'f' and repo.dirstate.state(abs) == '?':
            add.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        # tracked but gone from disk (and not already removed) -> remove
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            remove.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    repo.add(add, wlock=wlock)
    repo.remove(remove, wlock=wlock)
537
536
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        # short hash for the changeset that introduced the line
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        # committer, shortened and memoized via ucache
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    dcache = {}
    def getdate(rev):
        # commit date, memoized via dcache
        datestr = dcache.get(rev)
        if datestr is None:
            cl = repo.changelog.read(repo.changelog.node(rev))
            datestr = dcache[rev] = util.datestr(cl[2])
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # option name -> formatter for the annotation column
    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        # default to revision numbers when no column was requested
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts):
        if abs not in mmap:
            ui.warn(_("warning: %s is not in the repository!\n") %
                    ((pats and rel) or abs))
            continue

        f = repo.file(abs)
        if not opts['text'] and util.binary(f.read(mmap[abs])):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = f.annotate(mmap[abs])
        pieces = []

        # build one right-aligned column per requested option
        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
605
604
def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    f = open(fname, "wb")
    dest = ui.expandpath(dest, repo.root)
    other = hg.repository(ui, dest)
    # changesets we have that the destination lacks
    o = repo.findoutgoing(other)
    cg = repo.changegroup(o, 'bundle')

    try:
        # "HG10" magic header, then bzip2-compressed changegroup data
        f.write("HG10")
        z = bz2.BZ2Compressor(9)
        while 1:
            chunk = cg.read(4096)
            if not chunk:
                break
            f.write(z.compress(chunk))
        f.write(z.flush())
    except:
        # don't leave a truncated bundle behind on any failure
        os.unlink(fname)
        raise
639
638
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    mf = {}
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = repo.changelog.tip()
    change = repo.changelog.read(node)
    mf = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
        r = repo.file(abs)
        n = mf[abs]
        # one output stream per file (may expand %s/%d/%p per path)
        fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
        fp.write(r.read(n))
667
666
668 def clone(ui, source, dest=None, **opts):
667 def clone(ui, source, dest=None, **opts):
669 """make a copy of an existing repository
668 """make a copy of an existing repository
670
669
671 Create a copy of an existing repository in a new directory.
670 Create a copy of an existing repository in a new directory.
672
671
673 If no destination directory name is specified, it defaults to the
672 If no destination directory name is specified, it defaults to the
674 basename of the source.
673 basename of the source.
675
674
676 The location of the source is added to the new repository's
675 The location of the source is added to the new repository's
677 .hg/hgrc file, as the default to be used for future pulls.
676 .hg/hgrc file, as the default to be used for future pulls.
678
677
679 For efficiency, hardlinks are used for cloning whenever the source
678 For efficiency, hardlinks are used for cloning whenever the source
680 and destination are on the same filesystem. Some filesystems,
679 and destination are on the same filesystem. Some filesystems,
681 such as AFS, implement hardlinking incorrectly, but do not report
680 such as AFS, implement hardlinking incorrectly, but do not report
682 errors. In these cases, use the --pull option to avoid
681 errors. In these cases, use the --pull option to avoid
683 hardlinking.
682 hardlinking.
684 """
683 """
685 if dest is None:
684 if dest is None:
686 dest = os.path.basename(os.path.normpath(source))
685 dest = os.path.basename(os.path.normpath(source))
687
686
688 if os.path.exists(dest):
687 if os.path.exists(dest):
689 raise util.Abort(_("destination '%s' already exists"), dest)
688 raise util.Abort(_("destination '%s' already exists"), dest)
690
689
691 dest = os.path.realpath(dest)
690 dest = os.path.realpath(dest)
692
691
693 class Dircleanup(object):
692 class Dircleanup(object):
694 def __init__(self, dir_):
693 def __init__(self, dir_):
695 self.rmtree = shutil.rmtree
694 self.rmtree = shutil.rmtree
696 self.dir_ = dir_
695 self.dir_ = dir_
697 os.mkdir(dir_)
696 os.mkdir(dir_)
698 def close(self):
697 def close(self):
699 self.dir_ = None
698 self.dir_ = None
700 def __del__(self):
699 def __del__(self):
701 if self.dir_:
700 if self.dir_:
702 self.rmtree(self.dir_, True)
701 self.rmtree(self.dir_, True)
703
702
704 if opts['ssh']:
703 if opts['ssh']:
705 ui.setconfig("ui", "ssh", opts['ssh'])
704 ui.setconfig("ui", "ssh", opts['ssh'])
706 if opts['remotecmd']:
705 if opts['remotecmd']:
707 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
706 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
708
707
709 if not os.path.exists(source):
708 if not os.path.exists(source):
710 source = ui.expandpath(source)
709 source = ui.expandpath(source)
711
710
712 d = Dircleanup(dest)
711 d = Dircleanup(dest)
713 abspath = source
712 abspath = source
714 other = hg.repository(ui, source)
713 other = hg.repository(ui, source)
715
714
716 copy = False
715 copy = False
717 if other.dev() != -1:
716 if other.dev() != -1:
718 abspath = os.path.abspath(source)
717 abspath = os.path.abspath(source)
719 if not opts['pull'] and not opts['rev']:
718 if not opts['pull'] and not opts['rev']:
720 copy = True
719 copy = True
721
720
722 if copy:
721 if copy:
723 try:
722 try:
724 # we use a lock here because if we race with commit, we
723 # we use a lock here because if we race with commit, we
725 # can end up with extra data in the cloned revlogs that's
724 # can end up with extra data in the cloned revlogs that's
726 # not pointed to by changesets, thus causing verify to
725 # not pointed to by changesets, thus causing verify to
727 # fail
726 # fail
728 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
727 l1 = other.lock()
729 except OSError:
728 except lock.LockException:
730 copy = False
729 copy = False
731
730
732 if copy:
731 if copy:
733 # we lock here to avoid premature writing to the target
732 # we lock here to avoid premature writing to the target
734 os.mkdir(os.path.join(dest, ".hg"))
733 os.mkdir(os.path.join(dest, ".hg"))
735 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
734 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
736
735
737 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
736 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
738 for f in files.split():
737 for f in files.split():
739 src = os.path.join(source, ".hg", f)
738 src = os.path.join(source, ".hg", f)
740 dst = os.path.join(dest, ".hg", f)
739 dst = os.path.join(dest, ".hg", f)
741 try:
740 try:
742 util.copyfiles(src, dst)
741 util.copyfiles(src, dst)
743 except OSError, inst:
742 except OSError, inst:
744 if inst.errno != errno.ENOENT:
743 if inst.errno != errno.ENOENT:
745 raise
744 raise
746
745
747 repo = hg.repository(ui, dest)
746 repo = hg.repository(ui, dest)
748
747
749 else:
748 else:
750 revs = None
749 revs = None
751 if opts['rev']:
750 if opts['rev']:
752 if not other.local():
751 if not other.local():
753 error = _("clone -r not supported yet for remote repositories.")
752 error = _("clone -r not supported yet for remote repositories.")
754 raise util.Abort(error)
753 raise util.Abort(error)
755 else:
754 else:
756 revs = [other.lookup(rev) for rev in opts['rev']]
755 revs = [other.lookup(rev) for rev in opts['rev']]
757 repo = hg.repository(ui, dest, create=1)
756 repo = hg.repository(ui, dest, create=1)
758 repo.pull(other, heads = revs)
757 repo.pull(other, heads = revs)
759
758
760 f = repo.opener("hgrc", "w", text=True)
759 f = repo.opener("hgrc", "w", text=True)
761 f.write("[paths]\n")
760 f.write("[paths]\n")
762 f.write("default = %s\n" % abspath)
761 f.write("default = %s\n" % abspath)
763 f.close()
762 f.close()
764
763
765 if not opts['noupdate']:
764 if not opts['noupdate']:
766 update(ui, repo)
765 update(ui, repo)
767
766
768 d.close()
767 d.close()
769
768
770 def commit(ui, repo, *pats, **opts):
769 def commit(ui, repo, *pats, **opts):
771 """commit the specified files or all outstanding changes
770 """commit the specified files or all outstanding changes
772
771
773 Commit changes to the given files into the repository.
772 Commit changes to the given files into the repository.
774
773
775 If a list of files is omitted, all changes reported by "hg status"
774 If a list of files is omitted, all changes reported by "hg status"
776 will be commited.
775 will be commited.
777
776
778 The HGEDITOR or EDITOR environment variables are used to start an
777 The HGEDITOR or EDITOR environment variables are used to start an
779 editor to add a commit comment.
778 editor to add a commit comment.
780 """
779 """
781 message = opts['message']
780 message = opts['message']
782 logfile = opts['logfile']
781 logfile = opts['logfile']
783
782
784 if message and logfile:
783 if message and logfile:
785 raise util.Abort(_('options --message and --logfile are mutually '
784 raise util.Abort(_('options --message and --logfile are mutually '
786 'exclusive'))
785 'exclusive'))
787 if not message and logfile:
786 if not message and logfile:
788 try:
787 try:
789 if logfile == '-':
788 if logfile == '-':
790 message = sys.stdin.read()
789 message = sys.stdin.read()
791 else:
790 else:
792 message = open(logfile).read()
791 message = open(logfile).read()
793 except IOError, inst:
792 except IOError, inst:
794 raise util.Abort(_("can't read commit message '%s': %s") %
793 raise util.Abort(_("can't read commit message '%s': %s") %
795 (logfile, inst.strerror))
794 (logfile, inst.strerror))
796
795
797 if opts['addremove']:
796 if opts['addremove']:
798 addremove(ui, repo, *pats, **opts)
797 addremove(ui, repo, *pats, **opts)
799 fns, match, anypats = matchpats(repo, pats, opts)
798 fns, match, anypats = matchpats(repo, pats, opts)
800 if pats:
799 if pats:
801 modified, added, removed, deleted, unknown = (
800 modified, added, removed, deleted, unknown = (
802 repo.changes(files=fns, match=match))
801 repo.changes(files=fns, match=match))
803 files = modified + added + removed
802 files = modified + added + removed
804 else:
803 else:
805 files = []
804 files = []
806 try:
805 try:
807 repo.commit(files, message, opts['user'], opts['date'], match)
806 repo.commit(files, message, opts['user'], opts['date'], match)
808 except ValueError, inst:
807 except ValueError, inst:
809 raise util.Abort(str(inst))
808 raise util.Abort(str(inst))
810
809
811 def docopy(ui, repo, pats, opts):
810 def docopy(ui, repo, pats, opts):
812 cwd = repo.getcwd()
811 cwd = repo.getcwd()
813 errors = 0
812 errors = 0
814 copied = []
813 copied = []
815 targets = {}
814 targets = {}
816
815
817 def okaytocopy(abs, rel, exact):
816 def okaytocopy(abs, rel, exact):
818 reasons = {'?': _('is not managed'),
817 reasons = {'?': _('is not managed'),
819 'a': _('has been marked for add'),
818 'a': _('has been marked for add'),
820 'r': _('has been marked for remove')}
819 'r': _('has been marked for remove')}
821 state = repo.dirstate.state(abs)
820 state = repo.dirstate.state(abs)
822 reason = reasons.get(state)
821 reason = reasons.get(state)
823 if reason:
822 if reason:
824 if state == 'a':
823 if state == 'a':
825 origsrc = repo.dirstate.copied(abs)
824 origsrc = repo.dirstate.copied(abs)
826 if origsrc is not None:
825 if origsrc is not None:
827 return origsrc
826 return origsrc
828 if exact:
827 if exact:
829 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
828 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
830 else:
829 else:
831 return abs
830 return abs
832
831
833 def copy(origsrc, abssrc, relsrc, target, exact):
832 def copy(origsrc, abssrc, relsrc, target, exact):
834 abstarget = util.canonpath(repo.root, cwd, target)
833 abstarget = util.canonpath(repo.root, cwd, target)
835 reltarget = util.pathto(cwd, abstarget)
834 reltarget = util.pathto(cwd, abstarget)
836 prevsrc = targets.get(abstarget)
835 prevsrc = targets.get(abstarget)
837 if prevsrc is not None:
836 if prevsrc is not None:
838 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
837 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
839 (reltarget, abssrc, prevsrc))
838 (reltarget, abssrc, prevsrc))
840 return
839 return
841 if (not opts['after'] and os.path.exists(reltarget) or
840 if (not opts['after'] and os.path.exists(reltarget) or
842 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
841 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
843 if not opts['force']:
842 if not opts['force']:
844 ui.warn(_('%s: not overwriting - file exists\n') %
843 ui.warn(_('%s: not overwriting - file exists\n') %
845 reltarget)
844 reltarget)
846 return
845 return
847 if not opts['after']:
846 if not opts['after']:
848 os.unlink(reltarget)
847 os.unlink(reltarget)
849 if opts['after']:
848 if opts['after']:
850 if not os.path.exists(reltarget):
849 if not os.path.exists(reltarget):
851 return
850 return
852 else:
851 else:
853 targetdir = os.path.dirname(reltarget) or '.'
852 targetdir = os.path.dirname(reltarget) or '.'
854 if not os.path.isdir(targetdir):
853 if not os.path.isdir(targetdir):
855 os.makedirs(targetdir)
854 os.makedirs(targetdir)
856 try:
855 try:
857 shutil.copyfile(relsrc, reltarget)
856 shutil.copyfile(relsrc, reltarget)
858 shutil.copymode(relsrc, reltarget)
857 shutil.copymode(relsrc, reltarget)
859 except shutil.Error, inst:
858 except shutil.Error, inst:
860 raise util.Abort(str(inst))
859 raise util.Abort(str(inst))
861 except IOError, inst:
860 except IOError, inst:
862 if inst.errno == errno.ENOENT:
861 if inst.errno == errno.ENOENT:
863 ui.warn(_('%s: deleted in working copy\n') % relsrc)
862 ui.warn(_('%s: deleted in working copy\n') % relsrc)
864 else:
863 else:
865 ui.warn(_('%s: cannot copy - %s\n') %
864 ui.warn(_('%s: cannot copy - %s\n') %
866 (relsrc, inst.strerror))
865 (relsrc, inst.strerror))
867 errors += 1
866 errors += 1
868 return
867 return
869 if ui.verbose or not exact:
868 if ui.verbose or not exact:
870 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
869 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
871 targets[abstarget] = abssrc
870 targets[abstarget] = abssrc
872 repo.copy(origsrc, abstarget)
871 repo.copy(origsrc, abstarget)
873 copied.append((abssrc, relsrc, exact))
872 copied.append((abssrc, relsrc, exact))
874
873
875 def targetpathfn(pat, dest, srcs):
874 def targetpathfn(pat, dest, srcs):
876 if os.path.isdir(pat):
875 if os.path.isdir(pat):
877 abspfx = util.canonpath(repo.root, cwd, pat)
876 abspfx = util.canonpath(repo.root, cwd, pat)
878 if destdirexists:
877 if destdirexists:
879 striplen = len(os.path.split(abspfx)[0])
878 striplen = len(os.path.split(abspfx)[0])
880 else:
879 else:
881 striplen = len(abspfx)
880 striplen = len(abspfx)
882 if striplen:
881 if striplen:
883 striplen += len(os.sep)
882 striplen += len(os.sep)
884 res = lambda p: os.path.join(dest, p[striplen:])
883 res = lambda p: os.path.join(dest, p[striplen:])
885 elif destdirexists:
884 elif destdirexists:
886 res = lambda p: os.path.join(dest, os.path.basename(p))
885 res = lambda p: os.path.join(dest, os.path.basename(p))
887 else:
886 else:
888 res = lambda p: dest
887 res = lambda p: dest
889 return res
888 return res
890
889
891 def targetpathafterfn(pat, dest, srcs):
890 def targetpathafterfn(pat, dest, srcs):
892 if util.patkind(pat, None)[0]:
891 if util.patkind(pat, None)[0]:
893 # a mercurial pattern
892 # a mercurial pattern
894 res = lambda p: os.path.join(dest, os.path.basename(p))
893 res = lambda p: os.path.join(dest, os.path.basename(p))
895 else:
894 else:
896 abspfx = util.canonpath(repo.root, cwd, pat)
895 abspfx = util.canonpath(repo.root, cwd, pat)
897 if len(abspfx) < len(srcs[0][0]):
896 if len(abspfx) < len(srcs[0][0]):
898 # A directory. Either the target path contains the last
897 # A directory. Either the target path contains the last
899 # component of the source path or it does not.
898 # component of the source path or it does not.
900 def evalpath(striplen):
899 def evalpath(striplen):
901 score = 0
900 score = 0
902 for s in srcs:
901 for s in srcs:
903 t = os.path.join(dest, s[0][striplen:])
902 t = os.path.join(dest, s[0][striplen:])
904 if os.path.exists(t):
903 if os.path.exists(t):
905 score += 1
904 score += 1
906 return score
905 return score
907
906
908 striplen = len(abspfx)
907 striplen = len(abspfx)
909 if striplen:
908 if striplen:
910 striplen += len(os.sep)
909 striplen += len(os.sep)
911 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
910 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
912 score = evalpath(striplen)
911 score = evalpath(striplen)
913 striplen1 = len(os.path.split(abspfx)[0])
912 striplen1 = len(os.path.split(abspfx)[0])
914 if striplen1:
913 if striplen1:
915 striplen1 += len(os.sep)
914 striplen1 += len(os.sep)
916 if evalpath(striplen1) > score:
915 if evalpath(striplen1) > score:
917 striplen = striplen1
916 striplen = striplen1
918 res = lambda p: os.path.join(dest, p[striplen:])
917 res = lambda p: os.path.join(dest, p[striplen:])
919 else:
918 else:
920 # a file
919 # a file
921 if destdirexists:
920 if destdirexists:
922 res = lambda p: os.path.join(dest, os.path.basename(p))
921 res = lambda p: os.path.join(dest, os.path.basename(p))
923 else:
922 else:
924 res = lambda p: dest
923 res = lambda p: dest
925 return res
924 return res
926
925
927
926
928 pats = list(pats)
927 pats = list(pats)
929 if not pats:
928 if not pats:
930 raise util.Abort(_('no source or destination specified'))
929 raise util.Abort(_('no source or destination specified'))
931 if len(pats) == 1:
930 if len(pats) == 1:
932 raise util.Abort(_('no destination specified'))
931 raise util.Abort(_('no destination specified'))
933 dest = pats.pop()
932 dest = pats.pop()
934 destdirexists = os.path.isdir(dest)
933 destdirexists = os.path.isdir(dest)
935 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
934 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
936 raise util.Abort(_('with multiple sources, destination must be an '
935 raise util.Abort(_('with multiple sources, destination must be an '
937 'existing directory'))
936 'existing directory'))
938 if opts['after']:
937 if opts['after']:
939 tfn = targetpathafterfn
938 tfn = targetpathafterfn
940 else:
939 else:
941 tfn = targetpathfn
940 tfn = targetpathfn
942 copylist = []
941 copylist = []
943 for pat in pats:
942 for pat in pats:
944 srcs = []
943 srcs = []
945 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
944 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
946 origsrc = okaytocopy(abssrc, relsrc, exact)
945 origsrc = okaytocopy(abssrc, relsrc, exact)
947 if origsrc:
946 if origsrc:
948 srcs.append((origsrc, abssrc, relsrc, exact))
947 srcs.append((origsrc, abssrc, relsrc, exact))
949 if not srcs:
948 if not srcs:
950 continue
949 continue
951 copylist.append((tfn(pat, dest, srcs), srcs))
950 copylist.append((tfn(pat, dest, srcs), srcs))
952 if not copylist:
951 if not copylist:
953 raise util.Abort(_('no files to copy'))
952 raise util.Abort(_('no files to copy'))
954
953
955 for targetpath, srcs in copylist:
954 for targetpath, srcs in copylist:
956 for origsrc, abssrc, relsrc, exact in srcs:
955 for origsrc, abssrc, relsrc, exact in srcs:
957 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
956 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
958
957
959 if errors:
958 if errors:
960 ui.warn(_('(consider using --after)\n'))
959 ui.warn(_('(consider using --after)\n'))
961 return errors, copied
960 return errors, copied
962
961
963 def copy(ui, repo, *pats, **opts):
962 def copy(ui, repo, *pats, **opts):
964 """mark files as copied for the next commit
963 """mark files as copied for the next commit
965
964
966 Mark dest as having copies of source files. If dest is a
965 Mark dest as having copies of source files. If dest is a
967 directory, copies are put in that directory. If dest is a file,
966 directory, copies are put in that directory. If dest is a file,
968 there can only be one source.
967 there can only be one source.
969
968
970 By default, this command copies the contents of files as they
969 By default, this command copies the contents of files as they
971 stand in the working directory. If invoked with --after, the
970 stand in the working directory. If invoked with --after, the
972 operation is recorded, but no copying is performed.
971 operation is recorded, but no copying is performed.
973
972
974 This command takes effect in the next commit.
973 This command takes effect in the next commit.
975
974
976 NOTE: This command should be treated as experimental. While it
975 NOTE: This command should be treated as experimental. While it
977 should properly record copied files, this information is not yet
976 should properly record copied files, this information is not yet
978 fully used by merge, nor fully reported by log.
977 fully used by merge, nor fully reported by log.
979 """
978 """
980 errs, copied = docopy(ui, repo, pats, opts)
979 errs, copied = docopy(ui, repo, pats, opts)
981 return errs
980 return errs
982
981
983 def debugancestor(ui, index, rev1, rev2):
982 def debugancestor(ui, index, rev1, rev2):
984 """find the ancestor revision of two revisions in a given index"""
983 """find the ancestor revision of two revisions in a given index"""
985 r = revlog.revlog(util.opener(os.getcwd()), index, "")
984 r = revlog.revlog(util.opener(os.getcwd()), index, "")
986 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
985 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
987 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
986 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
988
987
988 def debugrebuildstate(ui, repo, rev=None):
989 """rebuild the dirstate as it would look like for the given revision"""
990 if not rev:
991 rev = repo.changelog.tip()
992 else:
993 rev = repo.lookup(rev)
994 change = repo.changelog.read(rev)
995 n = change[0]
996 files = repo.manifest.readflags(n)
997 wlock = self.repo.wlock()
998 repo.dirstate.rebuild(rev, files.iteritems())
999
989 def debugcheckstate(ui, repo):
1000 def debugcheckstate(ui, repo):
990 """validate the correctness of the current dirstate"""
1001 """validate the correctness of the current dirstate"""
991 parent1, parent2 = repo.dirstate.parents()
1002 parent1, parent2 = repo.dirstate.parents()
992 repo.dirstate.read()
1003 repo.dirstate.read()
993 dc = repo.dirstate.map
1004 dc = repo.dirstate.map
994 keys = dc.keys()
1005 keys = dc.keys()
995 keys.sort()
1006 keys.sort()
996 m1n = repo.changelog.read(parent1)[0]
1007 m1n = repo.changelog.read(parent1)[0]
997 m2n = repo.changelog.read(parent2)[0]
1008 m2n = repo.changelog.read(parent2)[0]
998 m1 = repo.manifest.read(m1n)
1009 m1 = repo.manifest.read(m1n)
999 m2 = repo.manifest.read(m2n)
1010 m2 = repo.manifest.read(m2n)
1000 errors = 0
1011 errors = 0
1001 for f in dc:
1012 for f in dc:
1002 state = repo.dirstate.state(f)
1013 state = repo.dirstate.state(f)
1003 if state in "nr" and f not in m1:
1014 if state in "nr" and f not in m1:
1004 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1015 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1005 errors += 1
1016 errors += 1
1006 if state in "a" and f in m1:
1017 if state in "a" and f in m1:
1007 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1018 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1008 errors += 1
1019 errors += 1
1009 if state in "m" and f not in m1 and f not in m2:
1020 if state in "m" and f not in m1 and f not in m2:
1010 ui.warn(_("%s in state %s, but not in either manifest\n") %
1021 ui.warn(_("%s in state %s, but not in either manifest\n") %
1011 (f, state))
1022 (f, state))
1012 errors += 1
1023 errors += 1
1013 for f in m1:
1024 for f in m1:
1014 state = repo.dirstate.state(f)
1025 state = repo.dirstate.state(f)
1015 if state not in "nrm":
1026 if state not in "nrm":
1016 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1027 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1017 errors += 1
1028 errors += 1
1018 if errors:
1029 if errors:
1019 error = _(".hg/dirstate inconsistent with current parent's manifest")
1030 error = _(".hg/dirstate inconsistent with current parent's manifest")
1020 raise util.Abort(error)
1031 raise util.Abort(error)
1021
1032
1022 def debugconfig(ui):
1033 def debugconfig(ui):
1023 """show combined config settings from all hgrc files"""
1034 """show combined config settings from all hgrc files"""
1024 try:
1035 try:
1025 repo = hg.repository(ui)
1036 repo = hg.repository(ui)
1026 except hg.RepoError:
1037 except hg.RepoError:
1027 pass
1038 pass
1028 for section, name, value in ui.walkconfig():
1039 for section, name, value in ui.walkconfig():
1029 ui.write('%s.%s=%s\n' % (section, name, value))
1040 ui.write('%s.%s=%s\n' % (section, name, value))
1030
1041
1031 def debugsetparents(ui, repo, rev1, rev2=None):
1042 def debugsetparents(ui, repo, rev1, rev2=None):
1032 """manually set the parents of the current working directory
1043 """manually set the parents of the current working directory
1033
1044
1034 This is useful for writing repository conversion tools, but should
1045 This is useful for writing repository conversion tools, but should
1035 be used with care.
1046 be used with care.
1036 """
1047 """
1037
1048
1038 if not rev2:
1049 if not rev2:
1039 rev2 = hex(nullid)
1050 rev2 = hex(nullid)
1040
1051
1041 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1052 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1042
1053
1043 def debugstate(ui, repo):
1054 def debugstate(ui, repo):
1044 """show the contents of the current dirstate"""
1055 """show the contents of the current dirstate"""
1045 repo.dirstate.read()
1056 repo.dirstate.read()
1046 dc = repo.dirstate.map
1057 dc = repo.dirstate.map
1047 keys = dc.keys()
1058 keys = dc.keys()
1048 keys.sort()
1059 keys.sort()
1049 for file_ in keys:
1060 for file_ in keys:
1050 ui.write("%c %3o %10d %s %s\n"
1061 ui.write("%c %3o %10d %s %s\n"
1051 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1062 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1052 time.strftime("%x %X",
1063 time.strftime("%x %X",
1053 time.localtime(dc[file_][3])), file_))
1064 time.localtime(dc[file_][3])), file_))
1054 for f in repo.dirstate.copies:
1065 for f in repo.dirstate.copies:
1055 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1066 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1056
1067
1057 def debugdata(ui, file_, rev):
1068 def debugdata(ui, file_, rev):
1058 """dump the contents of an data file revision"""
1069 """dump the contents of an data file revision"""
1059 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
1070 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
1060 try:
1071 try:
1061 ui.write(r.revision(r.lookup(rev)))
1072 ui.write(r.revision(r.lookup(rev)))
1062 except KeyError:
1073 except KeyError:
1063 raise util.Abort(_('invalid revision identifier %s'), rev)
1074 raise util.Abort(_('invalid revision identifier %s'), rev)
1064
1075
1065 def debugindex(ui, file_):
1076 def debugindex(ui, file_):
1066 """dump the contents of an index file"""
1077 """dump the contents of an index file"""
1067 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1078 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1068 ui.write(" rev offset length base linkrev" +
1079 ui.write(" rev offset length base linkrev" +
1069 " nodeid p1 p2\n")
1080 " nodeid p1 p2\n")
1070 for i in range(r.count()):
1081 for i in range(r.count()):
1071 e = r.index[i]
1082 e = r.index[i]
1072 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1083 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1073 i, e[0], e[1], e[2], e[3],
1084 i, e[0], e[1], e[2], e[3],
1074 short(e[6]), short(e[4]), short(e[5])))
1085 short(e[6]), short(e[4]), short(e[5])))
1075
1086
1076 def debugindexdot(ui, file_):
1087 def debugindexdot(ui, file_):
1077 """dump an index DAG as a .dot file"""
1088 """dump an index DAG as a .dot file"""
1078 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1089 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1079 ui.write("digraph G {\n")
1090 ui.write("digraph G {\n")
1080 for i in range(r.count()):
1091 for i in range(r.count()):
1081 e = r.index[i]
1092 e = r.index[i]
1082 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1093 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1083 if e[5] != nullid:
1094 if e[5] != nullid:
1084 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1095 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1085 ui.write("}\n")
1096 ui.write("}\n")
1086
1097
1087 def debugrename(ui, repo, file, rev=None):
1098 def debugrename(ui, repo, file, rev=None):
1088 """dump rename information"""
1099 """dump rename information"""
1089 r = repo.file(relpath(repo, [file])[0])
1100 r = repo.file(relpath(repo, [file])[0])
1090 if rev:
1101 if rev:
1091 try:
1102 try:
1092 # assume all revision numbers are for changesets
1103 # assume all revision numbers are for changesets
1093 n = repo.lookup(rev)
1104 n = repo.lookup(rev)
1094 change = repo.changelog.read(n)
1105 change = repo.changelog.read(n)
1095 m = repo.manifest.read(change[0])
1106 m = repo.manifest.read(change[0])
1096 n = m[relpath(repo, [file])[0]]
1107 n = m[relpath(repo, [file])[0]]
1097 except (hg.RepoError, KeyError):
1108 except (hg.RepoError, KeyError):
1098 n = r.lookup(rev)
1109 n = r.lookup(rev)
1099 else:
1110 else:
1100 n = r.tip()
1111 n = r.tip()
1101 m = r.renamed(n)
1112 m = r.renamed(n)
1102 if m:
1113 if m:
1103 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1114 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1104 else:
1115 else:
1105 ui.write(_("not renamed\n"))
1116 ui.write(_("not renamed\n"))
1106
1117
1107 def debugwalk(ui, repo, *pats, **opts):
1118 def debugwalk(ui, repo, *pats, **opts):
1108 """show how files match on given patterns"""
1119 """show how files match on given patterns"""
1109 items = list(walk(repo, pats, opts))
1120 items = list(walk(repo, pats, opts))
1110 if not items:
1121 if not items:
1111 return
1122 return
1112 fmt = '%%s %%-%ds %%-%ds %%s' % (
1123 fmt = '%%s %%-%ds %%-%ds %%s' % (
1113 max([len(abs) for (src, abs, rel, exact) in items]),
1124 max([len(abs) for (src, abs, rel, exact) in items]),
1114 max([len(rel) for (src, abs, rel, exact) in items]))
1125 max([len(rel) for (src, abs, rel, exact) in items]))
1115 for src, abs, rel, exact in items:
1126 for src, abs, rel, exact in items:
1116 line = fmt % (src, abs, rel, exact and 'exact' or '')
1127 line = fmt % (src, abs, rel, exact and 'exact' or '')
1117 ui.write("%s\n" % line.rstrip())
1128 ui.write("%s\n" % line.rstrip())
1118
1129
1119 def diff(ui, repo, *pats, **opts):
1130 def diff(ui, repo, *pats, **opts):
1120 """diff repository (or selected files)
1131 """diff repository (or selected files)
1121
1132
1122 Show differences between revisions for the specified files.
1133 Show differences between revisions for the specified files.
1123
1134
1124 Differences between files are shown using the unified diff format.
1135 Differences between files are shown using the unified diff format.
1125
1136
1126 When two revision arguments are given, then changes are shown
1137 When two revision arguments are given, then changes are shown
1127 between those revisions. If only one revision is specified then
1138 between those revisions. If only one revision is specified then
1128 that revision is compared to the working directory, and, when no
1139 that revision is compared to the working directory, and, when no
1129 revisions are specified, the working directory files are compared
1140 revisions are specified, the working directory files are compared
1130 to its parent.
1141 to its parent.
1131
1142
1132 Without the -a option, diff will avoid generating diffs of files
1143 Without the -a option, diff will avoid generating diffs of files
1133 it detects as binary. With -a, diff will generate a diff anyway,
1144 it detects as binary. With -a, diff will generate a diff anyway,
1134 probably with undesirable results.
1145 probably with undesirable results.
1135 """
1146 """
1136 node1, node2 = None, None
1147 node1, node2 = None, None
1137 revs = [repo.lookup(x) for x in opts['rev']]
1148 revs = [repo.lookup(x) for x in opts['rev']]
1138
1149
1139 if len(revs) > 0:
1150 if len(revs) > 0:
1140 node1 = revs[0]
1151 node1 = revs[0]
1141 if len(revs) > 1:
1152 if len(revs) > 1:
1142 node2 = revs[1]
1153 node2 = revs[1]
1143 if len(revs) > 2:
1154 if len(revs) > 2:
1144 raise util.Abort(_("too many revisions to diff"))
1155 raise util.Abort(_("too many revisions to diff"))
1145
1156
1146 fns, matchfn, anypats = matchpats(repo, pats, opts)
1157 fns, matchfn, anypats = matchpats(repo, pats, opts)
1147
1158
1148 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1159 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1149 text=opts['text'], opts=opts)
1160 text=opts['text'], opts=opts)
1150
1161
1151 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1162 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1152 node = repo.lookup(changeset)
1163 node = repo.lookup(changeset)
1153 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1164 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1154 if opts['switch_parent']:
1165 if opts['switch_parent']:
1155 parents.reverse()
1166 parents.reverse()
1156 prev = (parents and parents[0]) or nullid
1167 prev = (parents and parents[0]) or nullid
1157 change = repo.changelog.read(node)
1168 change = repo.changelog.read(node)
1158
1169
1159 fp = make_file(repo, repo.changelog, opts['output'],
1170 fp = make_file(repo, repo.changelog, opts['output'],
1160 node=node, total=total, seqno=seqno,
1171 node=node, total=total, seqno=seqno,
1161 revwidth=revwidth)
1172 revwidth=revwidth)
1162 if fp != sys.stdout:
1173 if fp != sys.stdout:
1163 ui.note("%s\n" % fp.name)
1174 ui.note("%s\n" % fp.name)
1164
1175
1165 fp.write("# HG changeset patch\n")
1176 fp.write("# HG changeset patch\n")
1166 fp.write("# User %s\n" % change[1])
1177 fp.write("# User %s\n" % change[1])
1167 fp.write("# Node ID %s\n" % hex(node))
1178 fp.write("# Node ID %s\n" % hex(node))
1168 fp.write("# Parent %s\n" % hex(prev))
1179 fp.write("# Parent %s\n" % hex(prev))
1169 if len(parents) > 1:
1180 if len(parents) > 1:
1170 fp.write("# Parent %s\n" % hex(parents[1]))
1181 fp.write("# Parent %s\n" % hex(parents[1]))
1171 fp.write(change[4].rstrip())
1182 fp.write(change[4].rstrip())
1172 fp.write("\n\n")
1183 fp.write("\n\n")
1173
1184
1174 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1185 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1175 if fp != sys.stdout:
1186 if fp != sys.stdout:
1176 fp.close()
1187 fp.close()
1177
1188
1178 def export(ui, repo, *changesets, **opts):
1189 def export(ui, repo, *changesets, **opts):
1179 """dump the header and diffs for one or more changesets
1190 """dump the header and diffs for one or more changesets
1180
1191
1181 Print the changeset header and diffs for one or more revisions.
1192 Print the changeset header and diffs for one or more revisions.
1182
1193
1183 The information shown in the changeset header is: author,
1194 The information shown in the changeset header is: author,
1184 changeset hash, parent and commit comment.
1195 changeset hash, parent and commit comment.
1185
1196
1186 Output may be to a file, in which case the name of the file is
1197 Output may be to a file, in which case the name of the file is
1187 given using a format string. The formatting rules are as follows:
1198 given using a format string. The formatting rules are as follows:
1188
1199
1189 %% literal "%" character
1200 %% literal "%" character
1190 %H changeset hash (40 bytes of hexadecimal)
1201 %H changeset hash (40 bytes of hexadecimal)
1191 %N number of patches being generated
1202 %N number of patches being generated
1192 %R changeset revision number
1203 %R changeset revision number
1193 %b basename of the exporting repository
1204 %b basename of the exporting repository
1194 %h short-form changeset hash (12 bytes of hexadecimal)
1205 %h short-form changeset hash (12 bytes of hexadecimal)
1195 %n zero-padded sequence number, starting at 1
1206 %n zero-padded sequence number, starting at 1
1196 %r zero-padded changeset revision number
1207 %r zero-padded changeset revision number
1197
1208
1198 Without the -a option, export will avoid generating diffs of files
1209 Without the -a option, export will avoid generating diffs of files
1199 it detects as binary. With -a, export will generate a diff anyway,
1210 it detects as binary. With -a, export will generate a diff anyway,
1200 probably with undesirable results.
1211 probably with undesirable results.
1201
1212
1202 With the --switch-parent option, the diff will be against the second
1213 With the --switch-parent option, the diff will be against the second
1203 parent. It can be useful to review a merge.
1214 parent. It can be useful to review a merge.
1204 """
1215 """
1205 if not changesets:
1216 if not changesets:
1206 raise util.Abort(_("export requires at least one changeset"))
1217 raise util.Abort(_("export requires at least one changeset"))
1207 seqno = 0
1218 seqno = 0
1208 revs = list(revrange(ui, repo, changesets))
1219 revs = list(revrange(ui, repo, changesets))
1209 total = len(revs)
1220 total = len(revs)
1210 revwidth = max(map(len, revs))
1221 revwidth = max(map(len, revs))
1211 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1222 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1212 ui.note(msg)
1223 ui.note(msg)
1213 for cset in revs:
1224 for cset in revs:
1214 seqno += 1
1225 seqno += 1
1215 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1226 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1216
1227
1217 def forget(ui, repo, *pats, **opts):
1228 def forget(ui, repo, *pats, **opts):
1218 """don't add the specified files on the next commit
1229 """don't add the specified files on the next commit
1219
1230
1220 Undo an 'hg add' scheduled for the next commit.
1231 Undo an 'hg add' scheduled for the next commit.
1221 """
1232 """
1222 forget = []
1233 forget = []
1223 for src, abs, rel, exact in walk(repo, pats, opts):
1234 for src, abs, rel, exact in walk(repo, pats, opts):
1224 if repo.dirstate.state(abs) == 'a':
1235 if repo.dirstate.state(abs) == 'a':
1225 forget.append(abs)
1236 forget.append(abs)
1226 if ui.verbose or not exact:
1237 if ui.verbose or not exact:
1227 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1238 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1228 repo.forget(forget)
1239 repo.forget(forget)
1229
1240
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    # Field separator and line terminator; --print0 switches both to NUL
    # for safe consumption by tools like xargs -0.
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # Cache filelog objects so each file's revlog is opened at most once.
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # Yield (linenum, colstart, colend, line-text) for every regexp
        # match in body, scanning forward from each previous match.
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            # Count newlines skipped since the last match to keep linenum
            # cumulative across the whole body.
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # One matched line. Equality/hash use only the text so that two
        # revisions containing the same line compare as "unchanged" in the
        # symmetric-difference below.
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend
        def __eq__(self, other):
            return self.line == other.line
        def __hash__(self):
            return hash(self.line)

    # matches[rev][fn] maps linestate -> linestate for each file touched
    # in rev within the current window.
    matches = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, {})
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m[s] = s

    # FIXME: prev isn't used, why ?
    # NOTE(review): prev[fn] IS read below in display()'s else-branch to
    # compute `r`, but `r` itself is never used afterwards — the output
    # always prints str(rev). Presumably `r` was meant to go into `cols`;
    # confirm against upstream history before changing.
    prev = {}
    ucache = {}  # per-user cache for trimuser() lookups
    def display(fn, rev, states, prevstates):
        # Print the lines whose match status changed between prevstates and
        # states; returns (#added-matches, #removed-matches).
        diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
        diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
        counts = {'-': 0, '+': 0}
        filerevmatches = {}
        for l in diff:
            if incrementing or not opts['all']:
                # Walking forward: a line present in the older state but
                # not the newer one has gone away ('-'), else it appeared.
                change = ((l in prevstates) and '-') or '+'
                r = rev
            else:
                change = ((l in states) and '-') or '+'
                r = prev[fn]
            cols = [fn, str(rev)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(trimuser(ui, getchange(rev)[1], rev,
                                     ucache))
            if opts['files_with_matches']:
                # Only one output line per (file, rev) pair in -l mode.
                c = (fn, rev)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            counts[change] += 1
        return counts['+'], counts['-']

    fstate = {}  # fn -> last seen match-state dict, carried across windows
    skip = {}    # fn -> True once reported (non---all mode stops early)
    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
    count = 0
    # Window protocol from walkchangerevs: 'window' starts a batch of revs
    # (rev then holds the traversal direction), 'add' loads one rev's file
    # bodies, 'iter' visits revs in display order.
    incrementing = False
    for st, rev, fns in changeiter:
        if st == 'window':
            incrementing = rev
            matches.clear()
        elif st == 'add':
            change = repo.changelog.read(repo.lookup(str(rev)))
            mf = repo.manifest.read(change[0])
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                except KeyError:
                    # File not in this revision's manifest; nothing to grep.
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                if fn in skip:
                    continue
                if incrementing or not opts['all'] or fstate[fn]:
                    pos, neg = display(fn, rev, m, fstate[fn])
                    count += pos + neg
                    if pos and not opts['all']:
                        # First match found and --all not given: suppress
                        # this file from all later revisions.
                        skip[fn] = True
                fstate[fn] = m
                prev[fn] = rev

    if not incrementing:
        # Backward walk: flush the final per-file state against empty so
        # the oldest appearance of each match still gets printed.
        fstate = fstate.items()
        fstate.sort()
        for fn, state in fstate:
            if fn in skip:
                continue
            display(fn, rev, {}, state)
    # Unix grep convention: exit status 1 when nothing matched.
    return (count == 0 and 1) or 0
1369
1381
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # Restrict to heads reachable from --rev when given.
    rev = opts['rev']
    if rev:
        headnodes = repo.heads(repo.lookup(rev))
    else:
        headnodes = repo.heads()

    brinfo = None
    if opts['branches']:
        brinfo = repo.branchlookup(headnodes)

    for node in headnodes:
        show_changeset(ui, repo, changenode=node, brinfo=brinfo)
1388
1400
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    # Full hashes in verbose mode, short ones otherwise.
    hexfunc = ui.verbose and hex or short
    modified, added, removed, deleted, unknown = repo.changes()
    dirty = (modified or added or removed or deleted) and "+" or ""
    ids = '+'.join([hexfunc(parent) for parent in parents])
    pieces = ["%s%s" % (ids, dirty)]

    if not ui.quiet:
        # multiple tags for a single parent separated by '/'
        tagsets = [tags for tags in map(repo.nodetags, parents) if tags]
        parenttags = ['/'.join(tags) for tags in tagsets]
        # tags for multiple parents separated by ' + '
        if parenttags:
            pieces.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(pieces))
1418
1430
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    If a patch looks like a mail message (its first line starts with
    "From " or looks like an RFC822 header), it will not be applied
    unless the -f option is used. The importer neither parses nor
    discards mail headers, so use -f only to override the "mailness"
    safety check, not to import a real mail message.
    """
    patches = (patch1,) + patches

    # Refuse to apply over local modifications unless forced.
    if not opts['force']:
        modified, added, removed, deleted, unknown = repo.changes()
        if modified or added or removed or deleted:
            raise util.Abort(_("outstanding uncommitted changes"))

    d = opts["base"]       # directory patch names are relative to
    strip = opts["strip"]  # number of leading path components to strip

    # Heuristic for "this looks like a mail header" (see docstring).
    mailre = re.compile(r'(?:From |[\w-]+:)')

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])')

    for patch in patches:
        ui.status(_("applying %s\n") % patch)
        pf = os.path.join(d, patch)

        # Scan the preamble before the diff proper: collect the commit
        # message and, for "hg export" output, the recorded user.
        message = []
        user = None
        hgpatch = False
        for line in file(pf):
            line = line.rstrip()
            if (not message and not hgpatch and
                   mailre.match(line) and not opts['force']):
                if len(line) > 35:
                    line = line[:32] + '...'
                raise util.Abort(_('first line looks like a '
                                   'mail header: ') + line)
            if diffre.match(line):
                # Diff body starts here; the preamble is complete.
                break
            elif hgpatch:
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                    ui.debug(_('User: %s\n') % user)
                elif not line.startswith("# ") and line:
                    # First non-header line ends hg-export header parsing.
                    message.append(line)
                    hgpatch = False
            elif line == '# HG changeset patch':
                hgpatch = True
                message = []       # We may have collected garbage
            else:
                message.append(line)

        # make sure message isn't empty
        if not message:
            message = _("imported patch %s\n") % patch
        else:
            message = "%s\n" % '\n'.join(message)
        ui.debug(_('message:\n%s\n') % message)

        files = util.patch(strip, pf, ui)

        if len(files) > 0:
            addremove(ui, repo, *files)
        repo.commit(files, message, user)
1494
1506
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified repo or the default
    pull repo. These are the changesets that would be pulled if a pull
    was requested.

    Currently only local repositories are supported.
    """
    source = ui.expandpath(source, repo.root)
    other = hg.repository(ui, source)
    if not other.local():
        raise util.Abort(_("incoming doesn't work for remote repositories yet"))

    incomingnodes = repo.findincoming(other)
    if not incomingnodes:
        return
    incomingnodes = other.changelog.nodesbetween(incomingnodes)[0]
    if opts['newest_first']:
        incomingnodes.reverse()

    for node in incomingnodes:
        nodeparents = [p for p in other.changelog.parents(node)
                       if p != nullid]
        if opts['no_merges'] and len(nodeparents) == 2:
            continue
        show_changeset(ui, other, changenode=node)
        if opts['patch']:
            # Diff against the first parent (or null for a root changeset).
            base = (nodeparents and nodeparents[0]) or nullid
            dodiff(ui, ui, other, base, node)
            ui.write("\n")
1523
1535
def init(ui, dest="."):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.
    """
    # Create the target directory first; repository(create=1) then lays
    # down the .hg metadata inside it.
    if not os.path.exists(dest):
        os.mkdir(dest)
    hg.repository(ui, dest, create=1)
1535
1547
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # NUL terminator with --print0 (for xargs -0), newline otherwise.
    terminator = opts['print0'] and '\0' or '\n'
    node = None
    if opts['rev']:
        node = repo.lookup(opts['rev'])

    walker = walk(repo, pats, opts, node=node, head='(?:.*/|)')
    for src, abs, rel, exact in walker:
        # In working-dir mode, skip files Mercurial doesn't track.
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), terminator)
        else:
            ui.write(((pats and rel) or abs), terminator)
1569
1581
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.
    """
    class dui(object):
        # Implement and delegate some ui protocol. Save hunks of
        # output for later display in the desired order.
        # walkchangerevs yields revisions in window order, which may not
        # be display order; output is buffered per-rev and replayed on
        # the 'iter' events below.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}  # rev -> list of ui.write arg-tuples
        def bump(self, rev):
            # Start buffering output for a new revision.
            self.rev = rev
            self.hunk[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            # Everything else (verbose/quiet/debugflag, config, ...)
            # is delegated to the real ui.
            return getattr(self.ui, key)

    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)

    # Validate --limit up front; sys.maxint means "no limit".
    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    for st, rev, fns in changeiter:
        if st == 'window':
            # New window: fresh output buffer.
            du = dui(ui)
        elif st == 'add':
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parents(changenode)
                       if p != nullid]
            # Two real parents == merge changeset.
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            br = None
            if opts['keyword']:
                changes = getchange(rev)
                miss = 0
                # Every keyword must appear in the user name, description,
                # or the first 20 changed file names (case-insensitive).
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            if opts['branch']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            show_changeset(du, repo, rev, brinfo=br)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(du, du, repo, prev, changenode, match=matchfn)
                du.write("\n\n")
        elif st == 'iter':
            # Replay this revision's buffered output in display order.
            if count == limit: break
            count += 1
            for args in du.hunk[rev]:
                ui.write(*args)
1654
1666
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the tip is used.
    """
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            n = repo.changelog.read(n)[0]
        except hg.RepoError:
            # Fall back to treating rev as a manifest revision.
            n = repo.manifest.lookup(rev)
    else:
        n = repo.manifest.tip()

    m = repo.manifest.read(n)
    mf = repo.manifest.readflags(n)
    names = m.keys()
    names.sort()

    for f in names:
        mode = mf[f] and "755" or "644"
        ui.write("%40s %3s %s\n" % (hex(m[f]), mode, f))
1680
1692
def outgoing(ui, repo, dest="default-push", **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repo or the
    default push repo. These are the changesets that would be pushed
    if a push was requested.
    """
    dest = ui.expandpath(dest, repo.root)
    other = hg.repository(ui, dest)
    outnodes = repo.changelog.nodesbetween(repo.findoutgoing(other))[0]
    if opts['newest_first']:
        outnodes.reverse()

    for node in outnodes:
        nodeparents = [p for p in repo.changelog.parents(node)
                       if p != nullid]
        if opts['no_merges'] and len(nodeparents) == 2:
            continue
        show_changeset(ui, repo, changenode=node)
        if opts['patch']:
            # Diff against the first parent (or null for a root changeset).
            base = (nodeparents and nodeparents[0]) or nullid
            dodiff(ui, ui, repo, base, node)
            ui.write("\n")
1703
1715
def parents(ui, repo, rev=None, branch=None):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    # Parents of the requested revision, or of the working directory.
    if rev:
        nodes = repo.changelog.parents(repo.lookup(rev))
    else:
        nodes = repo.dirstate.parents()

    brinfo = None
    if branch is not None:
        brinfo = repo.branchlookup(nodes)

    for node in nodes:
        if node == nullid:
            continue
        show_changeset(ui, repo, changenode=node, brinfo=brinfo)
1720
1732
def paths(ui, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    # NOTE(review): `repo` is never used afterwards; presumably opening the
    # repository loads .hg/hgrc into ui so repo-local [paths] entries show
    # up in configitems below — confirm before removing. Failure is ignored
    # so the command also works outside any repository.
    try:
        repo = hg.repository(ui=ui)
    except hg.RepoError:
        pass

    if search:
        # Look up a single name; exit status 1 if it is not defined.
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
1745
1757
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port][/path]
      https://[user@]host[:port][/path]
      ssh://[user@]host[:port][/path]

    SSH requires an accessible shell account on the destination machine
    and a copy of hg in the remote path. With SSH, paths are relative
    to the remote user's home directory by default; use two slashes at
    the start of a path to specify it as relative to the filesystem root.
    """
    source = ui.expandpath(source, repo.root)
    ui.status(_('pulling from %s\n') % (source))

    # per-invocation ssh overrides take precedence over config files
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    revs = None
    if opts['rev']:
        if not other.local():
            raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
        revs = [other.lookup(rev) for rev in opts['rev']]

    r = repo.pull(other, heads=revs)
    if not r:
        # something was actually pulled; offer/perform the working-dir update
        if opts['update']:
            return update(ui, repo)
        ui.status(_("(run 'hg update' to get a working copy)\n"))

    return r
1789
1801
def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port][/path]

    SSH requires an accessible shell account on the destination
    machine and a copy of hg in the remote path.
    """
    dest = ui.expandpath(dest, repo.root)
    ui.status('pushing to %s\n' % (dest))

    # command-line ssh settings override the configuration files
    if ssh:
        ui.setconfig("ui", "ssh", ssh)
    if remotecmd:
        ui.setconfig("ui", "remotecmd", remotecmd)

    remote = hg.repository(ui, dest)
    return repo.push(remote, force)
1823
1835
def rawcommit(ui, repo, *flist, **rc):
    """raw commit interface (DEPRECATED)

    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """
    ui.warn(_("(the rawcommit command is deprecated)\n"))

    # commit message may come from -m, from a log file, or not at all
    message = rc['message']
    if not message and rc['logfile']:
        try:
            message = open(rc['logfile']).read()
        except IOError:
            # fall through: an unreadable logfile is treated as no message
            pass
    if not message and not rc['logfile']:
        raise util.Abort(_("missing commit message"))

    # file list = positional args plus an optional file-of-files
    files = relpath(repo, list(flist))
    if rc['files']:
        files += open(rc['files']).read().splitlines()

    rc['parent'] = [repo.lookup(p) for p in rc['parent']]

    try:
        repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
    except ValueError as err:
        raise util.Abort(str(err))
1857
1869
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # Verify only when recovery actually rolled something back.
    if not repo.recover():
        return False
    return repo.verify()
1869
1881
def remove(ui, repo, pat, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it.
    """
    names = []

    def okaytoremove(abs, rel, exact):
        # A file may be removed only if it is clean and tracked.
        modified, added, removed, deleted, unknown = repo.changes(files=[abs])
        if modified:
            reason = _('is modified')
        elif added:
            reason = _('has been marked for add')
        elif unknown:
            reason = _('is not managed')
        else:
            return True
        # complain only about names the user spelled out explicitly;
        # pattern matches are skipped silently
        if exact:
            ui.warn(_('not removing %s: file %s\n') % (rel, reason))

    for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
        if okaytoremove(abs, rel, exact):
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            names.append(abs)
    repo.remove(names, unlink=True)
1901
1913
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record rename files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # first record the copies, then schedule the sources for removal
    errs, copied = docopy(ui, repo, pats, opts)
    doomed = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        doomed.append(abs)
    repo.remove(doomed, unlink=True)
    return errs
1927
1939
def revert(ui, repo, *pats, **opts):
    """revert modified files or dirs back to their unmodified states

    Revert any uncommitted modifications made to the named files or
    directories. This restores the contents of the affected files to
    an unmodified state.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    # revert against the requested revision, defaulting to the first parent
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]

    files, choose, anypats = matchpats(repo, pats, opts)
    modified, added, removed, deleted, unknown = repo.changes(match=choose)
    # drop pending adds and resurrect removed/deleted files before updating
    repo.forget(added)
    repo.undelete(removed + deleted)

    return repo.update(node, False, True, choose, False)
1951
1963
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
1958
1970
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        # ssh-style wire protocol over stdin/stdout
        fin, fout = sys.stdin, sys.stdout
        # keep any of our own output away from the protocol stream
        sys.stdout = sys.stderr

        # Prevent insertion/deletion of CRs
        util.set_binary(fin)
        util.set_binary(fout)

        def getarg():
            # one "name length" header line followed by `length` bytes
            header = fin.readline()[:-1]
            arg, size = header.split()
            return arg, fin.read(int(size))

        def respond(payload):
            fout.write("%d\n" % len(payload))
            fout.write(payload)
            fout.flush()

        lock = None

        while 1:
            cmd = fin.readline()[:-1]
            if cmd == '':
                # EOF: peer went away
                return
            if cmd == "heads":
                respond(" ".join(map(hex, repo.heads())) + "\n")
            elif cmd == "lock":
                lock = repo.lock()
                respond("")
            elif cmd == "unlock":
                if lock:
                    lock.release()
                lock = None
                respond("")
            elif cmd == "branches":
                arg, nodes = getarg()
                nodes = map(bin, nodes.split(" "))
                lines = []
                for b in repo.branches(nodes):
                    lines.append(" ".join(map(hex, b)) + "\n")
                respond("".join(lines))
            elif cmd == "between":
                arg, pairs = getarg()
                pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
                lines = []
                for b in repo.between(pairs):
                    lines.append(" ".join(map(hex, b)) + "\n")
                respond("".join(lines))
            elif cmd == "changegroup":
                arg, roots = getarg()
                nodes = map(bin, roots.split(" "))

                # stream the changegroup out in fixed-size chunks
                cg = repo.changegroup(nodes, 'serve')
                while 1:
                    d = cg.read(4096)
                    if not d:
                        break
                    fout.write(d)
                fout.flush()
            elif cmd == "addchangegroup":
                # the client must hold the repo lock before pushing
                if not lock:
                    respond("not locked")
                    continue
                respond("")
                r = repo.addchangegroup(fin)
                respond("")

    optlist = "name templates style address port ipv6 accesslog errorlog"
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if opts['daemon'] and not opts['daemon_pipefds']:
        # daemonize by re-exec'ing ourselves; the pipe lets the child
        # signal readiness before the parent exits
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = hgweb.create_server(repo)
    except socket.error as inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            # bound to all interfaces: report our hostname instead
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()))
        fp.close()

    if opts['daemon_pipefds']:
        # we are the re-exec'd daemon child: tell the parent we are up,
        # then detach stdio onto the null device
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()
2093
2105
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show changed files in the repository. If names are
    given, only files that match are shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    ! = deleted, but still tracked
    ? = not tracked
    """

    files, matchfn, anypats = matchpats(repo, pats, opts)
    # make paths relative to the cwd only when patterns were given
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.changes(files=files, match=matchfn)]

    # BUG FIX: index opts with the *untranslated* option names. The old
    # code used _('modified') etc. as keys, which raises KeyError under any
    # non-English locale, since the command table registers English names.
    changetypes = [('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown)]

    end = opts['print0'] and '\0' or '\n'

    # if the user selected specific states, show only those; else show all
    for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            fmt = "%%s%s" % end
        else:
            fmt = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(fmt % f)
2131
2143
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the tip is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name == "tip":
        raise util.Abort(_("the name 'tip' is reserved"))
    # positional REV is deprecated; reject mixing it with -r
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if rev_:
        r = hex(repo.lookup(rev_))
    else:
        r = hex(repo.changelog.tip())

    # characters that would corrupt the tag file or revision ranges
    for c in (revrangesep, '\r', '\n'):
        if c in name:
            raise util.Abort(_("%s cannot be used in a tag name") % repr(c))

    repo.hook('pretag', throw=True, node=r, tag=name,
              local=int(not not opts['local']))

    if opts['local']:
        # local tags live in .hg/localtags and are never committed
        repo.opener("localtags", "a").write("%s %s\n" % (r, name))
        repo.hook('tag', node=r, tag=name, local=1)
        return

    for x in repo.changes():
        if ".hgtags" in x:
            raise util.Abort(_("working copy of .hgtags is changed "
                               "(please commit .hgtags manually)"))

    repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
    if repo.dirstate.state(".hgtags") == '?':
        repo.add([".hgtags"])

    message = (opts['message'] or
               _("Added tag %s for changeset %s") % (name, r))
    try:
        repo.commit([".hgtags"], message, opts['user'], opts['date'])
        repo.hook('tag', node=r, tag=name, local=0)
    except ValueError as inst:
        raise util.Abort(str(inst))
2192
2204
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """
    # tagslist() yields oldest-first; display newest-first
    for t, n in reversed(repo.tagslist()):
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            # node not in the changelog (e.g. a tag on an unknown rev)
            r = "    ?:?"
        ui.write("%-30s %s\n" % (t, r))
2209
2221
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    tipnode = repo.changelog.tip()
    show_changeset(ui, repo, changenode=tipnode)
    if opts['patch']:
        # diff against the tip's first parent
        dodiff(ui, ui, repo, repo.changelog.parents(tipnode)[0], tipnode)
2219
2231
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    # 4-byte magic identifies a v1 bundle
    if f.read(4) != "HG10":
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)

    def bzgenerator(chunks):
        # stream-decompress the bzip2 payload chunk by chunk
        decomp = bz2.BZ2Decompressor()
        for chunk in chunks:
            yield decomp.decompress(chunk)

    bzgen = bzgenerator(util.filechunkiter(f, 4096))
    if repo.addchangegroup(util.chunkbuffer(bzgen)):
        return 1

    if opts['update']:
        return update(ui, repo)
    ui.status(_("(run 'hg update' to get a working copy)\n"))
2244
2256
def undo(ui, repo):
    """undo the last commit or pull

    Roll back the last pull or commit transaction on the
    repository, restoring the project to its earlier state.

    This command should be used with care. There is only one level of
    undo and there is no redo.

    This command is not intended for use on public repositories. Once
    a change is visible for pull by other users, undoing it locally is
    ineffective.
    """
    # all the work happens in the repository layer
    repo.undo()
2259
2271
2260 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2272 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2261 branch=None):
2273 branch=None):
2262 """update or merge working directory
2274 """update or merge working directory
2263
2275
2264 Update the working directory to the specified revision.
2276 Update the working directory to the specified revision.
2265
2277
2266 If there are no outstanding changes in the working directory and
2278 If there are no outstanding changes in the working directory and
2267 there is a linear relationship between the current version and the
2279 there is a linear relationship between the current version and the
2268 requested version, the result is the requested version.
2280 requested version, the result is the requested version.
2269
2281
2270 Otherwise the result is a merge between the contents of the
2282 Otherwise the result is a merge between the contents of the
2271 current working directory and the requested version. Files that
2283 current working directory and the requested version. Files that
2272 changed between either parent are marked as changed for the next
2284 changed between either parent are marked as changed for the next
2273 commit and a commit must be performed before any further updates
2285 commit and a commit must be performed before any further updates
2274 are allowed.
2286 are allowed.
2275
2287
2276 By default, update will refuse to run if doing so would require
2288 By default, update will refuse to run if doing so would require
2277 merging or discarding local changes.
2289 merging or discarding local changes.
2278 """
2290 """
2279 if branch:
2291 if branch:
2280 br = repo.branchlookup(branch=branch)
2292 br = repo.branchlookup(branch=branch)
2281 found = []
2293 found = []
2282 for x in br:
2294 for x in br:
2283 if branch in br[x]:
2295 if branch in br[x]:
2284 found.append(x)
2296 found.append(x)
2285 if len(found) > 1:
2297 if len(found) > 1:
2286 ui.warn(_("Found multiple heads for %s\n") % branch)
2298 ui.warn(_("Found multiple heads for %s\n") % branch)
2287 for x in found:
2299 for x in found:
2288 show_changeset(ui, repo, changenode=x, brinfo=br)
2300 show_changeset(ui, repo, changenode=x, brinfo=br)
2289 return 1
2301 return 1
2290 if len(found) == 1:
2302 if len(found) == 1:
2291 node = found[0]
2303 node = found[0]
2292 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2304 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2293 else:
2305 else:
2294 ui.warn(_("branch %s not found\n") % (branch))
2306 ui.warn(_("branch %s not found\n") % (branch))
2295 return 1
2307 return 1
2296 else:
2308 else:
2297 node = node and repo.lookup(node) or repo.changelog.tip()
2309 node = node and repo.lookup(node) or repo.changelog.tip()
2298 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2310 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2299
2311
2300 def verify(ui, repo):
2312 def verify(ui, repo):
2301 """verify the integrity of the repository
2313 """verify the integrity of the repository
2302
2314
2303 Verify the integrity of the current repository.
2315 Verify the integrity of the current repository.
2304
2316
2305 This will perform an extensive check of the repository's
2317 This will perform an extensive check of the repository's
2306 integrity, validating the hashes and checksums of each entry in
2318 integrity, validating the hashes and checksums of each entry in
2307 the changelog, manifest, and tracked files, as well as the
2319 the changelog, manifest, and tracked files, as well as the
2308 integrity of their crosslinks and indices.
2320 integrity of their crosslinks and indices.
2309 """
2321 """
2310 return repo.verify()
2322 return repo.verify()
2311
2323
2312 # Command options and aliases are listed here, alphabetically
2324 # Command options and aliases are listed here, alphabetically
2313
2325
2314 table = {
2326 table = {
2315 "^add":
2327 "^add":
2316 (add,
2328 (add,
2317 [('I', 'include', [], _('include names matching the given patterns')),
2329 [('I', 'include', [], _('include names matching the given patterns')),
2318 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2330 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2319 _('hg add [OPTION]... [FILE]...')),
2331 _('hg add [OPTION]... [FILE]...')),
2320 "addremove":
2332 "addremove":
2321 (addremove,
2333 (addremove,
2322 [('I', 'include', [], _('include names matching the given patterns')),
2334 [('I', 'include', [], _('include names matching the given patterns')),
2323 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2335 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2324 _('hg addremove [OPTION]... [FILE]...')),
2336 _('hg addremove [OPTION]... [FILE]...')),
2325 "^annotate":
2337 "^annotate":
2326 (annotate,
2338 (annotate,
2327 [('r', 'rev', '', _('annotate the specified revision')),
2339 [('r', 'rev', '', _('annotate the specified revision')),
2328 ('a', 'text', None, _('treat all files as text')),
2340 ('a', 'text', None, _('treat all files as text')),
2329 ('u', 'user', None, _('list the author')),
2341 ('u', 'user', None, _('list the author')),
2330 ('d', 'date', None, _('list the date')),
2342 ('d', 'date', None, _('list the date')),
2331 ('n', 'number', None, _('list the revision number (default)')),
2343 ('n', 'number', None, _('list the revision number (default)')),
2332 ('c', 'changeset', None, _('list the changeset')),
2344 ('c', 'changeset', None, _('list the changeset')),
2333 ('I', 'include', [], _('include names matching the given patterns')),
2345 ('I', 'include', [], _('include names matching the given patterns')),
2334 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2346 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2335 _('hg annotate [OPTION]... FILE...')),
2347 _('hg annotate [OPTION]... FILE...')),
2336 "bundle":
2348 "bundle":
2337 (bundle,
2349 (bundle,
2338 [],
2350 [],
2339 _('hg bundle FILE DEST')),
2351 _('hg bundle FILE DEST')),
2340 "cat":
2352 "cat":
2341 (cat,
2353 (cat,
2342 [('I', 'include', [], _('include names matching the given patterns')),
2354 [('I', 'include', [], _('include names matching the given patterns')),
2343 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2355 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2344 ('o', 'output', '', _('print output to file with formatted name')),
2356 ('o', 'output', '', _('print output to file with formatted name')),
2345 ('r', 'rev', '', _('print the given revision'))],
2357 ('r', 'rev', '', _('print the given revision'))],
2346 _('hg cat [OPTION]... FILE...')),
2358 _('hg cat [OPTION]... FILE...')),
2347 "^clone":
2359 "^clone":
2348 (clone,
2360 (clone,
2349 [('U', 'noupdate', None, _('do not update the new working directory')),
2361 [('U', 'noupdate', None, _('do not update the new working directory')),
2350 ('e', 'ssh', '', _('specify ssh command to use')),
2362 ('e', 'ssh', '', _('specify ssh command to use')),
2351 ('', 'pull', None, _('use pull protocol to copy metadata')),
2363 ('', 'pull', None, _('use pull protocol to copy metadata')),
2352 ('r', 'rev', [],
2364 ('r', 'rev', [],
2353 _('a changeset you would like to have after cloning')),
2365 _('a changeset you would like to have after cloning')),
2354 ('', 'remotecmd', '',
2366 ('', 'remotecmd', '',
2355 _('specify hg command to run on the remote side'))],
2367 _('specify hg command to run on the remote side'))],
2356 _('hg clone [OPTION]... SOURCE [DEST]')),
2368 _('hg clone [OPTION]... SOURCE [DEST]')),
2357 "^commit|ci":
2369 "^commit|ci":
2358 (commit,
2370 (commit,
2359 [('A', 'addremove', None, _('run addremove during commit')),
2371 [('A', 'addremove', None, _('run addremove during commit')),
2360 ('I', 'include', [], _('include names matching the given patterns')),
2372 ('I', 'include', [], _('include names matching the given patterns')),
2361 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2373 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2362 ('m', 'message', '', _('use <text> as commit message')),
2374 ('m', 'message', '', _('use <text> as commit message')),
2363 ('l', 'logfile', '', _('read the commit message from <file>')),
2375 ('l', 'logfile', '', _('read the commit message from <file>')),
2364 ('d', 'date', '', _('record datecode as commit date')),
2376 ('d', 'date', '', _('record datecode as commit date')),
2365 ('u', 'user', '', _('record user as commiter'))],
2377 ('u', 'user', '', _('record user as commiter'))],
2366 _('hg commit [OPTION]... [FILE]...')),
2378 _('hg commit [OPTION]... [FILE]...')),
2367 "copy|cp":
2379 "copy|cp":
2368 (copy,
2380 (copy,
2369 [('I', 'include', [], _('include names matching the given patterns')),
2381 [('I', 'include', [], _('include names matching the given patterns')),
2370 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2382 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2371 ('A', 'after', None, _('record a copy that has already occurred')),
2383 ('A', 'after', None, _('record a copy that has already occurred')),
2372 ('f', 'force', None,
2384 ('f', 'force', None,
2373 _('forcibly copy over an existing managed file'))],
2385 _('forcibly copy over an existing managed file'))],
2374 _('hg copy [OPTION]... [SOURCE]... DEST')),
2386 _('hg copy [OPTION]... [SOURCE]... DEST')),
2375 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2387 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2388 "debugrebuildstate":
2389 (debugrebuildstate,
2390 [('r', 'rev', "", _("revision to rebuild to"))],
2391 _('debugrebuildstate [-r REV] [REV]')),
2376 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2392 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2377 "debugconfig": (debugconfig, [], _('debugconfig')),
2393 "debugconfig": (debugconfig, [], _('debugconfig')),
2378 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2394 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2379 "debugstate": (debugstate, [], _('debugstate')),
2395 "debugstate": (debugstate, [], _('debugstate')),
2380 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2396 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2381 "debugindex": (debugindex, [], _('debugindex FILE')),
2397 "debugindex": (debugindex, [], _('debugindex FILE')),
2382 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2398 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2383 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2399 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2384 "debugwalk":
2400 "debugwalk":
2385 (debugwalk,
2401 (debugwalk,
2386 [('I', 'include', [], _('include names matching the given patterns')),
2402 [('I', 'include', [], _('include names matching the given patterns')),
2387 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2403 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2388 _('debugwalk [OPTION]... [FILE]...')),
2404 _('debugwalk [OPTION]... [FILE]...')),
2389 "^diff":
2405 "^diff":
2390 (diff,
2406 (diff,
2391 [('r', 'rev', [], _('revision')),
2407 [('r', 'rev', [], _('revision')),
2392 ('a', 'text', None, _('treat all files as text')),
2408 ('a', 'text', None, _('treat all files as text')),
2393 ('I', 'include', [], _('include names matching the given patterns')),
2409 ('I', 'include', [], _('include names matching the given patterns')),
2394 ('p', 'show-function', None,
2410 ('p', 'show-function', None,
2395 _('show which function each change is in')),
2411 _('show which function each change is in')),
2396 ('w', 'ignore-all-space', None,
2412 ('w', 'ignore-all-space', None,
2397 _('ignore white space when comparing lines')),
2413 _('ignore white space when comparing lines')),
2398 ('X', 'exclude', [],
2414 ('X', 'exclude', [],
2399 _('exclude names matching the given patterns'))],
2415 _('exclude names matching the given patterns'))],
2400 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2416 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2401 "^export":
2417 "^export":
2402 (export,
2418 (export,
2403 [('o', 'output', '', _('print output to file with formatted name')),
2419 [('o', 'output', '', _('print output to file with formatted name')),
2404 ('a', 'text', None, _('treat all files as text')),
2420 ('a', 'text', None, _('treat all files as text')),
2405 ('', 'switch-parent', None, _('diff against the second parent'))],
2421 ('', 'switch-parent', None, _('diff against the second parent'))],
2406 _('hg export [-a] [-o OUTFILE] REV...')),
2422 _('hg export [-a] [-o OUTFILE] REV...')),
2407 "forget":
2423 "forget":
2408 (forget,
2424 (forget,
2409 [('I', 'include', [], _('include names matching the given patterns')),
2425 [('I', 'include', [], _('include names matching the given patterns')),
2410 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2426 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2411 _('hg forget [OPTION]... FILE...')),
2427 _('hg forget [OPTION]... FILE...')),
2412 "grep":
2428 "grep":
2413 (grep,
2429 (grep,
2414 [('0', 'print0', None, _('end fields with NUL')),
2430 [('0', 'print0', None, _('end fields with NUL')),
2415 ('I', 'include', [], _('include names matching the given patterns')),
2431 ('I', 'include', [], _('include names matching the given patterns')),
2416 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2432 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2417 ('', 'all', None, _('print all revisions that match')),
2433 ('', 'all', None, _('print all revisions that match')),
2418 ('i', 'ignore-case', None, _('ignore case when matching')),
2434 ('i', 'ignore-case', None, _('ignore case when matching')),
2419 ('l', 'files-with-matches', None,
2435 ('l', 'files-with-matches', None,
2420 _('print only filenames and revs that match')),
2436 _('print only filenames and revs that match')),
2421 ('n', 'line-number', None, _('print matching line numbers')),
2437 ('n', 'line-number', None, _('print matching line numbers')),
2422 ('r', 'rev', [], _('search in given revision range')),
2438 ('r', 'rev', [], _('search in given revision range')),
2423 ('u', 'user', None, _('print user who committed change'))],
2439 ('u', 'user', None, _('print user who committed change'))],
2424 _('hg grep [OPTION]... PATTERN [FILE]...')),
2440 _('hg grep [OPTION]... PATTERN [FILE]...')),
2425 "heads":
2441 "heads":
2426 (heads,
2442 (heads,
2427 [('b', 'branches', None, _('find branch info')),
2443 [('b', 'branches', None, _('find branch info')),
2428 ('r', 'rev', '', _('show only heads which are descendants of rev'))],
2444 ('r', 'rev', '', _('show only heads which are descendants of rev'))],
2429 _('hg heads [-b] [-r <rev>]')),
2445 _('hg heads [-b] [-r <rev>]')),
2430 "help": (help_, [], _('hg help [COMMAND]')),
2446 "help": (help_, [], _('hg help [COMMAND]')),
2431 "identify|id": (identify, [], _('hg identify')),
2447 "identify|id": (identify, [], _('hg identify')),
2432 "import|patch":
2448 "import|patch":
2433 (import_,
2449 (import_,
2434 [('p', 'strip', 1,
2450 [('p', 'strip', 1,
2435 _('directory strip option for patch. This has the same\n') +
2451 _('directory strip option for patch. This has the same\n') +
2436 _('meaning as the corresponding patch option')),
2452 _('meaning as the corresponding patch option')),
2437 ('f', 'force', None,
2453 ('f', 'force', None,
2438 _('skip check for outstanding uncommitted changes')),
2454 _('skip check for outstanding uncommitted changes')),
2439 ('b', 'base', '', _('base path'))],
2455 ('b', 'base', '', _('base path'))],
2440 _('hg import [-f] [-p NUM] [-b BASE] PATCH...')),
2456 _('hg import [-f] [-p NUM] [-b BASE] PATCH...')),
2441 "incoming|in": (incoming,
2457 "incoming|in": (incoming,
2442 [('M', 'no-merges', None, _('do not show merges')),
2458 [('M', 'no-merges', None, _('do not show merges')),
2443 ('p', 'patch', None, _('show patch')),
2459 ('p', 'patch', None, _('show patch')),
2444 ('n', 'newest-first', None, _('show newest record first'))],
2460 ('n', 'newest-first', None, _('show newest record first'))],
2445 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2461 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2446 "^init": (init, [], _('hg init [DEST]')),
2462 "^init": (init, [], _('hg init [DEST]')),
2447 "locate":
2463 "locate":
2448 (locate,
2464 (locate,
2449 [('r', 'rev', '', _('search the repository as it stood at rev')),
2465 [('r', 'rev', '', _('search the repository as it stood at rev')),
2450 ('0', 'print0', None,
2466 ('0', 'print0', None,
2451 _('end filenames with NUL, for use with xargs')),
2467 _('end filenames with NUL, for use with xargs')),
2452 ('f', 'fullpath', None,
2468 ('f', 'fullpath', None,
2453 _('print complete paths from the filesystem root')),
2469 _('print complete paths from the filesystem root')),
2454 ('I', 'include', [], _('include names matching the given patterns')),
2470 ('I', 'include', [], _('include names matching the given patterns')),
2455 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2471 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2456 _('hg locate [OPTION]... [PATTERN]...')),
2472 _('hg locate [OPTION]... [PATTERN]...')),
2457 "^log|history":
2473 "^log|history":
2458 (log,
2474 (log,
2459 [('I', 'include', [], _('include names matching the given patterns')),
2475 [('I', 'include', [], _('include names matching the given patterns')),
2460 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2476 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2461 ('b', 'branch', None, _('show branches')),
2477 ('b', 'branch', None, _('show branches')),
2462 ('k', 'keyword', [], _('search for a keyword')),
2478 ('k', 'keyword', [], _('search for a keyword')),
2463 ('l', 'limit', '', _('limit number of changes displayed')),
2479 ('l', 'limit', '', _('limit number of changes displayed')),
2464 ('r', 'rev', [], _('show the specified revision or range')),
2480 ('r', 'rev', [], _('show the specified revision or range')),
2465 ('M', 'no-merges', None, _('do not show merges')),
2481 ('M', 'no-merges', None, _('do not show merges')),
2466 ('m', 'only-merges', None, _('show only merges')),
2482 ('m', 'only-merges', None, _('show only merges')),
2467 ('p', 'patch', None, _('show patch'))],
2483 ('p', 'patch', None, _('show patch'))],
2468 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2484 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2469 "manifest": (manifest, [], _('hg manifest [REV]')),
2485 "manifest": (manifest, [], _('hg manifest [REV]')),
2470 "outgoing|out": (outgoing,
2486 "outgoing|out": (outgoing,
2471 [('M', 'no-merges', None, _('do not show merges')),
2487 [('M', 'no-merges', None, _('do not show merges')),
2472 ('p', 'patch', None, _('show patch')),
2488 ('p', 'patch', None, _('show patch')),
2473 ('n', 'newest-first', None, _('show newest record first'))],
2489 ('n', 'newest-first', None, _('show newest record first'))],
2474 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2490 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2475 "^parents":
2491 "^parents":
2476 (parents,
2492 (parents,
2477 [('b', 'branch', None, _('show branches'))],
2493 [('b', 'branch', None, _('show branches'))],
2478 _('hg parents [-b] [REV]')),
2494 _('hg parents [-b] [REV]')),
2479 "paths": (paths, [], _('hg paths [NAME]')),
2495 "paths": (paths, [], _('hg paths [NAME]')),
2480 "^pull":
2496 "^pull":
2481 (pull,
2497 (pull,
2482 [('u', 'update', None,
2498 [('u', 'update', None,
2483 _('update the working directory to tip after pull')),
2499 _('update the working directory to tip after pull')),
2484 ('e', 'ssh', '', _('specify ssh command to use')),
2500 ('e', 'ssh', '', _('specify ssh command to use')),
2485 ('r', 'rev', [], _('a specific revision you would like to pull')),
2501 ('r', 'rev', [], _('a specific revision you would like to pull')),
2486 ('', 'remotecmd', '',
2502 ('', 'remotecmd', '',
2487 _('specify hg command to run on the remote side'))],
2503 _('specify hg command to run on the remote side'))],
2488 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2504 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2489 "^push":
2505 "^push":
2490 (push,
2506 (push,
2491 [('f', 'force', None, _('force push')),
2507 [('f', 'force', None, _('force push')),
2492 ('e', 'ssh', '', _('specify ssh command to use')),
2508 ('e', 'ssh', '', _('specify ssh command to use')),
2493 ('', 'remotecmd', '',
2509 ('', 'remotecmd', '',
2494 _('specify hg command to run on the remote side'))],
2510 _('specify hg command to run on the remote side'))],
2495 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2511 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2496 "rawcommit":
2512 "rawcommit":
2497 (rawcommit,
2513 (rawcommit,
2498 [('p', 'parent', [], _('parent')),
2514 [('p', 'parent', [], _('parent')),
2499 ('d', 'date', '', _('date code')),
2515 ('d', 'date', '', _('date code')),
2500 ('u', 'user', '', _('user')),
2516 ('u', 'user', '', _('user')),
2501 ('F', 'files', '', _('file list')),
2517 ('F', 'files', '', _('file list')),
2502 ('m', 'message', '', _('commit message')),
2518 ('m', 'message', '', _('commit message')),
2503 ('l', 'logfile', '', _('commit message file'))],
2519 ('l', 'logfile', '', _('commit message file'))],
2504 _('hg rawcommit [OPTION]... [FILE]...')),
2520 _('hg rawcommit [OPTION]... [FILE]...')),
2505 "recover": (recover, [], _('hg recover')),
2521 "recover": (recover, [], _('hg recover')),
2506 "^remove|rm":
2522 "^remove|rm":
2507 (remove,
2523 (remove,
2508 [('I', 'include', [], _('include names matching the given patterns')),
2524 [('I', 'include', [], _('include names matching the given patterns')),
2509 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2525 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2510 _('hg remove [OPTION]... FILE...')),
2526 _('hg remove [OPTION]... FILE...')),
2511 "rename|mv":
2527 "rename|mv":
2512 (rename,
2528 (rename,
2513 [('I', 'include', [], _('include names matching the given patterns')),
2529 [('I', 'include', [], _('include names matching the given patterns')),
2514 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2530 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2515 ('A', 'after', None, _('record a rename that has already occurred')),
2531 ('A', 'after', None, _('record a rename that has already occurred')),
2516 ('f', 'force', None,
2532 ('f', 'force', None,
2517 _('forcibly copy over an existing managed file'))],
2533 _('forcibly copy over an existing managed file'))],
2518 _('hg rename [OPTION]... [SOURCE]... DEST')),
2534 _('hg rename [OPTION]... [SOURCE]... DEST')),
2519 "^revert":
2535 "^revert":
2520 (revert,
2536 (revert,
2521 [('I', 'include', [], _('include names matching the given patterns')),
2537 [('I', 'include', [], _('include names matching the given patterns')),
2522 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2538 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2523 ('r', 'rev', '', _('revision to revert to'))],
2539 ('r', 'rev', '', _('revision to revert to'))],
2524 _('hg revert [-n] [-r REV] [NAME]...')),
2540 _('hg revert [-n] [-r REV] [NAME]...')),
2525 "root": (root, [], _('hg root')),
2541 "root": (root, [], _('hg root')),
2526 "^serve":
2542 "^serve":
2527 (serve,
2543 (serve,
2528 [('A', 'accesslog', '', _('name of access log file to write to')),
2544 [('A', 'accesslog', '', _('name of access log file to write to')),
2529 ('d', 'daemon', None, _('run server in background')),
2545 ('d', 'daemon', None, _('run server in background')),
2530 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2546 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2531 ('E', 'errorlog', '', _('name of error log file to write to')),
2547 ('E', 'errorlog', '', _('name of error log file to write to')),
2532 ('p', 'port', 0, _('port to use (default: 8000)')),
2548 ('p', 'port', 0, _('port to use (default: 8000)')),
2533 ('a', 'address', '', _('address to use')),
2549 ('a', 'address', '', _('address to use')),
2534 ('n', 'name', '',
2550 ('n', 'name', '',
2535 _('name to show in web pages (default: working dir)')),
2551 _('name to show in web pages (default: working dir)')),
2536 ('', 'pid-file', '', _('name of file to write process ID to')),
2552 ('', 'pid-file', '', _('name of file to write process ID to')),
2537 ('', 'stdio', None, _('for remote clients')),
2553 ('', 'stdio', None, _('for remote clients')),
2538 ('t', 'templates', '', _('web templates to use')),
2554 ('t', 'templates', '', _('web templates to use')),
2539 ('', 'style', '', _('template style to use')),
2555 ('', 'style', '', _('template style to use')),
2540 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2556 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2541 _('hg serve [OPTION]...')),
2557 _('hg serve [OPTION]...')),
2542 "^status|st":
2558 "^status|st":
2543 (status,
2559 (status,
2544 [('m', 'modified', None, _('show only modified files')),
2560 [('m', 'modified', None, _('show only modified files')),
2545 ('a', 'added', None, _('show only added files')),
2561 ('a', 'added', None, _('show only added files')),
2546 ('r', 'removed', None, _('show only removed files')),
2562 ('r', 'removed', None, _('show only removed files')),
2547 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2563 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2548 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2564 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2549 ('n', 'no-status', None, _('hide status prefix')),
2565 ('n', 'no-status', None, _('hide status prefix')),
2550 ('0', 'print0', None,
2566 ('0', 'print0', None,
2551 _('end filenames with NUL, for use with xargs')),
2567 _('end filenames with NUL, for use with xargs')),
2552 ('I', 'include', [], _('include names matching the given patterns')),
2568 ('I', 'include', [], _('include names matching the given patterns')),
2553 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2569 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2554 _('hg status [OPTION]... [FILE]...')),
2570 _('hg status [OPTION]... [FILE]...')),
2555 "tag":
2571 "tag":
2556 (tag,
2572 (tag,
2557 [('l', 'local', None, _('make the tag local')),
2573 [('l', 'local', None, _('make the tag local')),
2558 ('m', 'message', '', _('message for tag commit log entry')),
2574 ('m', 'message', '', _('message for tag commit log entry')),
2559 ('d', 'date', '', _('record datecode as commit date')),
2575 ('d', 'date', '', _('record datecode as commit date')),
2560 ('u', 'user', '', _('record user as commiter')),
2576 ('u', 'user', '', _('record user as commiter')),
2561 ('r', 'rev', '', _('revision to tag'))],
2577 ('r', 'rev', '', _('revision to tag'))],
2562 _('hg tag [-r REV] [OPTION]... NAME')),
2578 _('hg tag [-r REV] [OPTION]... NAME')),
2563 "tags": (tags, [], _('hg tags')),
2579 "tags": (tags, [], _('hg tags')),
2564 "tip": (tip, [('p', 'patch', None, _('show patch'))], _('hg tip')),
2580 "tip": (tip, [('p', 'patch', None, _('show patch'))], _('hg tip')),
2565 "unbundle":
2581 "unbundle":
2566 (unbundle,
2582 (unbundle,
2567 [('u', 'update', None,
2583 [('u', 'update', None,
2568 _('update the working directory to tip after unbundle'))],
2584 _('update the working directory to tip after unbundle'))],
2569 _('hg unbundle [-u] FILE')),
2585 _('hg unbundle [-u] FILE')),
2570 "undo": (undo, [], _('hg undo')),
2586 "undo": (undo, [], _('hg undo')),
2571 "^update|up|checkout|co":
2587 "^update|up|checkout|co":
2572 (update,
2588 (update,
2573 [('b', 'branch', '', _('checkout the head of a specific branch')),
2589 [('b', 'branch', '', _('checkout the head of a specific branch')),
2574 ('m', 'merge', None, _('allow merging of branches')),
2590 ('m', 'merge', None, _('allow merging of branches')),
2575 ('C', 'clean', None, _('overwrite locally modified files')),
2591 ('C', 'clean', None, _('overwrite locally modified files')),
2576 ('f', 'force', None, _('force a merge with outstanding changes'))],
2592 ('f', 'force', None, _('force a merge with outstanding changes'))],
2577 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2593 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2578 "verify": (verify, [], _('hg verify')),
2594 "verify": (verify, [], _('hg verify')),
2579 "version": (show_version, [], _('hg version')),
2595 "version": (show_version, [], _('hg version')),
2580 }
2596 }
2581
2597
2582 globalopts = [
2598 globalopts = [
2583 ('R', 'repository', '', _('repository root directory')),
2599 ('R', 'repository', '', _('repository root directory')),
2584 ('', 'cwd', '', _('change working directory')),
2600 ('', 'cwd', '', _('change working directory')),
2585 ('y', 'noninteractive', None,
2601 ('y', 'noninteractive', None,
2586 _('do not prompt, assume \'yes\' for any required answers')),
2602 _('do not prompt, assume \'yes\' for any required answers')),
2587 ('q', 'quiet', None, _('suppress output')),
2603 ('q', 'quiet', None, _('suppress output')),
2588 ('v', 'verbose', None, _('enable additional output')),
2604 ('v', 'verbose', None, _('enable additional output')),
2589 ('', 'debug', None, _('enable debugging output')),
2605 ('', 'debug', None, _('enable debugging output')),
2590 ('', 'debugger', None, _('start debugger')),
2606 ('', 'debugger', None, _('start debugger')),
2591 ('', 'traceback', None, _('print traceback on exception')),
2607 ('', 'traceback', None, _('print traceback on exception')),
2592 ('', 'time', None, _('time how long the command takes')),
2608 ('', 'time', None, _('time how long the command takes')),
2593 ('', 'profile', None, _('print command execution profile')),
2609 ('', 'profile', None, _('print command execution profile')),
2594 ('', 'version', None, _('output version information and exit')),
2610 ('', 'version', None, _('output version information and exit')),
2595 ('h', 'help', None, _('display help and exit')),
2611 ('h', 'help', None, _('display help and exit')),
2596 ]
2612 ]
2597
2613
2598 norepo = ("clone init version help debugancestor debugconfig debugdata"
2614 norepo = ("clone init version help debugancestor debugconfig debugdata"
2599 " debugindex debugindexdot paths")
2615 " debugindex debugindexdot paths")
2600
2616
2601 def find(cmd):
2617 def find(cmd):
2602 """Return (aliases, command table entry) for command string."""
2618 """Return (aliases, command table entry) for command string."""
2603 choice = None
2619 choice = None
2604 count = 0
2620 count = 0
2605 for e in table.keys():
2621 for e in table.keys():
2606 aliases = e.lstrip("^").split("|")
2622 aliases = e.lstrip("^").split("|")
2607 if cmd in aliases:
2623 if cmd in aliases:
2608 return aliases, table[e]
2624 return aliases, table[e]
2609 for a in aliases:
2625 for a in aliases:
2610 if a.startswith(cmd):
2626 if a.startswith(cmd):
2611 count += 1
2627 count += 1
2612 choice = aliases, table[e]
2628 choice = aliases, table[e]
2613 break
2629 break
2614
2630
2615 if count > 1:
2631 if count > 1:
2616 raise AmbiguousCommand(cmd)
2632 raise AmbiguousCommand(cmd)
2617
2633
2618 if choice:
2634 if choice:
2619 return choice
2635 return choice
2620
2636
2621 raise UnknownCommand(cmd)
2637 raise UnknownCommand(cmd)
2622
2638
2623 class SignalInterrupt(Exception):
2639 class SignalInterrupt(Exception):
2624 """Exception raised on SIGTERM and SIGHUP."""
2640 """Exception raised on SIGTERM and SIGHUP."""
2625
2641
2626 def catchterm(*args):
2642 def catchterm(*args):
2627 raise SignalInterrupt
2643 raise SignalInterrupt
2628
2644
2629 def run():
2645 def run():
2630 sys.exit(dispatch(sys.argv[1:]))
2646 sys.exit(dispatch(sys.argv[1:]))
2631
2647
2632 class ParseError(Exception):
2648 class ParseError(Exception):
2633 """Exception raised on errors in parsing the command line."""
2649 """Exception raised on errors in parsing the command line."""
2634
2650
2635 def parse(ui, args):
2651 def parse(ui, args):
2636 options = {}
2652 options = {}
2637 cmdoptions = {}
2653 cmdoptions = {}
2638
2654
2639 try:
2655 try:
2640 args = fancyopts.fancyopts(args, globalopts, options)
2656 args = fancyopts.fancyopts(args, globalopts, options)
2641 except fancyopts.getopt.GetoptError, inst:
2657 except fancyopts.getopt.GetoptError, inst:
2642 raise ParseError(None, inst)
2658 raise ParseError(None, inst)
2643
2659
2644 if args:
2660 if args:
2645 cmd, args = args[0], args[1:]
2661 cmd, args = args[0], args[1:]
2646 aliases, i = find(cmd)
2662 aliases, i = find(cmd)
2647 cmd = aliases[0]
2663 cmd = aliases[0]
2648 defaults = ui.config("defaults", cmd)
2664 defaults = ui.config("defaults", cmd)
2649 if defaults:
2665 if defaults:
2650 args = defaults.split() + args
2666 args = defaults.split() + args
2651 c = list(i[1])
2667 c = list(i[1])
2652 else:
2668 else:
2653 cmd = None
2669 cmd = None
2654 c = []
2670 c = []
2655
2671
2656 # combine global options into local
2672 # combine global options into local
2657 for o in globalopts:
2673 for o in globalopts:
2658 c.append((o[0], o[1], options[o[1]], o[3]))
2674 c.append((o[0], o[1], options[o[1]], o[3]))
2659
2675
2660 try:
2676 try:
2661 args = fancyopts.fancyopts(args, c, cmdoptions)
2677 args = fancyopts.fancyopts(args, c, cmdoptions)
2662 except fancyopts.getopt.GetoptError, inst:
2678 except fancyopts.getopt.GetoptError, inst:
2663 raise ParseError(cmd, inst)
2679 raise ParseError(cmd, inst)
2664
2680
2665 # separate global options back out
2681 # separate global options back out
2666 for o in globalopts:
2682 for o in globalopts:
2667 n = o[1]
2683 n = o[1]
2668 options[n] = cmdoptions[n]
2684 options[n] = cmdoptions[n]
2669 del cmdoptions[n]
2685 del cmdoptions[n]
2670
2686
2671 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2687 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2672
2688
2673 def dispatch(args):
2689 def dispatch(args):
2674 signal.signal(signal.SIGTERM, catchterm)
2690 signal.signal(signal.SIGTERM, catchterm)
2675 try:
2691 try:
2676 signal.signal(signal.SIGHUP, catchterm)
2692 signal.signal(signal.SIGHUP, catchterm)
2677 except AttributeError:
2693 except AttributeError:
2678 pass
2694 pass
2679
2695
2680 try:
2696 try:
2681 u = ui.ui()
2697 u = ui.ui()
2682 except util.Abort, inst:
2698 except util.Abort, inst:
2683 sys.stderr.write(_("abort: %s\n") % inst)
2699 sys.stderr.write(_("abort: %s\n") % inst)
2684 sys.exit(1)
2700 sys.exit(1)
2685
2701
2686 external = []
2702 external = []
2687 for x in u.extensions():
2703 for x in u.extensions():
2688 def on_exception(exc, inst):
2704 def on_exception(exc, inst):
2689 u.warn(_("*** failed to import extension %s\n") % x[1])
2705 u.warn(_("*** failed to import extension %s\n") % x[1])
2690 u.warn("%s\n" % inst)
2706 u.warn("%s\n" % inst)
2691 if "--traceback" in sys.argv[1:]:
2707 if "--traceback" in sys.argv[1:]:
2692 traceback.print_exc()
2708 traceback.print_exc()
2693 if x[1]:
2709 if x[1]:
2694 try:
2710 try:
2695 mod = imp.load_source(x[0], x[1])
2711 mod = imp.load_source(x[0], x[1])
2696 except Exception, inst:
2712 except Exception, inst:
2697 on_exception(Exception, inst)
2713 on_exception(Exception, inst)
2698 continue
2714 continue
2699 else:
2715 else:
2700 def importh(name):
2716 def importh(name):
2701 mod = __import__(name)
2717 mod = __import__(name)
2702 components = name.split('.')
2718 components = name.split('.')
2703 for comp in components[1:]:
2719 for comp in components[1:]:
2704 mod = getattr(mod, comp)
2720 mod = getattr(mod, comp)
2705 return mod
2721 return mod
2706 try:
2722 try:
2707 mod = importh(x[0])
2723 mod = importh(x[0])
2708 except Exception, inst:
2724 except Exception, inst:
2709 on_exception(Exception, inst)
2725 on_exception(Exception, inst)
2710 continue
2726 continue
2711
2727
2712 external.append(mod)
2728 external.append(mod)
2713 for x in external:
2729 for x in external:
2714 cmdtable = getattr(x, 'cmdtable', {})
2730 cmdtable = getattr(x, 'cmdtable', {})
2715 for t in cmdtable:
2731 for t in cmdtable:
2716 if t in table:
2732 if t in table:
2717 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2733 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2718 table.update(cmdtable)
2734 table.update(cmdtable)
2719
2735
2720 try:
2736 try:
2721 cmd, func, args, options, cmdoptions = parse(u, args)
2737 cmd, func, args, options, cmdoptions = parse(u, args)
2722 except ParseError, inst:
2738 except ParseError, inst:
2723 if inst.args[0]:
2739 if inst.args[0]:
2724 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2740 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2725 help_(u, inst.args[0])
2741 help_(u, inst.args[0])
2726 else:
2742 else:
2727 u.warn(_("hg: %s\n") % inst.args[1])
2743 u.warn(_("hg: %s\n") % inst.args[1])
2728 help_(u, 'shortlist')
2744 help_(u, 'shortlist')
2729 sys.exit(-1)
2745 sys.exit(-1)
2730 except AmbiguousCommand, inst:
2746 except AmbiguousCommand, inst:
2731 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2747 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2732 sys.exit(1)
2748 sys.exit(1)
2733 except UnknownCommand, inst:
2749 except UnknownCommand, inst:
2734 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2750 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2735 help_(u, 'shortlist')
2751 help_(u, 'shortlist')
2736 sys.exit(1)
2752 sys.exit(1)
2737
2753
2738 if options["time"]:
2754 if options["time"]:
2739 def get_times():
2755 def get_times():
2740 t = os.times()
2756 t = os.times()
2741 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2757 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2742 t = (t[0], t[1], t[2], t[3], time.clock())
2758 t = (t[0], t[1], t[2], t[3], time.clock())
2743 return t
2759 return t
2744 s = get_times()
2760 s = get_times()
2745 def print_time():
2761 def print_time():
2746 t = get_times()
2762 t = get_times()
2747 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2763 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2748 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2764 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2749 atexit.register(print_time)
2765 atexit.register(print_time)
2750
2766
2751 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2767 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2752 not options["noninteractive"])
2768 not options["noninteractive"])
2753
2769
2754 # enter the debugger before command execution
2770 # enter the debugger before command execution
2755 if options['debugger']:
2771 if options['debugger']:
2756 pdb.set_trace()
2772 pdb.set_trace()
2757
2773
2758 try:
2774 try:
2759 try:
2775 try:
2760 if options['help']:
2776 if options['help']:
2761 help_(u, cmd, options['version'])
2777 help_(u, cmd, options['version'])
2762 sys.exit(0)
2778 sys.exit(0)
2763 elif options['version']:
2779 elif options['version']:
2764 show_version(u)
2780 show_version(u)
2765 sys.exit(0)
2781 sys.exit(0)
2766 elif not cmd:
2782 elif not cmd:
2767 help_(u, 'shortlist')
2783 help_(u, 'shortlist')
2768 sys.exit(0)
2784 sys.exit(0)
2769
2785
2770 if options['cwd']:
2786 if options['cwd']:
2771 try:
2787 try:
2772 os.chdir(options['cwd'])
2788 os.chdir(options['cwd'])
2773 except OSError, inst:
2789 except OSError, inst:
2774 raise util.Abort('%s: %s' %
2790 raise util.Abort('%s: %s' %
2775 (options['cwd'], inst.strerror))
2791 (options['cwd'], inst.strerror))
2776
2792
2777 if cmd not in norepo.split():
2793 if cmd not in norepo.split():
2778 path = options["repository"] or ""
2794 path = options["repository"] or ""
2779 repo = hg.repository(ui=u, path=path)
2795 repo = hg.repository(ui=u, path=path)
2780 for x in external:
2796 for x in external:
2781 if hasattr(x, 'reposetup'):
2797 if hasattr(x, 'reposetup'):
2782 x.reposetup(u, repo)
2798 x.reposetup(u, repo)
2783 d = lambda: func(u, repo, *args, **cmdoptions)
2799 d = lambda: func(u, repo, *args, **cmdoptions)
2784 else:
2800 else:
2785 d = lambda: func(u, *args, **cmdoptions)
2801 d = lambda: func(u, *args, **cmdoptions)
2786
2802
2787 if options['profile']:
2803 if options['profile']:
2788 import hotshot, hotshot.stats
2804 import hotshot, hotshot.stats
2789 prof = hotshot.Profile("hg.prof")
2805 prof = hotshot.Profile("hg.prof")
2790 r = prof.runcall(d)
2806 r = prof.runcall(d)
2791 prof.close()
2807 prof.close()
2792 stats = hotshot.stats.load("hg.prof")
2808 stats = hotshot.stats.load("hg.prof")
2793 stats.strip_dirs()
2809 stats.strip_dirs()
2794 stats.sort_stats('time', 'calls')
2810 stats.sort_stats('time', 'calls')
2795 stats.print_stats(40)
2811 stats.print_stats(40)
2796 return r
2812 return r
2797 else:
2813 else:
2798 return d()
2814 return d()
2799 except:
2815 except:
2800 # enter the debugger when we hit an exception
2816 # enter the debugger when we hit an exception
2801 if options['debugger']:
2817 if options['debugger']:
2802 pdb.post_mortem(sys.exc_info()[2])
2818 pdb.post_mortem(sys.exc_info()[2])
2803 if options['traceback']:
2819 if options['traceback']:
2804 traceback.print_exc()
2820 traceback.print_exc()
2805 raise
2821 raise
2806 except hg.RepoError, inst:
2822 except hg.RepoError, inst:
2807 u.warn(_("abort: "), inst, "!\n")
2823 u.warn(_("abort: "), inst, "!\n")
2808 except revlog.RevlogError, inst:
2824 except revlog.RevlogError, inst:
2809 u.warn(_("abort: "), inst, "!\n")
2825 u.warn(_("abort: "), inst, "!\n")
2810 except SignalInterrupt:
2826 except SignalInterrupt:
2811 u.warn(_("killed!\n"))
2827 u.warn(_("killed!\n"))
2812 except KeyboardInterrupt:
2828 except KeyboardInterrupt:
2813 try:
2829 try:
2814 u.warn(_("interrupted!\n"))
2830 u.warn(_("interrupted!\n"))
2815 except IOError, inst:
2831 except IOError, inst:
2816 if inst.errno == errno.EPIPE:
2832 if inst.errno == errno.EPIPE:
2817 if u.debugflag:
2833 if u.debugflag:
2818 u.warn(_("\nbroken pipe\n"))
2834 u.warn(_("\nbroken pipe\n"))
2819 else:
2835 else:
2820 raise
2836 raise
2821 except IOError, inst:
2837 except IOError, inst:
2822 if hasattr(inst, "code"):
2838 if hasattr(inst, "code"):
2823 u.warn(_("abort: %s\n") % inst)
2839 u.warn(_("abort: %s\n") % inst)
2824 elif hasattr(inst, "reason"):
2840 elif hasattr(inst, "reason"):
2825 u.warn(_("abort: error: %s\n") % inst.reason[1])
2841 u.warn(_("abort: error: %s\n") % inst.reason[1])
2826 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2842 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2827 if u.debugflag:
2843 if u.debugflag:
2828 u.warn(_("broken pipe\n"))
2844 u.warn(_("broken pipe\n"))
2829 elif getattr(inst, "strerror", None):
2845 elif getattr(inst, "strerror", None):
2830 if getattr(inst, "filename", None):
2846 if getattr(inst, "filename", None):
2831 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2847 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2832 else:
2848 else:
2833 u.warn(_("abort: %s\n") % inst.strerror)
2849 u.warn(_("abort: %s\n") % inst.strerror)
2834 else:
2850 else:
2835 raise
2851 raise
2836 except OSError, inst:
2852 except OSError, inst:
2837 if hasattr(inst, "filename"):
2853 if hasattr(inst, "filename"):
2838 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2854 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2839 else:
2855 else:
2840 u.warn(_("abort: %s\n") % inst.strerror)
2856 u.warn(_("abort: %s\n") % inst.strerror)
2841 except util.Abort, inst:
2857 except util.Abort, inst:
2842 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2858 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2843 sys.exit(1)
2859 sys.exit(1)
2844 except TypeError, inst:
2860 except TypeError, inst:
2845 # was this an argument error?
2861 # was this an argument error?
2846 tb = traceback.extract_tb(sys.exc_info()[2])
2862 tb = traceback.extract_tb(sys.exc_info()[2])
2847 if len(tb) > 2: # no
2863 if len(tb) > 2: # no
2848 raise
2864 raise
2849 u.debug(inst, "\n")
2865 u.debug(inst, "\n")
2850 u.warn(_("%s: invalid arguments\n") % cmd)
2866 u.warn(_("%s: invalid arguments\n") % cmd)
2851 help_(u, cmd)
2867 help_(u, cmd)
2852 except AmbiguousCommand, inst:
2868 except AmbiguousCommand, inst:
2853 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2869 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2854 help_(u, 'shortlist')
2870 help_(u, 'shortlist')
2855 except UnknownCommand, inst:
2871 except UnknownCommand, inst:
2856 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2872 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2857 help_(u, 'shortlist')
2873 help_(u, 'shortlist')
2858 except SystemExit:
2874 except SystemExit:
2859 # don't catch this in the catch-all below
2875 # don't catch this in the catch-all below
2860 raise
2876 raise
2861 except:
2877 except:
2862 u.warn(_("** unknown exception encountered, details follow\n"))
2878 u.warn(_("** unknown exception encountered, details follow\n"))
2863 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2879 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2864 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2880 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2865 % version.get_version())
2881 % version.get_version())
2866 raise
2882 raise
2867
2883
2868 sys.exit(-1)
2884 sys.exit(-1)
@@ -1,420 +1,434 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 import struct, os
10 import struct, os
11 from node import *
11 from node import *
12 from i18n import gettext as _
12 from i18n import gettext as _
13 from demandload import *
13 from demandload import *
14 demandload(globals(), "time bisect stat util re errno")
14 demandload(globals(), "time bisect stat util re errno")
15
15
16 class dirstate(object):
16 class dirstate(object):
17 def __init__(self, opener, ui, root):
17 def __init__(self, opener, ui, root):
18 self.opener = opener
18 self.opener = opener
19 self.root = root
19 self.root = root
20 self.dirty = 0
20 self.dirty = 0
21 self.ui = ui
21 self.ui = ui
22 self.map = None
22 self.map = None
23 self.pl = None
23 self.pl = None
24 self.copies = {}
24 self.copies = {}
25 self.ignorefunc = None
25 self.ignorefunc = None
26 self.blockignore = False
26 self.blockignore = False
27
27
28 def wjoin(self, f):
28 def wjoin(self, f):
29 return os.path.join(self.root, f)
29 return os.path.join(self.root, f)
30
30
31 def getcwd(self):
31 def getcwd(self):
32 cwd = os.getcwd()
32 cwd = os.getcwd()
33 if cwd == self.root: return ''
33 if cwd == self.root: return ''
34 return cwd[len(self.root) + 1:]
34 return cwd[len(self.root) + 1:]
35
35
36 def hgignore(self):
36 def hgignore(self):
37 '''return the contents of .hgignore as a list of patterns.
37 '''return the contents of .hgignore as a list of patterns.
38
38
39 trailing white space is dropped.
39 trailing white space is dropped.
40 the escape character is backslash.
40 the escape character is backslash.
41 comments start with #.
41 comments start with #.
42 empty lines are skipped.
42 empty lines are skipped.
43
43
44 lines can be of the following formats:
44 lines can be of the following formats:
45
45
46 syntax: regexp # defaults following lines to non-rooted regexps
46 syntax: regexp # defaults following lines to non-rooted regexps
47 syntax: glob # defaults following lines to non-rooted globs
47 syntax: glob # defaults following lines to non-rooted globs
48 re:pattern # non-rooted regular expression
48 re:pattern # non-rooted regular expression
49 glob:pattern # non-rooted glob
49 glob:pattern # non-rooted glob
50 pattern # pattern of the current default type'''
50 pattern # pattern of the current default type'''
51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
52 def parselines(fp):
52 def parselines(fp):
53 for line in fp:
53 for line in fp:
54 escape = False
54 escape = False
55 for i in xrange(len(line)):
55 for i in xrange(len(line)):
56 if escape: escape = False
56 if escape: escape = False
57 elif line[i] == '\\': escape = True
57 elif line[i] == '\\': escape = True
58 elif line[i] == '#': break
58 elif line[i] == '#': break
59 line = line[:i].rstrip()
59 line = line[:i].rstrip()
60 if line: yield line
60 if line: yield line
61 pats = []
61 pats = []
62 try:
62 try:
63 fp = open(self.wjoin('.hgignore'))
63 fp = open(self.wjoin('.hgignore'))
64 syntax = 'relre:'
64 syntax = 'relre:'
65 for line in parselines(fp):
65 for line in parselines(fp):
66 if line.startswith('syntax:'):
66 if line.startswith('syntax:'):
67 s = line[7:].strip()
67 s = line[7:].strip()
68 try:
68 try:
69 syntax = syntaxes[s]
69 syntax = syntaxes[s]
70 except KeyError:
70 except KeyError:
71 self.ui.warn(_(".hgignore: ignoring invalid "
71 self.ui.warn(_(".hgignore: ignoring invalid "
72 "syntax '%s'\n") % s)
72 "syntax '%s'\n") % s)
73 continue
73 continue
74 pat = syntax + line
74 pat = syntax + line
75 for s in syntaxes.values():
75 for s in syntaxes.values():
76 if line.startswith(s):
76 if line.startswith(s):
77 pat = line
77 pat = line
78 break
78 break
79 pats.append(pat)
79 pats.append(pat)
80 except IOError: pass
80 except IOError: pass
81 return pats
81 return pats
82
82
83 def ignore(self, fn):
83 def ignore(self, fn):
84 '''default match function used by dirstate and localrepository.
84 '''default match function used by dirstate and localrepository.
85 this honours the .hgignore file, and nothing more.'''
85 this honours the .hgignore file, and nothing more.'''
86 if self.blockignore:
86 if self.blockignore:
87 return False
87 return False
88 if not self.ignorefunc:
88 if not self.ignorefunc:
89 ignore = self.hgignore()
89 ignore = self.hgignore()
90 if ignore:
90 if ignore:
91 files, self.ignorefunc, anypats = util.matcher(self.root,
91 files, self.ignorefunc, anypats = util.matcher(self.root,
92 inc=ignore,
92 inc=ignore,
93 src='.hgignore')
93 src='.hgignore')
94 else:
94 else:
95 self.ignorefunc = util.never
95 self.ignorefunc = util.never
96 return self.ignorefunc(fn)
96 return self.ignorefunc(fn)
97
97
98 def __del__(self):
98 def __del__(self):
99 if self.dirty:
99 if self.dirty:
100 self.write()
100 self.write()
101
101
102 def __getitem__(self, key):
102 def __getitem__(self, key):
103 try:
103 try:
104 return self.map[key]
104 return self.map[key]
105 except TypeError:
105 except TypeError:
106 self.lazyread()
106 self.lazyread()
107 return self[key]
107 return self[key]
108
108
109 def __contains__(self, key):
109 def __contains__(self, key):
110 self.lazyread()
110 self.lazyread()
111 return key in self.map
111 return key in self.map
112
112
113 def parents(self):
113 def parents(self):
114 self.lazyread()
114 self.lazyread()
115 return self.pl
115 return self.pl
116
116
117 def markdirty(self):
117 def markdirty(self):
118 if not self.dirty:
118 if not self.dirty:
119 self.dirty = 1
119 self.dirty = 1
120
120
121 def setparents(self, p1, p2=nullid):
121 def setparents(self, p1, p2=nullid):
122 self.lazyread()
122 self.lazyread()
123 self.markdirty()
123 self.markdirty()
124 self.pl = p1, p2
124 self.pl = p1, p2
125
125
126 def state(self, key):
126 def state(self, key):
127 try:
127 try:
128 return self[key][0]
128 return self[key][0]
129 except KeyError:
129 except KeyError:
130 return "?"
130 return "?"
131
131
132 def lazyread(self):
132 def lazyread(self):
133 if self.map is None:
133 if self.map is None:
134 self.read()
134 self.read()
135
135
136 def read(self):
136 def read(self):
137 self.map = {}
137 self.map = {}
138 self.pl = [nullid, nullid]
138 self.pl = [nullid, nullid]
139 try:
139 try:
140 st = self.opener("dirstate").read()
140 st = self.opener("dirstate").read()
141 if not st: return
141 if not st: return
142 except: return
142 except: return
143
143
144 self.pl = [st[:20], st[20: 40]]
144 self.pl = [st[:20], st[20: 40]]
145
145
146 pos = 40
146 pos = 40
147 while pos < len(st):
147 while pos < len(st):
148 e = struct.unpack(">cllll", st[pos:pos+17])
148 e = struct.unpack(">cllll", st[pos:pos+17])
149 l = e[4]
149 l = e[4]
150 pos += 17
150 pos += 17
151 f = st[pos:pos + l]
151 f = st[pos:pos + l]
152 if '\0' in f:
152 if '\0' in f:
153 f, c = f.split('\0')
153 f, c = f.split('\0')
154 self.copies[f] = c
154 self.copies[f] = c
155 self.map[f] = e[:4]
155 self.map[f] = e[:4]
156 pos += l
156 pos += l
157
157
158 def copy(self, source, dest):
158 def copy(self, source, dest):
159 self.lazyread()
159 self.lazyread()
160 self.markdirty()
160 self.markdirty()
161 self.copies[dest] = source
161 self.copies[dest] = source
162
162
163 def copied(self, file):
163 def copied(self, file):
164 return self.copies.get(file, None)
164 return self.copies.get(file, None)
165
165
166 def update(self, files, state, **kw):
166 def update(self, files, state, **kw):
167 ''' current states:
167 ''' current states:
168 n normal
168 n normal
169 m needs merging
169 m needs merging
170 r marked for removal
170 r marked for removal
171 a marked for addition'''
171 a marked for addition'''
172
172
173 if not files: return
173 if not files: return
174 self.lazyread()
174 self.lazyread()
175 self.markdirty()
175 self.markdirty()
176 for f in files:
176 for f in files:
177 if state == "r":
177 if state == "r":
178 self.map[f] = ('r', 0, 0, 0)
178 self.map[f] = ('r', 0, 0, 0)
179 else:
179 else:
180 s = os.lstat(self.wjoin(f))
180 s = os.lstat(self.wjoin(f))
181 st_size = kw.get('st_size', s.st_size)
181 st_size = kw.get('st_size', s.st_size)
182 st_mtime = kw.get('st_mtime', s.st_mtime)
182 st_mtime = kw.get('st_mtime', s.st_mtime)
183 self.map[f] = (state, s.st_mode, st_size, st_mtime)
183 self.map[f] = (state, s.st_mode, st_size, st_mtime)
184 if self.copies.has_key(f):
184 if self.copies.has_key(f):
185 del self.copies[f]
185 del self.copies[f]
186
186
187 def forget(self, files):
187 def forget(self, files):
188 if not files: return
188 if not files: return
189 self.lazyread()
189 self.lazyread()
190 self.markdirty()
190 self.markdirty()
191 for f in files:
191 for f in files:
192 try:
192 try:
193 del self.map[f]
193 del self.map[f]
194 except KeyError:
194 except KeyError:
195 self.ui.warn(_("not in dirstate: %s!\n") % f)
195 self.ui.warn(_("not in dirstate: %s!\n") % f)
196 pass
196 pass
197
197
198 def clear(self):
198 def clear(self):
199 self.map = {}
199 self.map = {}
200 self.copies = {}
201 self.markdirty()
202
203 def rebuild(self, parent, files):
204 self.clear()
205 umask = os.umask(0)
206 os.umask(umask)
207 for f, mode in files:
208 if mode:
209 self.map[f] = ('n', ~umask, -1, 0)
210 else:
211 self.map[f] = ('n', ~umask & 0666, -1, 0)
212 self.pl = (parent, nullid)
200 self.markdirty()
213 self.markdirty()
201
214
202 def write(self):
215 def write(self):
203 st = self.opener("dirstate", "w", atomic=True)
216 st = self.opener("dirstate", "w", atomic=True)
204 st.write("".join(self.pl))
217 st.write("".join(self.pl))
205 for f, e in self.map.items():
218 for f, e in self.map.items():
206 c = self.copied(f)
219 c = self.copied(f)
207 if c:
220 if c:
208 f = f + "\0" + c
221 f = f + "\0" + c
209 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
222 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
210 st.write(e + f)
223 st.write(e + f)
211 self.dirty = 0
224 self.dirty = 0
212
225
213 def filterfiles(self, files):
226 def filterfiles(self, files):
214 ret = {}
227 ret = {}
215 unknown = []
228 unknown = []
216
229
217 for x in files:
230 for x in files:
218 if x == '.':
231 if x == '.':
219 return self.map.copy()
232 return self.map.copy()
220 if x not in self.map:
233 if x not in self.map:
221 unknown.append(x)
234 unknown.append(x)
222 else:
235 else:
223 ret[x] = self.map[x]
236 ret[x] = self.map[x]
224
237
225 if not unknown:
238 if not unknown:
226 return ret
239 return ret
227
240
228 b = self.map.keys()
241 b = self.map.keys()
229 b.sort()
242 b.sort()
230 blen = len(b)
243 blen = len(b)
231
244
232 for x in unknown:
245 for x in unknown:
233 bs = bisect.bisect(b, x)
246 bs = bisect.bisect(b, x)
234 if bs != 0 and b[bs-1] == x:
247 if bs != 0 and b[bs-1] == x:
235 ret[x] = self.map[x]
248 ret[x] = self.map[x]
236 continue
249 continue
237 while bs < blen:
250 while bs < blen:
238 s = b[bs]
251 s = b[bs]
239 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
252 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
240 ret[s] = self.map[s]
253 ret[s] = self.map[s]
241 else:
254 else:
242 break
255 break
243 bs += 1
256 bs += 1
244 return ret
257 return ret
245
258
246 def supported_type(self, f, st, verbose=False):
259 def supported_type(self, f, st, verbose=False):
247 if stat.S_ISREG(st.st_mode):
260 if stat.S_ISREG(st.st_mode):
248 return True
261 return True
249 if verbose:
262 if verbose:
250 kind = 'unknown'
263 kind = 'unknown'
251 if stat.S_ISCHR(st.st_mode): kind = _('character device')
264 if stat.S_ISCHR(st.st_mode): kind = _('character device')
252 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
265 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
253 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
266 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
254 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
267 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
255 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
268 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
256 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
269 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
257 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
270 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
258 util.pathto(self.getcwd(), f),
271 util.pathto(self.getcwd(), f),
259 kind))
272 kind))
260 return False
273 return False
261
274
262 def statwalk(self, files=None, match=util.always, dc=None):
275 def statwalk(self, files=None, match=util.always, dc=None):
263 self.lazyread()
276 self.lazyread()
264
277
265 # walk all files by default
278 # walk all files by default
266 if not files:
279 if not files:
267 files = [self.root]
280 files = [self.root]
268 if not dc:
281 if not dc:
269 dc = self.map.copy()
282 dc = self.map.copy()
270 elif not dc:
283 elif not dc:
271 dc = self.filterfiles(files)
284 dc = self.filterfiles(files)
272
285
273 def statmatch(file, stat):
286 def statmatch(file_, stat):
274 file = util.pconvert(file)
287 file_ = util.pconvert(file_)
275 if file not in dc and self.ignore(file):
288 if file_ not in dc and self.ignore(file_):
276 return False
289 return False
277 return match(file)
290 return match(file_)
278
291
279 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
292 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
280
293
281 def walk(self, files=None, match=util.always, dc=None):
294 def walk(self, files=None, match=util.always, dc=None):
282 # filter out the stat
295 # filter out the stat
283 for src, f, st in self.statwalk(files, match, dc):
296 for src, f, st in self.statwalk(files, match, dc):
284 yield src, f
297 yield src, f
285
298
286 # walk recursively through the directory tree, finding all files
299 # walk recursively through the directory tree, finding all files
287 # matched by the statmatch function
300 # matched by the statmatch function
288 #
301 #
289 # results are yielded in a tuple (src, filename, st), where src
302 # results are yielded in a tuple (src, filename, st), where src
290 # is one of:
303 # is one of:
291 # 'f' the file was found in the directory tree
304 # 'f' the file was found in the directory tree
292 # 'm' the file was only in the dirstate and not in the tree
305 # 'm' the file was only in the dirstate and not in the tree
293 # and st is the stat result if the file was found in the directory.
306 # and st is the stat result if the file was found in the directory.
294 #
307 #
295 # dc is an optional arg for the current dirstate. dc is not modified
308 # dc is an optional arg for the current dirstate. dc is not modified
296 # directly by this function, but might be modified by your statmatch call.
309 # directly by this function, but might be modified by your statmatch call.
297 #
310 #
298 def walkhelper(self, files, statmatch, dc):
311 def walkhelper(self, files, statmatch, dc):
299 # recursion free walker, faster than os.walk.
312 # recursion free walker, faster than os.walk.
300 def findfiles(s):
313 def findfiles(s):
301 work = [s]
314 work = [s]
302 while work:
315 while work:
303 top = work.pop()
316 top = work.pop()
304 names = os.listdir(top)
317 names = os.listdir(top)
305 names.sort()
318 names.sort()
306 # nd is the top of the repository dir tree
319 # nd is the top of the repository dir tree
307 nd = util.normpath(top[len(self.root) + 1:])
320 nd = util.normpath(top[len(self.root) + 1:])
308 if nd == '.': nd = ''
321 if nd == '.': nd = ''
309 for f in names:
322 for f in names:
310 np = util.pconvert(os.path.join(nd, f))
323 np = util.pconvert(os.path.join(nd, f))
311 if seen(np):
324 if seen(np):
312 continue
325 continue
313 p = os.path.join(top, f)
326 p = os.path.join(top, f)
314 # don't trip over symlinks
327 # don't trip over symlinks
315 st = os.lstat(p)
328 st = os.lstat(p)
316 if stat.S_ISDIR(st.st_mode):
329 if stat.S_ISDIR(st.st_mode):
317 ds = os.path.join(nd, f +'/')
330 ds = os.path.join(nd, f +'/')
318 if statmatch(ds, st):
331 if statmatch(ds, st):
319 work.append(p)
332 work.append(p)
320 if statmatch(np, st) and np in dc:
333 if statmatch(np, st) and np in dc:
321 yield 'm', np, st
334 yield 'm', np, st
322 elif statmatch(np, st):
335 elif statmatch(np, st):
323 if self.supported_type(np, st):
336 if self.supported_type(np, st):
324 yield 'f', np, st
337 yield 'f', np, st
325 elif np in dc:
338 elif np in dc:
326 yield 'm', np, st
339 yield 'm', np, st
327
340
328 known = {'.hg': 1}
341 known = {'.hg': 1}
329 def seen(fn):
342 def seen(fn):
330 if fn in known: return True
343 if fn in known: return True
331 known[fn] = 1
344 known[fn] = 1
332
345
333 # step one, find all files that match our criteria
346 # step one, find all files that match our criteria
334 files.sort()
347 files.sort()
335 for ff in util.unique(files):
348 for ff in util.unique(files):
336 f = self.wjoin(ff)
349 f = self.wjoin(ff)
337 try:
350 try:
338 st = os.lstat(f)
351 st = os.lstat(f)
339 except OSError, inst:
352 except OSError, inst:
340 nf = util.normpath(ff)
353 nf = util.normpath(ff)
341 found = False
354 found = False
342 for fn in dc:
355 for fn in dc:
343 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
356 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
344 found = True
357 found = True
345 break
358 break
346 if not found:
359 if not found:
347 self.ui.warn('%s: %s\n' % (
360 self.ui.warn('%s: %s\n' % (
348 util.pathto(self.getcwd(), ff),
361 util.pathto(self.getcwd(), ff),
349 inst.strerror))
362 inst.strerror))
350 continue
363 continue
351 if stat.S_ISDIR(st.st_mode):
364 if stat.S_ISDIR(st.st_mode):
352 cmp1 = (lambda x, y: cmp(x[1], y[1]))
365 cmp1 = (lambda x, y: cmp(x[1], y[1]))
353 sorted = [ x for x in findfiles(f) ]
366 sorted_ = [ x for x in findfiles(f) ]
354 sorted.sort(cmp1)
367 sorted_.sort(cmp1)
355 for e in sorted:
368 for e in sorted_:
356 yield e
369 yield e
357 else:
370 else:
358 ff = util.normpath(ff)
371 ff = util.normpath(ff)
359 if seen(ff):
372 if seen(ff):
360 continue
373 continue
361 self.blockignore = True
374 self.blockignore = True
362 if statmatch(ff, st):
375 if statmatch(ff, st):
363 if self.supported_type(ff, st, verbose=True):
376 if self.supported_type(ff, st, verbose=True):
364 yield 'f', ff, st
377 yield 'f', ff, st
365 elif ff in dc:
378 elif ff in dc:
366 yield 'm', ff, st
379 yield 'm', ff, st
367 self.blockignore = False
380 self.blockignore = False
368
381
369 # step two run through anything left in the dc hash and yield
382 # step two run through anything left in the dc hash and yield
370 # if we haven't already seen it
383 # if we haven't already seen it
371 ks = dc.keys()
384 ks = dc.keys()
372 ks.sort()
385 ks.sort()
373 for k in ks:
386 for k in ks:
374 if not seen(k) and (statmatch(k, None)):
387 if not seen(k) and (statmatch(k, None)):
375 yield 'm', k, None
388 yield 'm', k, None
376
389
377 def changes(self, files=None, match=util.always):
390 def changes(self, files=None, match=util.always):
378 lookup, modified, added, unknown = [], [], [], []
391 lookup, modified, added, unknown = [], [], [], []
379 removed, deleted = [], []
392 removed, deleted = [], []
380
393
381 for src, fn, st in self.statwalk(files, match):
394 for src, fn, st in self.statwalk(files, match):
382 try:
395 try:
383 type, mode, size, time = self[fn]
396 type_, mode, size, time = self[fn]
384 except KeyError:
397 except KeyError:
385 unknown.append(fn)
398 unknown.append(fn)
386 continue
399 continue
387 if src == 'm':
400 if src == 'm':
388 nonexistent = True
401 nonexistent = True
389 if not st:
402 if not st:
390 try:
403 try:
391 f = self.wjoin(fn)
404 f = self.wjoin(fn)
392 st = os.lstat(f)
405 st = os.lstat(f)
393 except OSError, inst:
406 except OSError, inst:
394 if inst.errno != errno.ENOENT:
407 if inst.errno != errno.ENOENT:
395 raise
408 raise
396 st = None
409 st = None
397 # We need to re-check that it is a valid file
410 # We need to re-check that it is a valid file
398 if st and self.supported_type(fn, st):
411 if st and self.supported_type(fn, st):
399 nonexistent = False
412 nonexistent = False
400 # XXX: what to do with file no longer present in the fs
413 # XXX: what to do with file no longer present in the fs
401 # who are not removed in the dirstate ?
414 # who are not removed in the dirstate ?
402 if nonexistent and type in "nm":
415 if nonexistent and type_ in "nm":
403 deleted.append(fn)
416 deleted.append(fn)
404 continue
417 continue
405 # check the common case first
418 # check the common case first
406 if type == 'n':
419 if type_ == 'n':
407 if not st:
420 if not st:
408 st = os.stat(fn)
421 st = os.stat(fn)
409 if size != st.st_size or (mode ^ st.st_mode) & 0100:
422 if size >= 0 and (size != st.st_size
423 or (mode ^ st.st_mode) & 0100):
410 modified.append(fn)
424 modified.append(fn)
411 elif time != st.st_mtime:
425 elif time != st.st_mtime:
412 lookup.append(fn)
426 lookup.append(fn)
413 elif type == 'm':
427 elif type_ == 'm':
414 modified.append(fn)
428 modified.append(fn)
415 elif type == 'a':
429 elif type_ == 'a':
416 added.append(fn)
430 added.append(fn)
417 elif type == 'r':
431 elif type_ == 'r':
418 removed.append(fn)
432 removed.append(fn)
419
433
420 return (lookup, modified, added, removed, deleted, unknown)
434 return (lookup, modified, added, removed, deleted, unknown)
@@ -1,1853 +1,1853 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository(object):
15 class localrepository(object):
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp:
22 if p == oldp:
23 raise repo.RepoError(_("no repo found"))
23 raise repo.RepoError(_("no repo found"))
24 path = p
24 path = p
25 self.path = os.path.join(path, ".hg")
25 self.path = os.path.join(path, ".hg")
26
26
27 if not create and not os.path.isdir(self.path):
27 if not create and not os.path.isdir(self.path):
28 raise repo.RepoError(_("repository %s not found") % path)
28 raise repo.RepoError(_("repository %s not found") % path)
29
29
30 self.root = os.path.abspath(path)
30 self.root = os.path.abspath(path)
31 self.ui = ui
31 self.ui = ui
32 self.opener = util.opener(self.path)
32 self.opener = util.opener(self.path)
33 self.wopener = util.opener(self.root)
33 self.wopener = util.opener(self.root)
34 self.manifest = manifest.manifest(self.opener)
34 self.manifest = manifest.manifest(self.opener)
35 self.changelog = changelog.changelog(self.opener)
35 self.changelog = changelog.changelog(self.opener)
36 self.tagscache = None
36 self.tagscache = None
37 self.nodetagscache = None
37 self.nodetagscache = None
38 self.encodepats = None
38 self.encodepats = None
39 self.decodepats = None
39 self.decodepats = None
40
40
41 if create:
41 if create:
42 os.mkdir(self.path)
42 os.mkdir(self.path)
43 os.mkdir(self.join("data"))
43 os.mkdir(self.join("data"))
44
44
45 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
46 try:
46 try:
47 self.ui.readconfig(self.join("hgrc"))
47 self.ui.readconfig(self.join("hgrc"))
48 except IOError:
48 except IOError:
49 pass
49 pass
50
50
51 def hook(self, name, throw=False, **args):
51 def hook(self, name, throw=False, **args):
52 def runhook(name, cmd):
52 def runhook(name, cmd):
53 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
53 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
54 old = {}
54 old = {}
55 for k, v in args.items():
55 for k, v in args.items():
56 k = k.upper()
56 k = k.upper()
57 old['HG_' + k] = os.environ.get(k, None)
57 old['HG_' + k] = os.environ.get(k, None)
58 old[k] = os.environ.get(k, None)
58 old[k] = os.environ.get(k, None)
59 os.environ['HG_' + k] = str(v)
59 os.environ['HG_' + k] = str(v)
60 os.environ[k] = str(v)
60 os.environ[k] = str(v)
61
61
62 try:
62 try:
63 # Hooks run in the repository root
63 # Hooks run in the repository root
64 olddir = os.getcwd()
64 olddir = os.getcwd()
65 os.chdir(self.root)
65 os.chdir(self.root)
66 r = os.system(cmd)
66 r = os.system(cmd)
67 finally:
67 finally:
68 for k, v in old.items():
68 for k, v in old.items():
69 if v is not None:
69 if v is not None:
70 os.environ[k] = v
70 os.environ[k] = v
71 else:
71 else:
72 del os.environ[k]
72 del os.environ[k]
73
73
74 os.chdir(olddir)
74 os.chdir(olddir)
75
75
76 if r:
76 if r:
77 desc, r = util.explain_exit(r)
77 desc, r = util.explain_exit(r)
78 if throw:
78 if throw:
79 raise util.Abort(_('%s hook %s') % (name, desc))
79 raise util.Abort(_('%s hook %s') % (name, desc))
80 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
80 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
81 return False
81 return False
82 return True
82 return True
83
83
84 r = True
84 r = True
85 for hname, cmd in self.ui.configitems("hooks"):
85 for hname, cmd in self.ui.configitems("hooks"):
86 s = hname.split(".")
86 s = hname.split(".")
87 if s[0] == name and cmd:
87 if s[0] == name and cmd:
88 r = runhook(hname, cmd) and r
88 r = runhook(hname, cmd) and r
89 return r
89 return r
90
90
91 def tags(self):
91 def tags(self):
92 '''return a mapping of tag to node'''
92 '''return a mapping of tag to node'''
93 if not self.tagscache:
93 if not self.tagscache:
94 self.tagscache = {}
94 self.tagscache = {}
95 def addtag(self, k, n):
95 def addtag(self, k, n):
96 try:
96 try:
97 bin_n = bin(n)
97 bin_n = bin(n)
98 except TypeError:
98 except TypeError:
99 bin_n = ''
99 bin_n = ''
100 self.tagscache[k.strip()] = bin_n
100 self.tagscache[k.strip()] = bin_n
101
101
102 try:
102 try:
103 # read each head of the tags file, ending with the tip
103 # read each head of the tags file, ending with the tip
104 # and add each tag found to the map, with "newer" ones
104 # and add each tag found to the map, with "newer" ones
105 # taking precedence
105 # taking precedence
106 fl = self.file(".hgtags")
106 fl = self.file(".hgtags")
107 h = fl.heads()
107 h = fl.heads()
108 h.reverse()
108 h.reverse()
109 for r in h:
109 for r in h:
110 for l in fl.read(r).splitlines():
110 for l in fl.read(r).splitlines():
111 if l:
111 if l:
112 n, k = l.split(" ", 1)
112 n, k = l.split(" ", 1)
113 addtag(self, k, n)
113 addtag(self, k, n)
114 except KeyError:
114 except KeyError:
115 pass
115 pass
116
116
117 try:
117 try:
118 f = self.opener("localtags")
118 f = self.opener("localtags")
119 for l in f:
119 for l in f:
120 n, k = l.split(" ", 1)
120 n, k = l.split(" ", 1)
121 addtag(self, k, n)
121 addtag(self, k, n)
122 except IOError:
122 except IOError:
123 pass
123 pass
124
124
125 self.tagscache['tip'] = self.changelog.tip()
125 self.tagscache['tip'] = self.changelog.tip()
126
126
127 return self.tagscache
127 return self.tagscache
128
128
129 def tagslist(self):
129 def tagslist(self):
130 '''return a list of tags ordered by revision'''
130 '''return a list of tags ordered by revision'''
131 l = []
131 l = []
132 for t, n in self.tags().items():
132 for t, n in self.tags().items():
133 try:
133 try:
134 r = self.changelog.rev(n)
134 r = self.changelog.rev(n)
135 except:
135 except:
136 r = -2 # sort to the beginning of the list if unknown
136 r = -2 # sort to the beginning of the list if unknown
137 l.append((r, t, n))
137 l.append((r, t, n))
138 l.sort()
138 l.sort()
139 return [(t, n) for r, t, n in l]
139 return [(t, n) for r, t, n in l]
140
140
141 def nodetags(self, node):
141 def nodetags(self, node):
142 '''return the tags associated with a node'''
142 '''return the tags associated with a node'''
143 if not self.nodetagscache:
143 if not self.nodetagscache:
144 self.nodetagscache = {}
144 self.nodetagscache = {}
145 for t, n in self.tags().items():
145 for t, n in self.tags().items():
146 self.nodetagscache.setdefault(n, []).append(t)
146 self.nodetagscache.setdefault(n, []).append(t)
147 return self.nodetagscache.get(node, [])
147 return self.nodetagscache.get(node, [])
148
148
149 def lookup(self, key):
149 def lookup(self, key):
150 try:
150 try:
151 return self.tags()[key]
151 return self.tags()[key]
152 except KeyError:
152 except KeyError:
153 try:
153 try:
154 return self.changelog.lookup(key)
154 return self.changelog.lookup(key)
155 except:
155 except:
156 raise repo.RepoError(_("unknown revision '%s'") % key)
156 raise repo.RepoError(_("unknown revision '%s'") % key)
157
157
158 def dev(self):
158 def dev(self):
159 return os.stat(self.path).st_dev
159 return os.stat(self.path).st_dev
160
160
161 def local(self):
161 def local(self):
162 return True
162 return True
163
163
164 def join(self, f):
164 def join(self, f):
165 return os.path.join(self.path, f)
165 return os.path.join(self.path, f)
166
166
167 def wjoin(self, f):
167 def wjoin(self, f):
168 return os.path.join(self.root, f)
168 return os.path.join(self.root, f)
169
169
170 def file(self, f):
170 def file(self, f):
171 if f[0] == '/':
171 if f[0] == '/':
172 f = f[1:]
172 f = f[1:]
173 return filelog.filelog(self.opener, f)
173 return filelog.filelog(self.opener, f)
174
174
175 def getcwd(self):
175 def getcwd(self):
176 return self.dirstate.getcwd()
176 return self.dirstate.getcwd()
177
177
178 def wfile(self, f, mode='r'):
178 def wfile(self, f, mode='r'):
179 return self.wopener(f, mode)
179 return self.wopener(f, mode)
180
180
181 def wread(self, filename):
181 def wread(self, filename):
182 if self.encodepats == None:
182 if self.encodepats == None:
183 l = []
183 l = []
184 for pat, cmd in self.ui.configitems("encode"):
184 for pat, cmd in self.ui.configitems("encode"):
185 mf = util.matcher("", "/", [pat], [], [])[1]
185 mf = util.matcher("", "/", [pat], [], [])[1]
186 l.append((mf, cmd))
186 l.append((mf, cmd))
187 self.encodepats = l
187 self.encodepats = l
188
188
189 data = self.wopener(filename, 'r').read()
189 data = self.wopener(filename, 'r').read()
190
190
191 for mf, cmd in self.encodepats:
191 for mf, cmd in self.encodepats:
192 if mf(filename):
192 if mf(filename):
193 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
193 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
194 data = util.filter(data, cmd)
194 data = util.filter(data, cmd)
195 break
195 break
196
196
197 return data
197 return data
198
198
199 def wwrite(self, filename, data, fd=None):
199 def wwrite(self, filename, data, fd=None):
200 if self.decodepats == None:
200 if self.decodepats == None:
201 l = []
201 l = []
202 for pat, cmd in self.ui.configitems("decode"):
202 for pat, cmd in self.ui.configitems("decode"):
203 mf = util.matcher("", "/", [pat], [], [])[1]
203 mf = util.matcher("", "/", [pat], [], [])[1]
204 l.append((mf, cmd))
204 l.append((mf, cmd))
205 self.decodepats = l
205 self.decodepats = l
206
206
207 for mf, cmd in self.decodepats:
207 for mf, cmd in self.decodepats:
208 if mf(filename):
208 if mf(filename):
209 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
209 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
210 data = util.filter(data, cmd)
210 data = util.filter(data, cmd)
211 break
211 break
212
212
213 if fd:
213 if fd:
214 return fd.write(data)
214 return fd.write(data)
215 return self.wopener(filename, 'w').write(data)
215 return self.wopener(filename, 'w').write(data)
216
216
217 def transaction(self):
217 def transaction(self):
218 # save dirstate for undo
218 # save dirstate for undo
219 try:
219 try:
220 ds = self.opener("dirstate").read()
220 ds = self.opener("dirstate").read()
221 except IOError:
221 except IOError:
222 ds = ""
222 ds = ""
223 self.opener("journal.dirstate", "w").write(ds)
223 self.opener("journal.dirstate", "w").write(ds)
224
224
225 def after():
225 def after():
226 util.rename(self.join("journal"), self.join("undo"))
226 util.rename(self.join("journal"), self.join("undo"))
227 util.rename(self.join("journal.dirstate"),
227 util.rename(self.join("journal.dirstate"),
228 self.join("undo.dirstate"))
228 self.join("undo.dirstate"))
229
229
230 return transaction.transaction(self.ui.warn, self.opener,
230 return transaction.transaction(self.ui.warn, self.opener,
231 self.join("journal"), after)
231 self.join("journal"), after)
232
232
233 def recover(self):
233 def recover(self):
234 lock = self.lock()
234 l = self.lock()
235 if os.path.exists(self.join("journal")):
235 if os.path.exists(self.join("journal")):
236 self.ui.status(_("rolling back interrupted transaction\n"))
236 self.ui.status(_("rolling back interrupted transaction\n"))
237 transaction.rollback(self.opener, self.join("journal"))
237 transaction.rollback(self.opener, self.join("journal"))
238 self.manifest = manifest.manifest(self.opener)
238 self.manifest = manifest.manifest(self.opener)
239 self.changelog = changelog.changelog(self.opener)
239 self.changelog = changelog.changelog(self.opener)
240 return True
240 return True
241 else:
241 else:
242 self.ui.warn(_("no interrupted transaction available\n"))
242 self.ui.warn(_("no interrupted transaction available\n"))
243 return False
243 return False
244
244
245 def undo(self, wlock=None):
245 def undo(self, wlock=None):
246 if not wlock:
246 if not wlock:
247 wlock = self.wlock()
247 wlock = self.wlock()
248 lock = self.lock()
248 l = self.lock()
249 if os.path.exists(self.join("undo")):
249 if os.path.exists(self.join("undo")):
250 self.ui.status(_("rolling back last transaction\n"))
250 self.ui.status(_("rolling back last transaction\n"))
251 transaction.rollback(self.opener, self.join("undo"))
251 transaction.rollback(self.opener, self.join("undo"))
252 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
252 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
253 self.dirstate.read()
253 self.dirstate.read()
254 else:
254 else:
255 self.ui.warn(_("no undo information available\n"))
255 self.ui.warn(_("no undo information available\n"))
256
256
257 def lock(self, wait=1):
257 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None):
258 try:
258 try:
259 return lock.lock(self.join("lock"), 0)
259 l = lock.lock(self.join(lockname), 0, releasefn)
260 except lock.LockHeld, inst:
261 if wait:
262 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
263 return lock.lock(self.join("lock"), wait)
264 raise inst
265
266 def wlock(self, wait=1):
267 try:
268 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
269 except lock.LockHeld, inst:
260 except lock.LockHeld, inst:
270 if not wait:
261 if not wait:
271 raise inst
262 raise inst
272 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
263 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
273 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
264 l = lock.lock(self.join(lockname), wait, releasefn)
274 self.dirstate.read()
265 if acquirefn:
275 return wlock
266 acquirefn()
267 return l
268
269 def lock(self, wait=1):
270 return self.do_lock("lock", wait)
271
272 def wlock(self, wait=1):
273 return self.do_lock("wlock", wait,
274 self.dirstate.write,
275 self.dirstate.read)
276
276
277 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
277 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
278 "determine whether a new filenode is needed"
278 "determine whether a new filenode is needed"
279 fp1 = manifest1.get(filename, nullid)
279 fp1 = manifest1.get(filename, nullid)
280 fp2 = manifest2.get(filename, nullid)
280 fp2 = manifest2.get(filename, nullid)
281
281
282 if fp2 != nullid:
282 if fp2 != nullid:
283 # is one parent an ancestor of the other?
283 # is one parent an ancestor of the other?
284 fpa = filelog.ancestor(fp1, fp2)
284 fpa = filelog.ancestor(fp1, fp2)
285 if fpa == fp1:
285 if fpa == fp1:
286 fp1, fp2 = fp2, nullid
286 fp1, fp2 = fp2, nullid
287 elif fpa == fp2:
287 elif fpa == fp2:
288 fp2 = nullid
288 fp2 = nullid
289
289
290 # is the file unmodified from the parent? report existing entry
290 # is the file unmodified from the parent? report existing entry
291 if fp2 == nullid and text == filelog.read(fp1):
291 if fp2 == nullid and text == filelog.read(fp1):
292 return (fp1, None, None)
292 return (fp1, None, None)
293
293
294 return (None, fp1, fp2)
294 return (None, fp1, fp2)
295
295
296 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
296 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
297 orig_parent = self.dirstate.parents()[0] or nullid
297 orig_parent = self.dirstate.parents()[0] or nullid
298 p1 = p1 or self.dirstate.parents()[0] or nullid
298 p1 = p1 or self.dirstate.parents()[0] or nullid
299 p2 = p2 or self.dirstate.parents()[1] or nullid
299 p2 = p2 or self.dirstate.parents()[1] or nullid
300 c1 = self.changelog.read(p1)
300 c1 = self.changelog.read(p1)
301 c2 = self.changelog.read(p2)
301 c2 = self.changelog.read(p2)
302 m1 = self.manifest.read(c1[0])
302 m1 = self.manifest.read(c1[0])
303 mf1 = self.manifest.readflags(c1[0])
303 mf1 = self.manifest.readflags(c1[0])
304 m2 = self.manifest.read(c2[0])
304 m2 = self.manifest.read(c2[0])
305 changed = []
305 changed = []
306
306
307 if orig_parent == p1:
307 if orig_parent == p1:
308 update_dirstate = 1
308 update_dirstate = 1
309 else:
309 else:
310 update_dirstate = 0
310 update_dirstate = 0
311
311
312 if not wlock:
312 if not wlock:
313 wlock = self.wlock()
313 wlock = self.wlock()
314 lock = self.lock()
314 l = self.lock()
315 tr = self.transaction()
315 tr = self.transaction()
316 mm = m1.copy()
316 mm = m1.copy()
317 mfm = mf1.copy()
317 mfm = mf1.copy()
318 linkrev = self.changelog.count()
318 linkrev = self.changelog.count()
319 for f in files:
319 for f in files:
320 try:
320 try:
321 t = self.wread(f)
321 t = self.wread(f)
322 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
322 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
323 r = self.file(f)
323 r = self.file(f)
324 mfm[f] = tm
324 mfm[f] = tm
325
325
326 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
326 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
327 if entry:
327 if entry:
328 mm[f] = entry
328 mm[f] = entry
329 continue
329 continue
330
330
331 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
331 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
332 changed.append(f)
332 changed.append(f)
333 if update_dirstate:
333 if update_dirstate:
334 self.dirstate.update([f], "n")
334 self.dirstate.update([f], "n")
335 except IOError:
335 except IOError:
336 try:
336 try:
337 del mm[f]
337 del mm[f]
338 del mfm[f]
338 del mfm[f]
339 if update_dirstate:
339 if update_dirstate:
340 self.dirstate.forget([f])
340 self.dirstate.forget([f])
341 except:
341 except:
342 # deleted from p2?
342 # deleted from p2?
343 pass
343 pass
344
344
345 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
345 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
346 user = user or self.ui.username()
346 user = user or self.ui.username()
347 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
347 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
348 tr.close()
348 tr.close()
349 if update_dirstate:
349 if update_dirstate:
350 self.dirstate.setparents(n, nullid)
350 self.dirstate.setparents(n, nullid)
351
351
352 def commit(self, files=None, text="", user=None, date=None,
352 def commit(self, files=None, text="", user=None, date=None,
353 match=util.always, force=False, wlock=None):
353 match=util.always, force=False, wlock=None):
354 commit = []
354 commit = []
355 remove = []
355 remove = []
356 changed = []
356 changed = []
357
357
358 if files:
358 if files:
359 for f in files:
359 for f in files:
360 s = self.dirstate.state(f)
360 s = self.dirstate.state(f)
361 if s in 'nmai':
361 if s in 'nmai':
362 commit.append(f)
362 commit.append(f)
363 elif s == 'r':
363 elif s == 'r':
364 remove.append(f)
364 remove.append(f)
365 else:
365 else:
366 self.ui.warn(_("%s not tracked!\n") % f)
366 self.ui.warn(_("%s not tracked!\n") % f)
367 else:
367 else:
368 modified, added, removed, deleted, unknown = self.changes(match=match)
368 modified, added, removed, deleted, unknown = self.changes(match=match)
369 commit = modified + added
369 commit = modified + added
370 remove = removed
370 remove = removed
371
371
372 p1, p2 = self.dirstate.parents()
372 p1, p2 = self.dirstate.parents()
373 c1 = self.changelog.read(p1)
373 c1 = self.changelog.read(p1)
374 c2 = self.changelog.read(p2)
374 c2 = self.changelog.read(p2)
375 m1 = self.manifest.read(c1[0])
375 m1 = self.manifest.read(c1[0])
376 mf1 = self.manifest.readflags(c1[0])
376 mf1 = self.manifest.readflags(c1[0])
377 m2 = self.manifest.read(c2[0])
377 m2 = self.manifest.read(c2[0])
378
378
379 if not commit and not remove and not force and p2 == nullid:
379 if not commit and not remove and not force and p2 == nullid:
380 self.ui.status(_("nothing changed\n"))
380 self.ui.status(_("nothing changed\n"))
381 return None
381 return None
382
382
383 xp1 = hex(p1)
383 xp1 = hex(p1)
384 if p2 == nullid: xp2 = ''
384 if p2 == nullid: xp2 = ''
385 else: xp2 = hex(p2)
385 else: xp2 = hex(p2)
386
386
387 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
387 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
388
388
389 if not wlock:
389 if not wlock:
390 wlock = self.wlock()
390 wlock = self.wlock()
391 lock = self.lock()
391 l = self.lock()
392 tr = self.transaction()
392 tr = self.transaction()
393
393
394 # check in files
394 # check in files
395 new = {}
395 new = {}
396 linkrev = self.changelog.count()
396 linkrev = self.changelog.count()
397 commit.sort()
397 commit.sort()
398 for f in commit:
398 for f in commit:
399 self.ui.note(f + "\n")
399 self.ui.note(f + "\n")
400 try:
400 try:
401 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
401 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
402 t = self.wread(f)
402 t = self.wread(f)
403 except IOError:
403 except IOError:
404 self.ui.warn(_("trouble committing %s!\n") % f)
404 self.ui.warn(_("trouble committing %s!\n") % f)
405 raise
405 raise
406
406
407 r = self.file(f)
407 r = self.file(f)
408
408
409 meta = {}
409 meta = {}
410 cp = self.dirstate.copied(f)
410 cp = self.dirstate.copied(f)
411 if cp:
411 if cp:
412 meta["copy"] = cp
412 meta["copy"] = cp
413 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
413 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
414 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
414 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
415 fp1, fp2 = nullid, nullid
415 fp1, fp2 = nullid, nullid
416 else:
416 else:
417 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
417 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
418 if entry:
418 if entry:
419 new[f] = entry
419 new[f] = entry
420 continue
420 continue
421
421
422 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
422 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
423 # remember what we've added so that we can later calculate
423 # remember what we've added so that we can later calculate
424 # the files to pull from a set of changesets
424 # the files to pull from a set of changesets
425 changed.append(f)
425 changed.append(f)
426
426
427 # update manifest
427 # update manifest
428 m1 = m1.copy()
428 m1 = m1.copy()
429 m1.update(new)
429 m1.update(new)
430 for f in remove:
430 for f in remove:
431 if f in m1:
431 if f in m1:
432 del m1[f]
432 del m1[f]
433 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
433 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
434 (new, remove))
434 (new, remove))
435
435
436 # add changeset
436 # add changeset
437 new = new.keys()
437 new = new.keys()
438 new.sort()
438 new.sort()
439
439
440 if not text:
440 if not text:
441 edittext = [""]
441 edittext = [""]
442 if p2 != nullid:
442 if p2 != nullid:
443 edittext.append("HG: branch merge")
443 edittext.append("HG: branch merge")
444 edittext.extend(["HG: changed %s" % f for f in changed])
444 edittext.extend(["HG: changed %s" % f for f in changed])
445 edittext.extend(["HG: removed %s" % f for f in remove])
445 edittext.extend(["HG: removed %s" % f for f in remove])
446 if not changed and not remove:
446 if not changed and not remove:
447 edittext.append("HG: no files changed")
447 edittext.append("HG: no files changed")
448 edittext.append("")
448 edittext.append("")
449 # run editor in the repository root
449 # run editor in the repository root
450 olddir = os.getcwd()
450 olddir = os.getcwd()
451 os.chdir(self.root)
451 os.chdir(self.root)
452 edittext = self.ui.edit("\n".join(edittext))
452 edittext = self.ui.edit("\n".join(edittext))
453 os.chdir(olddir)
453 os.chdir(olddir)
454 if not edittext.rstrip():
454 if not edittext.rstrip():
455 return None
455 return None
456 text = edittext
456 text = edittext
457
457
458 user = user or self.ui.username()
458 user = user or self.ui.username()
459 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
459 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
460 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
460 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
461 parent2=xp2)
461 parent2=xp2)
462 tr.close()
462 tr.close()
463
463
464 self.dirstate.setparents(n)
464 self.dirstate.setparents(n)
465 self.dirstate.update(new, "n")
465 self.dirstate.update(new, "n")
466 self.dirstate.forget(remove)
466 self.dirstate.forget(remove)
467
467
468 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
468 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
469 return n
469 return n
470
470
471 def walk(self, node=None, files=[], match=util.always):
471 def walk(self, node=None, files=[], match=util.always):
472 if node:
472 if node:
473 fdict = dict.fromkeys(files)
473 fdict = dict.fromkeys(files)
474 for fn in self.manifest.read(self.changelog.read(node)[0]):
474 for fn in self.manifest.read(self.changelog.read(node)[0]):
475 fdict.pop(fn, None)
475 fdict.pop(fn, None)
476 if match(fn):
476 if match(fn):
477 yield 'm', fn
477 yield 'm', fn
478 for fn in fdict:
478 for fn in fdict:
479 self.ui.warn(_('%s: No such file in rev %s\n') % (
479 self.ui.warn(_('%s: No such file in rev %s\n') % (
480 util.pathto(self.getcwd(), fn), short(node)))
480 util.pathto(self.getcwd(), fn), short(node)))
481 else:
481 else:
482 for src, fn in self.dirstate.walk(files, match):
482 for src, fn in self.dirstate.walk(files, match):
483 yield src, fn
483 yield src, fn
484
484
485 def changes(self, node1=None, node2=None, files=[], match=util.always,
485 def changes(self, node1=None, node2=None, files=[], match=util.always,
486 wlock=None):
486 wlock=None):
487 """return changes between two nodes or node and working directory
487 """return changes between two nodes or node and working directory
488
488
489 If node1 is None, use the first dirstate parent instead.
489 If node1 is None, use the first dirstate parent instead.
490 If node2 is None, compare node1 with working directory.
490 If node2 is None, compare node1 with working directory.
491 """
491 """
492
492
493 def fcmp(fn, mf):
493 def fcmp(fn, mf):
494 t1 = self.wread(fn)
494 t1 = self.wread(fn)
495 t2 = self.file(fn).read(mf.get(fn, nullid))
495 t2 = self.file(fn).read(mf.get(fn, nullid))
496 return cmp(t1, t2)
496 return cmp(t1, t2)
497
497
498 def mfmatches(node):
498 def mfmatches(node):
499 change = self.changelog.read(node)
499 change = self.changelog.read(node)
500 mf = dict(self.manifest.read(change[0]))
500 mf = dict(self.manifest.read(change[0]))
501 for fn in mf.keys():
501 for fn in mf.keys():
502 if not match(fn):
502 if not match(fn):
503 del mf[fn]
503 del mf[fn]
504 return mf
504 return mf
505
505
506 # are we comparing the working directory?
506 # are we comparing the working directory?
507 if not node2:
507 if not node2:
508 if not wlock:
508 if not wlock:
509 try:
509 try:
510 wlock = self.wlock(wait=0)
510 wlock = self.wlock(wait=0)
511 except lock.LockHeld:
511 except lock.LockException:
512 wlock = None
512 wlock = None
513 lookup, modified, added, removed, deleted, unknown = (
513 lookup, modified, added, removed, deleted, unknown = (
514 self.dirstate.changes(files, match))
514 self.dirstate.changes(files, match))
515
515
516 # are we comparing working dir against its parent?
516 # are we comparing working dir against its parent?
517 if not node1:
517 if not node1:
518 if lookup:
518 if lookup:
519 # do a full compare of any files that might have changed
519 # do a full compare of any files that might have changed
520 mf2 = mfmatches(self.dirstate.parents()[0])
520 mf2 = mfmatches(self.dirstate.parents()[0])
521 for f in lookup:
521 for f in lookup:
522 if fcmp(f, mf2):
522 if fcmp(f, mf2):
523 modified.append(f)
523 modified.append(f)
524 elif wlock is not None:
524 elif wlock is not None:
525 self.dirstate.update([f], "n")
525 self.dirstate.update([f], "n")
526 else:
526 else:
527 # we are comparing working dir against non-parent
527 # we are comparing working dir against non-parent
528 # generate a pseudo-manifest for the working dir
528 # generate a pseudo-manifest for the working dir
529 mf2 = mfmatches(self.dirstate.parents()[0])
529 mf2 = mfmatches(self.dirstate.parents()[0])
530 for f in lookup + modified + added:
530 for f in lookup + modified + added:
531 mf2[f] = ""
531 mf2[f] = ""
532 for f in removed:
532 for f in removed:
533 if f in mf2:
533 if f in mf2:
534 del mf2[f]
534 del mf2[f]
535 else:
535 else:
536 # we are comparing two revisions
536 # we are comparing two revisions
537 deleted, unknown = [], []
537 deleted, unknown = [], []
538 mf2 = mfmatches(node2)
538 mf2 = mfmatches(node2)
539
539
540 if node1:
540 if node1:
541 # flush lists from dirstate before comparing manifests
541 # flush lists from dirstate before comparing manifests
542 modified, added = [], []
542 modified, added = [], []
543
543
544 mf1 = mfmatches(node1)
544 mf1 = mfmatches(node1)
545
545
546 for fn in mf2:
546 for fn in mf2:
547 if mf1.has_key(fn):
547 if mf1.has_key(fn):
548 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
548 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
549 modified.append(fn)
549 modified.append(fn)
550 del mf1[fn]
550 del mf1[fn]
551 else:
551 else:
552 added.append(fn)
552 added.append(fn)
553
553
554 removed = mf1.keys()
554 removed = mf1.keys()
555
555
556 # sort and return results:
556 # sort and return results:
557 for l in modified, added, removed, deleted, unknown:
557 for l in modified, added, removed, deleted, unknown:
558 l.sort()
558 l.sort()
559 return (modified, added, removed, deleted, unknown)
559 return (modified, added, removed, deleted, unknown)
560
560
561 def add(self, list, wlock=None):
561 def add(self, list, wlock=None):
562 if not wlock:
562 if not wlock:
563 wlock = self.wlock()
563 wlock = self.wlock()
564 for f in list:
564 for f in list:
565 p = self.wjoin(f)
565 p = self.wjoin(f)
566 if not os.path.exists(p):
566 if not os.path.exists(p):
567 self.ui.warn(_("%s does not exist!\n") % f)
567 self.ui.warn(_("%s does not exist!\n") % f)
568 elif not os.path.isfile(p):
568 elif not os.path.isfile(p):
569 self.ui.warn(_("%s not added: only files supported currently\n")
569 self.ui.warn(_("%s not added: only files supported currently\n")
570 % f)
570 % f)
571 elif self.dirstate.state(f) in 'an':
571 elif self.dirstate.state(f) in 'an':
572 self.ui.warn(_("%s already tracked!\n") % f)
572 self.ui.warn(_("%s already tracked!\n") % f)
573 else:
573 else:
574 self.dirstate.update([f], "a")
574 self.dirstate.update([f], "a")
575
575
576 def forget(self, list, wlock=None):
576 def forget(self, list, wlock=None):
577 if not wlock:
577 if not wlock:
578 wlock = self.wlock()
578 wlock = self.wlock()
579 for f in list:
579 for f in list:
580 if self.dirstate.state(f) not in 'ai':
580 if self.dirstate.state(f) not in 'ai':
581 self.ui.warn(_("%s not added!\n") % f)
581 self.ui.warn(_("%s not added!\n") % f)
582 else:
582 else:
583 self.dirstate.forget([f])
583 self.dirstate.forget([f])
584
584
585 def remove(self, list, unlink=False, wlock=None):
585 def remove(self, list, unlink=False, wlock=None):
586 if unlink:
586 if unlink:
587 for f in list:
587 for f in list:
588 try:
588 try:
589 util.unlink(self.wjoin(f))
589 util.unlink(self.wjoin(f))
590 except OSError, inst:
590 except OSError, inst:
591 if inst.errno != errno.ENOENT:
591 if inst.errno != errno.ENOENT:
592 raise
592 raise
593 if not wlock:
593 if not wlock:
594 wlock = self.wlock()
594 wlock = self.wlock()
595 for f in list:
595 for f in list:
596 p = self.wjoin(f)
596 p = self.wjoin(f)
597 if os.path.exists(p):
597 if os.path.exists(p):
598 self.ui.warn(_("%s still exists!\n") % f)
598 self.ui.warn(_("%s still exists!\n") % f)
599 elif self.dirstate.state(f) == 'a':
599 elif self.dirstate.state(f) == 'a':
600 self.dirstate.forget([f])
600 self.dirstate.forget([f])
601 elif f not in self.dirstate:
601 elif f not in self.dirstate:
602 self.ui.warn(_("%s not tracked!\n") % f)
602 self.ui.warn(_("%s not tracked!\n") % f)
603 else:
603 else:
604 self.dirstate.update([f], "r")
604 self.dirstate.update([f], "r")
605
605
606 def undelete(self, list, wlock=None):
606 def undelete(self, list, wlock=None):
607 p = self.dirstate.parents()[0]
607 p = self.dirstate.parents()[0]
608 mn = self.changelog.read(p)[0]
608 mn = self.changelog.read(p)[0]
609 mf = self.manifest.readflags(mn)
609 mf = self.manifest.readflags(mn)
610 m = self.manifest.read(mn)
610 m = self.manifest.read(mn)
611 if not wlock:
611 if not wlock:
612 wlock = self.wlock()
612 wlock = self.wlock()
613 for f in list:
613 for f in list:
614 if self.dirstate.state(f) not in "r":
614 if self.dirstate.state(f) not in "r":
615 self.ui.warn("%s not removed!\n" % f)
615 self.ui.warn("%s not removed!\n" % f)
616 else:
616 else:
617 t = self.file(f).read(m[f])
617 t = self.file(f).read(m[f])
618 self.wwrite(f, t)
618 self.wwrite(f, t)
619 util.set_exec(self.wjoin(f), mf[f])
619 util.set_exec(self.wjoin(f), mf[f])
620 self.dirstate.update([f], "n")
620 self.dirstate.update([f], "n")
621
621
622 def copy(self, source, dest, wlock=None):
622 def copy(self, source, dest, wlock=None):
623 p = self.wjoin(dest)
623 p = self.wjoin(dest)
624 if not os.path.exists(p):
624 if not os.path.exists(p):
625 self.ui.warn(_("%s does not exist!\n") % dest)
625 self.ui.warn(_("%s does not exist!\n") % dest)
626 elif not os.path.isfile(p):
626 elif not os.path.isfile(p):
627 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
627 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
628 else:
628 else:
629 if not wlock:
629 if not wlock:
630 wlock = self.wlock()
630 wlock = self.wlock()
631 if self.dirstate.state(dest) == '?':
631 if self.dirstate.state(dest) == '?':
632 self.dirstate.update([dest], "a")
632 self.dirstate.update([dest], "a")
633 self.dirstate.copy(source, dest)
633 self.dirstate.copy(source, dest)
634
634
635 def heads(self, start=None):
635 def heads(self, start=None):
636 heads = self.changelog.heads(start)
636 heads = self.changelog.heads(start)
637 # sort the output in rev descending order
637 # sort the output in rev descending order
638 heads = [(-self.changelog.rev(h), h) for h in heads]
638 heads = [(-self.changelog.rev(h), h) for h in heads]
639 heads.sort()
639 heads.sort()
640 return [n for (r, n) in heads]
640 return [n for (r, n) in heads]
641
641
642 # branchlookup returns a dict giving a list of branches for
642 # branchlookup returns a dict giving a list of branches for
643 # each head. A branch is defined as the tag of a node or
643 # each head. A branch is defined as the tag of a node or
644 # the branch of the node's parents. If a node has multiple
644 # the branch of the node's parents. If a node has multiple
645 # branch tags, tags are eliminated if they are visible from other
645 # branch tags, tags are eliminated if they are visible from other
646 # branch tags.
646 # branch tags.
647 #
647 #
648 # So, for this graph: a->b->c->d->e
648 # So, for this graph: a->b->c->d->e
649 # \ /
649 # \ /
650 # aa -----/
650 # aa -----/
651 # a has tag 2.6.12
651 # a has tag 2.6.12
652 # d has tag 2.6.13
652 # d has tag 2.6.13
653 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
653 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
654 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
654 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
655 # from the list.
655 # from the list.
656 #
656 #
657 # It is possible that more than one head will have the same branch tag.
657 # It is possible that more than one head will have the same branch tag.
658 # callers need to check the result for multiple heads under the same
658 # callers need to check the result for multiple heads under the same
659 # branch tag if that is a problem for them (ie checkout of a specific
659 # branch tag if that is a problem for them (ie checkout of a specific
660 # branch).
660 # branch).
661 #
661 #
662 # passing in a specific branch will limit the depth of the search
662 # passing in a specific branch will limit the depth of the search
663 # through the parents. It won't limit the branches returned in the
663 # through the parents. It won't limit the branches returned in the
664 # result though.
664 # result though.
665 def branchlookup(self, heads=None, branch=None):
665 def branchlookup(self, heads=None, branch=None):
666 if not heads:
666 if not heads:
667 heads = self.heads()
667 heads = self.heads()
668 headt = [ h for h in heads ]
668 headt = [ h for h in heads ]
669 chlog = self.changelog
669 chlog = self.changelog
670 branches = {}
670 branches = {}
671 merges = []
671 merges = []
672 seenmerge = {}
672 seenmerge = {}
673
673
674 # traverse the tree once for each head, recording in the branches
674 # traverse the tree once for each head, recording in the branches
675 # dict which tags are visible from this head. The branches
675 # dict which tags are visible from this head. The branches
676 # dict also records which tags are visible from each tag
676 # dict also records which tags are visible from each tag
677 # while we traverse.
677 # while we traverse.
678 while headt or merges:
678 while headt or merges:
679 if merges:
679 if merges:
680 n, found = merges.pop()
680 n, found = merges.pop()
681 visit = [n]
681 visit = [n]
682 else:
682 else:
683 h = headt.pop()
683 h = headt.pop()
684 visit = [h]
684 visit = [h]
685 found = [h]
685 found = [h]
686 seen = {}
686 seen = {}
687 while visit:
687 while visit:
688 n = visit.pop()
688 n = visit.pop()
689 if n in seen:
689 if n in seen:
690 continue
690 continue
691 pp = chlog.parents(n)
691 pp = chlog.parents(n)
692 tags = self.nodetags(n)
692 tags = self.nodetags(n)
693 if tags:
693 if tags:
694 for x in tags:
694 for x in tags:
695 if x == 'tip':
695 if x == 'tip':
696 continue
696 continue
697 for f in found:
697 for f in found:
698 branches.setdefault(f, {})[n] = 1
698 branches.setdefault(f, {})[n] = 1
699 branches.setdefault(n, {})[n] = 1
699 branches.setdefault(n, {})[n] = 1
700 break
700 break
701 if n not in found:
701 if n not in found:
702 found.append(n)
702 found.append(n)
703 if branch in tags:
703 if branch in tags:
704 continue
704 continue
705 seen[n] = 1
705 seen[n] = 1
706 if pp[1] != nullid and n not in seenmerge:
706 if pp[1] != nullid and n not in seenmerge:
707 merges.append((pp[1], [x for x in found]))
707 merges.append((pp[1], [x for x in found]))
708 seenmerge[n] = 1
708 seenmerge[n] = 1
709 if pp[0] != nullid:
709 if pp[0] != nullid:
710 visit.append(pp[0])
710 visit.append(pp[0])
711 # traverse the branches dict, eliminating branch tags from each
711 # traverse the branches dict, eliminating branch tags from each
712 # head that are visible from another branch tag for that head.
712 # head that are visible from another branch tag for that head.
713 out = {}
713 out = {}
714 viscache = {}
714 viscache = {}
715 for h in heads:
715 for h in heads:
716 def visible(node):
716 def visible(node):
717 if node in viscache:
717 if node in viscache:
718 return viscache[node]
718 return viscache[node]
719 ret = {}
719 ret = {}
720 visit = [node]
720 visit = [node]
721 while visit:
721 while visit:
722 x = visit.pop()
722 x = visit.pop()
723 if x in viscache:
723 if x in viscache:
724 ret.update(viscache[x])
724 ret.update(viscache[x])
725 elif x not in ret:
725 elif x not in ret:
726 ret[x] = 1
726 ret[x] = 1
727 if x in branches:
727 if x in branches:
728 visit[len(visit):] = branches[x].keys()
728 visit[len(visit):] = branches[x].keys()
729 viscache[node] = ret
729 viscache[node] = ret
730 return ret
730 return ret
731 if h not in branches:
731 if h not in branches:
732 continue
732 continue
733 # O(n^2), but somewhat limited. This only searches the
733 # O(n^2), but somewhat limited. This only searches the
734 # tags visible from a specific head, not all the tags in the
734 # tags visible from a specific head, not all the tags in the
735 # whole repo.
735 # whole repo.
736 for b in branches[h]:
736 for b in branches[h]:
737 vis = False
737 vis = False
738 for bb in branches[h].keys():
738 for bb in branches[h].keys():
739 if b != bb:
739 if b != bb:
740 if b in visible(bb):
740 if b in visible(bb):
741 vis = True
741 vis = True
742 break
742 break
743 if not vis:
743 if not vis:
744 l = out.setdefault(h, [])
744 l = out.setdefault(h, [])
745 l[len(l):] = self.nodetags(b)
745 l[len(l):] = self.nodetags(b)
746 return out
746 return out
747
747
748 def branches(self, nodes):
748 def branches(self, nodes):
749 if not nodes:
749 if not nodes:
750 nodes = [self.changelog.tip()]
750 nodes = [self.changelog.tip()]
751 b = []
751 b = []
752 for n in nodes:
752 for n in nodes:
753 t = n
753 t = n
754 while n:
754 while n:
755 p = self.changelog.parents(n)
755 p = self.changelog.parents(n)
756 if p[1] != nullid or p[0] == nullid:
756 if p[1] != nullid or p[0] == nullid:
757 b.append((t, n, p[0], p[1]))
757 b.append((t, n, p[0], p[1]))
758 break
758 break
759 n = p[0]
759 n = p[0]
760 return b
760 return b
761
761
762 def between(self, pairs):
762 def between(self, pairs):
763 r = []
763 r = []
764
764
765 for top, bottom in pairs:
765 for top, bottom in pairs:
766 n, l, i = top, [], 0
766 n, l, i = top, [], 0
767 f = 1
767 f = 1
768
768
769 while n != bottom:
769 while n != bottom:
770 p = self.changelog.parents(n)[0]
770 p = self.changelog.parents(n)[0]
771 if i == f:
771 if i == f:
772 l.append(n)
772 l.append(n)
773 f = f * 2
773 f = f * 2
774 n = p
774 n = p
775 i += 1
775 i += 1
776
776
777 r.append(l)
777 r.append(l)
778
778
779 return r
779 return r
780
780
781 def findincoming(self, remote, base=None, heads=None):
781 def findincoming(self, remote, base=None, heads=None):
782 m = self.changelog.nodemap
782 m = self.changelog.nodemap
783 search = []
783 search = []
784 fetch = {}
784 fetch = {}
785 seen = {}
785 seen = {}
786 seenbranch = {}
786 seenbranch = {}
787 if base == None:
787 if base == None:
788 base = {}
788 base = {}
789
789
790 # assume we're closer to the tip than the root
790 # assume we're closer to the tip than the root
791 # and start by examining the heads
791 # and start by examining the heads
792 self.ui.status(_("searching for changes\n"))
792 self.ui.status(_("searching for changes\n"))
793
793
794 if not heads:
794 if not heads:
795 heads = remote.heads()
795 heads = remote.heads()
796
796
797 unknown = []
797 unknown = []
798 for h in heads:
798 for h in heads:
799 if h not in m:
799 if h not in m:
800 unknown.append(h)
800 unknown.append(h)
801 else:
801 else:
802 base[h] = 1
802 base[h] = 1
803
803
804 if not unknown:
804 if not unknown:
805 return None
805 return None
806
806
807 rep = {}
807 rep = {}
808 reqcnt = 0
808 reqcnt = 0
809
809
810 # search through remote branches
810 # search through remote branches
811 # a 'branch' here is a linear segment of history, with four parts:
811 # a 'branch' here is a linear segment of history, with four parts:
812 # head, root, first parent, second parent
812 # head, root, first parent, second parent
813 # (a branch always has two parents (or none) by definition)
813 # (a branch always has two parents (or none) by definition)
814 unknown = remote.branches(unknown)
814 unknown = remote.branches(unknown)
815 while unknown:
815 while unknown:
816 r = []
816 r = []
817 while unknown:
817 while unknown:
818 n = unknown.pop(0)
818 n = unknown.pop(0)
819 if n[0] in seen:
819 if n[0] in seen:
820 continue
820 continue
821
821
822 self.ui.debug(_("examining %s:%s\n")
822 self.ui.debug(_("examining %s:%s\n")
823 % (short(n[0]), short(n[1])))
823 % (short(n[0]), short(n[1])))
824 if n[0] == nullid:
824 if n[0] == nullid:
825 break
825 break
826 if n in seenbranch:
826 if n in seenbranch:
827 self.ui.debug(_("branch already found\n"))
827 self.ui.debug(_("branch already found\n"))
828 continue
828 continue
829 if n[1] and n[1] in m: # do we know the base?
829 if n[1] and n[1] in m: # do we know the base?
830 self.ui.debug(_("found incomplete branch %s:%s\n")
830 self.ui.debug(_("found incomplete branch %s:%s\n")
831 % (short(n[0]), short(n[1])))
831 % (short(n[0]), short(n[1])))
832 search.append(n) # schedule branch range for scanning
832 search.append(n) # schedule branch range for scanning
833 seenbranch[n] = 1
833 seenbranch[n] = 1
834 else:
834 else:
835 if n[1] not in seen and n[1] not in fetch:
835 if n[1] not in seen and n[1] not in fetch:
836 if n[2] in m and n[3] in m:
836 if n[2] in m and n[3] in m:
837 self.ui.debug(_("found new changeset %s\n") %
837 self.ui.debug(_("found new changeset %s\n") %
838 short(n[1]))
838 short(n[1]))
839 fetch[n[1]] = 1 # earliest unknown
839 fetch[n[1]] = 1 # earliest unknown
840 base[n[2]] = 1 # latest known
840 base[n[2]] = 1 # latest known
841 continue
841 continue
842
842
843 for a in n[2:4]:
843 for a in n[2:4]:
844 if a not in rep:
844 if a not in rep:
845 r.append(a)
845 r.append(a)
846 rep[a] = 1
846 rep[a] = 1
847
847
848 seen[n[0]] = 1
848 seen[n[0]] = 1
849
849
850 if r:
850 if r:
851 reqcnt += 1
851 reqcnt += 1
852 self.ui.debug(_("request %d: %s\n") %
852 self.ui.debug(_("request %d: %s\n") %
853 (reqcnt, " ".join(map(short, r))))
853 (reqcnt, " ".join(map(short, r))))
854 for p in range(0, len(r), 10):
854 for p in range(0, len(r), 10):
855 for b in remote.branches(r[p:p+10]):
855 for b in remote.branches(r[p:p+10]):
856 self.ui.debug(_("received %s:%s\n") %
856 self.ui.debug(_("received %s:%s\n") %
857 (short(b[0]), short(b[1])))
857 (short(b[0]), short(b[1])))
858 if b[0] in m:
858 if b[0] in m:
859 self.ui.debug(_("found base node %s\n")
859 self.ui.debug(_("found base node %s\n")
860 % short(b[0]))
860 % short(b[0]))
861 base[b[0]] = 1
861 base[b[0]] = 1
862 elif b[0] not in seen:
862 elif b[0] not in seen:
863 unknown.append(b)
863 unknown.append(b)
864
864
865 # do binary search on the branches we found
865 # do binary search on the branches we found
866 while search:
866 while search:
867 n = search.pop(0)
867 n = search.pop(0)
868 reqcnt += 1
868 reqcnt += 1
869 l = remote.between([(n[0], n[1])])[0]
869 l = remote.between([(n[0], n[1])])[0]
870 l.append(n[1])
870 l.append(n[1])
871 p = n[0]
871 p = n[0]
872 f = 1
872 f = 1
873 for i in l:
873 for i in l:
874 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
874 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
875 if i in m:
875 if i in m:
876 if f <= 2:
876 if f <= 2:
877 self.ui.debug(_("found new branch changeset %s\n") %
877 self.ui.debug(_("found new branch changeset %s\n") %
878 short(p))
878 short(p))
879 fetch[p] = 1
879 fetch[p] = 1
880 base[i] = 1
880 base[i] = 1
881 else:
881 else:
882 self.ui.debug(_("narrowed branch search to %s:%s\n")
882 self.ui.debug(_("narrowed branch search to %s:%s\n")
883 % (short(p), short(i)))
883 % (short(p), short(i)))
884 search.append((p, i))
884 search.append((p, i))
885 break
885 break
886 p, f = i, f * 2
886 p, f = i, f * 2
887
887
888 # sanity check our fetch list
888 # sanity check our fetch list
889 for f in fetch.keys():
889 for f in fetch.keys():
890 if f in m:
890 if f in m:
891 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
891 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
892
892
893 if base.keys() == [nullid]:
893 if base.keys() == [nullid]:
894 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
894 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
895
895
896 self.ui.note(_("found new changesets starting at ") +
896 self.ui.note(_("found new changesets starting at ") +
897 " ".join([short(f) for f in fetch]) + "\n")
897 " ".join([short(f) for f in fetch]) + "\n")
898
898
899 self.ui.debug(_("%d total queries\n") % reqcnt)
899 self.ui.debug(_("%d total queries\n") % reqcnt)
900
900
901 return fetch.keys()
901 return fetch.keys()
902
902
903 def findoutgoing(self, remote, base=None, heads=None):
903 def findoutgoing(self, remote, base=None, heads=None):
904 if base == None:
904 if base == None:
905 base = {}
905 base = {}
906 self.findincoming(remote, base, heads)
906 self.findincoming(remote, base, heads)
907
907
908 self.ui.debug(_("common changesets up to ")
908 self.ui.debug(_("common changesets up to ")
909 + " ".join(map(short, base.keys())) + "\n")
909 + " ".join(map(short, base.keys())) + "\n")
910
910
911 remain = dict.fromkeys(self.changelog.nodemap)
911 remain = dict.fromkeys(self.changelog.nodemap)
912
912
913 # prune everything remote has from the tree
913 # prune everything remote has from the tree
914 del remain[nullid]
914 del remain[nullid]
915 remove = base.keys()
915 remove = base.keys()
916 while remove:
916 while remove:
917 n = remove.pop(0)
917 n = remove.pop(0)
918 if n in remain:
918 if n in remain:
919 del remain[n]
919 del remain[n]
920 for p in self.changelog.parents(n):
920 for p in self.changelog.parents(n):
921 remove.append(p)
921 remove.append(p)
922
922
923 # find every node whose parents have been pruned
923 # find every node whose parents have been pruned
924 subset = []
924 subset = []
925 for n in remain:
925 for n in remain:
926 p1, p2 = self.changelog.parents(n)
926 p1, p2 = self.changelog.parents(n)
927 if p1 not in remain and p2 not in remain:
927 if p1 not in remain and p2 not in remain:
928 subset.append(n)
928 subset.append(n)
929
929
930 # this is the set of all roots we have to push
930 # this is the set of all roots we have to push
931 return subset
931 return subset
932
932
933 def pull(self, remote, heads=None):
933 def pull(self, remote, heads=None):
934 lock = self.lock()
934 l = self.lock()
935
935
936 # if we have an empty repo, fetch everything
936 # if we have an empty repo, fetch everything
937 if self.changelog.tip() == nullid:
937 if self.changelog.tip() == nullid:
938 self.ui.status(_("requesting all changes\n"))
938 self.ui.status(_("requesting all changes\n"))
939 fetch = [nullid]
939 fetch = [nullid]
940 else:
940 else:
941 fetch = self.findincoming(remote)
941 fetch = self.findincoming(remote)
942
942
943 if not fetch:
943 if not fetch:
944 self.ui.status(_("no changes found\n"))
944 self.ui.status(_("no changes found\n"))
945 return 1
945 return 1
946
946
947 if heads is None:
947 if heads is None:
948 cg = remote.changegroup(fetch, 'pull')
948 cg = remote.changegroup(fetch, 'pull')
949 else:
949 else:
950 cg = remote.changegroupsubset(fetch, heads, 'pull')
950 cg = remote.changegroupsubset(fetch, heads, 'pull')
951 return self.addchangegroup(cg)
951 return self.addchangegroup(cg)
952
952
953 def push(self, remote, force=False):
953 def push(self, remote, force=False):
954 lock = remote.lock()
954 l = remote.lock()
955
955
956 base = {}
956 base = {}
957 heads = remote.heads()
957 heads = remote.heads()
958 inc = self.findincoming(remote, base, heads)
958 inc = self.findincoming(remote, base, heads)
959 if not force and inc:
959 if not force and inc:
960 self.ui.warn(_("abort: unsynced remote changes!\n"))
960 self.ui.warn(_("abort: unsynced remote changes!\n"))
961 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
961 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
962 return 1
962 return 1
963
963
964 update = self.findoutgoing(remote, base)
964 update = self.findoutgoing(remote, base)
965 if not update:
965 if not update:
966 self.ui.status(_("no changes found\n"))
966 self.ui.status(_("no changes found\n"))
967 return 1
967 return 1
968 elif not force:
968 elif not force:
969 if len(heads) < len(self.changelog.heads()):
969 if len(heads) < len(self.changelog.heads()):
970 self.ui.warn(_("abort: push creates new remote branches!\n"))
970 self.ui.warn(_("abort: push creates new remote branches!\n"))
971 self.ui.status(_("(did you forget to merge?"
971 self.ui.status(_("(did you forget to merge?"
972 " use push -f to force)\n"))
972 " use push -f to force)\n"))
973 return 1
973 return 1
974
974
975 cg = self.changegroup(update, 'push')
975 cg = self.changegroup(update, 'push')
976 return remote.addchangegroup(cg)
976 return remote.addchangegroup(cg)
977
977
978 def changegroupsubset(self, bases, heads, source):
978 def changegroupsubset(self, bases, heads, source):
979 """This function generates a changegroup consisting of all the nodes
979 """This function generates a changegroup consisting of all the nodes
980 that are descendents of any of the bases, and ancestors of any of
980 that are descendents of any of the bases, and ancestors of any of
981 the heads.
981 the heads.
982
982
983 It is fairly complex as determining which filenodes and which
983 It is fairly complex as determining which filenodes and which
984 manifest nodes need to be included for the changeset to be complete
984 manifest nodes need to be included for the changeset to be complete
985 is non-trivial.
985 is non-trivial.
986
986
987 Another wrinkle is doing the reverse, figuring out which changeset in
987 Another wrinkle is doing the reverse, figuring out which changeset in
988 the changegroup a particular filenode or manifestnode belongs to."""
988 the changegroup a particular filenode or manifestnode belongs to."""
989
989
990 self.hook('preoutgoing', throw=True, source=source)
990 self.hook('preoutgoing', throw=True, source=source)
991
991
992 # Set up some initial variables
992 # Set up some initial variables
993 # Make it easy to refer to self.changelog
993 # Make it easy to refer to self.changelog
994 cl = self.changelog
994 cl = self.changelog
995 # msng is short for missing - compute the list of changesets in this
995 # msng is short for missing - compute the list of changesets in this
996 # changegroup.
996 # changegroup.
997 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
997 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
998 # Some bases may turn out to be superfluous, and some heads may be
998 # Some bases may turn out to be superfluous, and some heads may be
999 # too. nodesbetween will return the minimal set of bases and heads
999 # too. nodesbetween will return the minimal set of bases and heads
1000 # necessary to re-create the changegroup.
1000 # necessary to re-create the changegroup.
1001
1001
1002 # Known heads are the list of heads that it is assumed the recipient
1002 # Known heads are the list of heads that it is assumed the recipient
1003 # of this changegroup will know about.
1003 # of this changegroup will know about.
1004 knownheads = {}
1004 knownheads = {}
1005 # We assume that all parents of bases are known heads.
1005 # We assume that all parents of bases are known heads.
1006 for n in bases:
1006 for n in bases:
1007 for p in cl.parents(n):
1007 for p in cl.parents(n):
1008 if p != nullid:
1008 if p != nullid:
1009 knownheads[p] = 1
1009 knownheads[p] = 1
1010 knownheads = knownheads.keys()
1010 knownheads = knownheads.keys()
1011 if knownheads:
1011 if knownheads:
1012 # Now that we know what heads are known, we can compute which
1012 # Now that we know what heads are known, we can compute which
1013 # changesets are known. The recipient must know about all
1013 # changesets are known. The recipient must know about all
1014 # changesets required to reach the known heads from the null
1014 # changesets required to reach the known heads from the null
1015 # changeset.
1015 # changeset.
1016 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1016 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1017 junk = None
1017 junk = None
1018 # Transform the list into an ersatz set.
1018 # Transform the list into an ersatz set.
1019 has_cl_set = dict.fromkeys(has_cl_set)
1019 has_cl_set = dict.fromkeys(has_cl_set)
1020 else:
1020 else:
1021 # If there were no known heads, the recipient cannot be assumed to
1021 # If there were no known heads, the recipient cannot be assumed to
1022 # know about any changesets.
1022 # know about any changesets.
1023 has_cl_set = {}
1023 has_cl_set = {}
1024
1024
1025 # Make it easy to refer to self.manifest
1025 # Make it easy to refer to self.manifest
1026 mnfst = self.manifest
1026 mnfst = self.manifest
1027 # We don't know which manifests are missing yet
1027 # We don't know which manifests are missing yet
1028 msng_mnfst_set = {}
1028 msng_mnfst_set = {}
1029 # Nor do we know which filenodes are missing.
1029 # Nor do we know which filenodes are missing.
1030 msng_filenode_set = {}
1030 msng_filenode_set = {}
1031
1031
1032 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1032 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1033 junk = None
1033 junk = None
1034
1034
1035 # A changeset always belongs to itself, so the changenode lookup
1035 # A changeset always belongs to itself, so the changenode lookup
1036 # function for a changenode is identity.
1036 # function for a changenode is identity.
1037 def identity(x):
1037 def identity(x):
1038 return x
1038 return x
1039
1039
1040 # A function generating function. Sets up an environment for the
1040 # A function generating function. Sets up an environment for the
1041 # inner function.
1041 # inner function.
1042 def cmp_by_rev_func(revlog):
1042 def cmp_by_rev_func(revlog):
1043 # Compare two nodes by their revision number in the environment's
1043 # Compare two nodes by their revision number in the environment's
1044 # revision history. Since the revision number both represents the
1044 # revision history. Since the revision number both represents the
1045 # most efficient order to read the nodes in, and represents a
1045 # most efficient order to read the nodes in, and represents a
1046 # topological sorting of the nodes, this function is often useful.
1046 # topological sorting of the nodes, this function is often useful.
1047 def cmp_by_rev(a, b):
1047 def cmp_by_rev(a, b):
1048 return cmp(revlog.rev(a), revlog.rev(b))
1048 return cmp(revlog.rev(a), revlog.rev(b))
1049 return cmp_by_rev
1049 return cmp_by_rev
1050
1050
1051 # If we determine that a particular file or manifest node must be a
1051 # If we determine that a particular file or manifest node must be a
1052 # node that the recipient of the changegroup will already have, we can
1052 # node that the recipient of the changegroup will already have, we can
1053 # also assume the recipient will have all the parents. This function
1053 # also assume the recipient will have all the parents. This function
1054 # prunes them from the set of missing nodes.
1054 # prunes them from the set of missing nodes.
1055 def prune_parents(revlog, hasset, msngset):
1055 def prune_parents(revlog, hasset, msngset):
1056 haslst = hasset.keys()
1056 haslst = hasset.keys()
1057 haslst.sort(cmp_by_rev_func(revlog))
1057 haslst.sort(cmp_by_rev_func(revlog))
1058 for node in haslst:
1058 for node in haslst:
1059 parentlst = [p for p in revlog.parents(node) if p != nullid]
1059 parentlst = [p for p in revlog.parents(node) if p != nullid]
1060 while parentlst:
1060 while parentlst:
1061 n = parentlst.pop()
1061 n = parentlst.pop()
1062 if n not in hasset:
1062 if n not in hasset:
1063 hasset[n] = 1
1063 hasset[n] = 1
1064 p = [p for p in revlog.parents(n) if p != nullid]
1064 p = [p for p in revlog.parents(n) if p != nullid]
1065 parentlst.extend(p)
1065 parentlst.extend(p)
1066 for n in hasset:
1066 for n in hasset:
1067 msngset.pop(n, None)
1067 msngset.pop(n, None)
1068
1068
1069 # This is a function generating function used to set up an environment
1069 # This is a function generating function used to set up an environment
1070 # for the inner function to execute in.
1070 # for the inner function to execute in.
1071 def manifest_and_file_collector(changedfileset):
1071 def manifest_and_file_collector(changedfileset):
1072 # This is an information gathering function that gathers
1072 # This is an information gathering function that gathers
1073 # information from each changeset node that goes out as part of
1073 # information from each changeset node that goes out as part of
1074 # the changegroup. The information gathered is a list of which
1074 # the changegroup. The information gathered is a list of which
1075 # manifest nodes are potentially required (the recipient may
1075 # manifest nodes are potentially required (the recipient may
1076 # already have them) and total list of all files which were
1076 # already have them) and total list of all files which were
1077 # changed in any changeset in the changegroup.
1077 # changed in any changeset in the changegroup.
1078 #
1078 #
1079 # We also remember the first changenode we saw any manifest
1079 # We also remember the first changenode we saw any manifest
1080 # referenced by so we can later determine which changenode 'owns'
1080 # referenced by so we can later determine which changenode 'owns'
1081 # the manifest.
1081 # the manifest.
1082 def collect_manifests_and_files(clnode):
1082 def collect_manifests_and_files(clnode):
1083 c = cl.read(clnode)
1083 c = cl.read(clnode)
1084 for f in c[3]:
1084 for f in c[3]:
1085 # This is to make sure we only have one instance of each
1085 # This is to make sure we only have one instance of each
1086 # filename string for each filename.
1086 # filename string for each filename.
1087 changedfileset.setdefault(f, f)
1087 changedfileset.setdefault(f, f)
1088 msng_mnfst_set.setdefault(c[0], clnode)
1088 msng_mnfst_set.setdefault(c[0], clnode)
1089 return collect_manifests_and_files
1089 return collect_manifests_and_files
1090
1090
1091 # Figure out which manifest nodes (of the ones we think might be part
1091 # Figure out which manifest nodes (of the ones we think might be part
1092 # of the changegroup) the recipient must know about and remove them
1092 # of the changegroup) the recipient must know about and remove them
1093 # from the changegroup.
1093 # from the changegroup.
1094 def prune_manifests():
1094 def prune_manifests():
1095 has_mnfst_set = {}
1095 has_mnfst_set = {}
1096 for n in msng_mnfst_set:
1096 for n in msng_mnfst_set:
1097 # If a 'missing' manifest thinks it belongs to a changenode
1097 # If a 'missing' manifest thinks it belongs to a changenode
1098 # the recipient is assumed to have, obviously the recipient
1098 # the recipient is assumed to have, obviously the recipient
1099 # must have that manifest.
1099 # must have that manifest.
1100 linknode = cl.node(mnfst.linkrev(n))
1100 linknode = cl.node(mnfst.linkrev(n))
1101 if linknode in has_cl_set:
1101 if linknode in has_cl_set:
1102 has_mnfst_set[n] = 1
1102 has_mnfst_set[n] = 1
1103 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1103 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1104
1104
1105 # Use the information collected in collect_manifests_and_files to say
1105 # Use the information collected in collect_manifests_and_files to say
1106 # which changenode any manifestnode belongs to.
1106 # which changenode any manifestnode belongs to.
1107 def lookup_manifest_link(mnfstnode):
1107 def lookup_manifest_link(mnfstnode):
1108 return msng_mnfst_set[mnfstnode]
1108 return msng_mnfst_set[mnfstnode]
1109
1109
1110 # A function generating function that sets up the initial environment
1110 # A function generating function that sets up the initial environment
1111 # the inner function.
1111 # the inner function.
1112 def filenode_collector(changedfiles):
1112 def filenode_collector(changedfiles):
1113 next_rev = [0]
1113 next_rev = [0]
1114 # This gathers information from each manifestnode included in the
1114 # This gathers information from each manifestnode included in the
1115 # changegroup about which filenodes the manifest node references
1115 # changegroup about which filenodes the manifest node references
1116 # so we can include those in the changegroup too.
1116 # so we can include those in the changegroup too.
1117 #
1117 #
1118 # It also remembers which changenode each filenode belongs to. It
1118 # It also remembers which changenode each filenode belongs to. It
1119 # does this by assuming the a filenode belongs to the changenode
1119 # does this by assuming the a filenode belongs to the changenode
1120 # the first manifest that references it belongs to.
1120 # the first manifest that references it belongs to.
1121 def collect_msng_filenodes(mnfstnode):
1121 def collect_msng_filenodes(mnfstnode):
1122 r = mnfst.rev(mnfstnode)
1122 r = mnfst.rev(mnfstnode)
1123 if r == next_rev[0]:
1123 if r == next_rev[0]:
1124 # If the last rev we looked at was the one just previous,
1124 # If the last rev we looked at was the one just previous,
1125 # we only need to see a diff.
1125 # we only need to see a diff.
1126 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1126 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1127 # For each line in the delta
1127 # For each line in the delta
1128 for dline in delta.splitlines():
1128 for dline in delta.splitlines():
1129 # get the filename and filenode for that line
1129 # get the filename and filenode for that line
1130 f, fnode = dline.split('\0')
1130 f, fnode = dline.split('\0')
1131 fnode = bin(fnode[:40])
1131 fnode = bin(fnode[:40])
1132 f = changedfiles.get(f, None)
1132 f = changedfiles.get(f, None)
1133 # And if the file is in the list of files we care
1133 # And if the file is in the list of files we care
1134 # about.
1134 # about.
1135 if f is not None:
1135 if f is not None:
1136 # Get the changenode this manifest belongs to
1136 # Get the changenode this manifest belongs to
1137 clnode = msng_mnfst_set[mnfstnode]
1137 clnode = msng_mnfst_set[mnfstnode]
1138 # Create the set of filenodes for the file if
1138 # Create the set of filenodes for the file if
1139 # there isn't one already.
1139 # there isn't one already.
1140 ndset = msng_filenode_set.setdefault(f, {})
1140 ndset = msng_filenode_set.setdefault(f, {})
1141 # And set the filenode's changelog node to the
1141 # And set the filenode's changelog node to the
1142 # manifest's if it hasn't been set already.
1142 # manifest's if it hasn't been set already.
1143 ndset.setdefault(fnode, clnode)
1143 ndset.setdefault(fnode, clnode)
1144 else:
1144 else:
1145 # Otherwise we need a full manifest.
1145 # Otherwise we need a full manifest.
1146 m = mnfst.read(mnfstnode)
1146 m = mnfst.read(mnfstnode)
1147 # For every file in we care about.
1147 # For every file in we care about.
1148 for f in changedfiles:
1148 for f in changedfiles:
1149 fnode = m.get(f, None)
1149 fnode = m.get(f, None)
1150 # If it's in the manifest
1150 # If it's in the manifest
1151 if fnode is not None:
1151 if fnode is not None:
1152 # See comments above.
1152 # See comments above.
1153 clnode = msng_mnfst_set[mnfstnode]
1153 clnode = msng_mnfst_set[mnfstnode]
1154 ndset = msng_filenode_set.setdefault(f, {})
1154 ndset = msng_filenode_set.setdefault(f, {})
1155 ndset.setdefault(fnode, clnode)
1155 ndset.setdefault(fnode, clnode)
1156 # Remember the revision we hope to see next.
1156 # Remember the revision we hope to see next.
1157 next_rev[0] = r + 1
1157 next_rev[0] = r + 1
1158 return collect_msng_filenodes
1158 return collect_msng_filenodes
1159
1159
1160 # We have a list of filenodes we think we need for a file, lets remove
1160 # We have a list of filenodes we think we need for a file, lets remove
1161 # all those we now the recipient must have.
1161 # all those we now the recipient must have.
1162 def prune_filenodes(f, filerevlog):
1162 def prune_filenodes(f, filerevlog):
1163 msngset = msng_filenode_set[f]
1163 msngset = msng_filenode_set[f]
1164 hasset = {}
1164 hasset = {}
1165 # If a 'missing' filenode thinks it belongs to a changenode we
1165 # If a 'missing' filenode thinks it belongs to a changenode we
1166 # assume the recipient must have, then the recipient must have
1166 # assume the recipient must have, then the recipient must have
1167 # that filenode.
1167 # that filenode.
1168 for n in msngset:
1168 for n in msngset:
1169 clnode = cl.node(filerevlog.linkrev(n))
1169 clnode = cl.node(filerevlog.linkrev(n))
1170 if clnode in has_cl_set:
1170 if clnode in has_cl_set:
1171 hasset[n] = 1
1171 hasset[n] = 1
1172 prune_parents(filerevlog, hasset, msngset)
1172 prune_parents(filerevlog, hasset, msngset)
1173
1173
1174 # A function generator function that sets up the a context for the
1174 # A function generator function that sets up the a context for the
1175 # inner function.
1175 # inner function.
1176 def lookup_filenode_link_func(fname):
1176 def lookup_filenode_link_func(fname):
1177 msngset = msng_filenode_set[fname]
1177 msngset = msng_filenode_set[fname]
1178 # Lookup the changenode the filenode belongs to.
1178 # Lookup the changenode the filenode belongs to.
1179 def lookup_filenode_link(fnode):
1179 def lookup_filenode_link(fnode):
1180 return msngset[fnode]
1180 return msngset[fnode]
1181 return lookup_filenode_link
1181 return lookup_filenode_link
1182
1182
1183 # Now that we have all theses utility functions to help out and
1183 # Now that we have all theses utility functions to help out and
1184 # logically divide up the task, generate the group.
1184 # logically divide up the task, generate the group.
1185 def gengroup():
1185 def gengroup():
1186 # The set of changed files starts empty.
1186 # The set of changed files starts empty.
1187 changedfiles = {}
1187 changedfiles = {}
1188 # Create a changenode group generator that will call our functions
1188 # Create a changenode group generator that will call our functions
1189 # back to lookup the owning changenode and collect information.
1189 # back to lookup the owning changenode and collect information.
1190 group = cl.group(msng_cl_lst, identity,
1190 group = cl.group(msng_cl_lst, identity,
1191 manifest_and_file_collector(changedfiles))
1191 manifest_and_file_collector(changedfiles))
1192 for chnk in group:
1192 for chnk in group:
1193 yield chnk
1193 yield chnk
1194
1194
1195 # The list of manifests has been collected by the generator
1195 # The list of manifests has been collected by the generator
1196 # calling our functions back.
1196 # calling our functions back.
1197 prune_manifests()
1197 prune_manifests()
1198 msng_mnfst_lst = msng_mnfst_set.keys()
1198 msng_mnfst_lst = msng_mnfst_set.keys()
1199 # Sort the manifestnodes by revision number.
1199 # Sort the manifestnodes by revision number.
1200 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1200 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1201 # Create a generator for the manifestnodes that calls our lookup
1201 # Create a generator for the manifestnodes that calls our lookup
1202 # and data collection functions back.
1202 # and data collection functions back.
1203 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1203 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1204 filenode_collector(changedfiles))
1204 filenode_collector(changedfiles))
1205 for chnk in group:
1205 for chnk in group:
1206 yield chnk
1206 yield chnk
1207
1207
1208 # These are no longer needed, dereference and toss the memory for
1208 # These are no longer needed, dereference and toss the memory for
1209 # them.
1209 # them.
1210 msng_mnfst_lst = None
1210 msng_mnfst_lst = None
1211 msng_mnfst_set.clear()
1211 msng_mnfst_set.clear()
1212
1212
1213 changedfiles = changedfiles.keys()
1213 changedfiles = changedfiles.keys()
1214 changedfiles.sort()
1214 changedfiles.sort()
1215 # Go through all our files in order sorted by name.
1215 # Go through all our files in order sorted by name.
1216 for fname in changedfiles:
1216 for fname in changedfiles:
1217 filerevlog = self.file(fname)
1217 filerevlog = self.file(fname)
1218 # Toss out the filenodes that the recipient isn't really
1218 # Toss out the filenodes that the recipient isn't really
1219 # missing.
1219 # missing.
1220 if msng_filenode_set.has_key(fname):
1220 if msng_filenode_set.has_key(fname):
1221 prune_filenodes(fname, filerevlog)
1221 prune_filenodes(fname, filerevlog)
1222 msng_filenode_lst = msng_filenode_set[fname].keys()
1222 msng_filenode_lst = msng_filenode_set[fname].keys()
1223 else:
1223 else:
1224 msng_filenode_lst = []
1224 msng_filenode_lst = []
1225 # If any filenodes are left, generate the group for them,
1225 # If any filenodes are left, generate the group for them,
1226 # otherwise don't bother.
1226 # otherwise don't bother.
1227 if len(msng_filenode_lst) > 0:
1227 if len(msng_filenode_lst) > 0:
1228 yield struct.pack(">l", len(fname) + 4) + fname
1228 yield struct.pack(">l", len(fname) + 4) + fname
1229 # Sort the filenodes by their revision #
1229 # Sort the filenodes by their revision #
1230 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1230 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1231 # Create a group generator and only pass in a changenode
1231 # Create a group generator and only pass in a changenode
1232 # lookup function as we need to collect no information
1232 # lookup function as we need to collect no information
1233 # from filenodes.
1233 # from filenodes.
1234 group = filerevlog.group(msng_filenode_lst,
1234 group = filerevlog.group(msng_filenode_lst,
1235 lookup_filenode_link_func(fname))
1235 lookup_filenode_link_func(fname))
1236 for chnk in group:
1236 for chnk in group:
1237 yield chnk
1237 yield chnk
1238 if msng_filenode_set.has_key(fname):
1238 if msng_filenode_set.has_key(fname):
1239 # Don't need this anymore, toss it to free memory.
1239 # Don't need this anymore, toss it to free memory.
1240 del msng_filenode_set[fname]
1240 del msng_filenode_set[fname]
1241 # Signal that no more groups are left.
1241 # Signal that no more groups are left.
1242 yield struct.pack(">l", 0)
1242 yield struct.pack(">l", 0)
1243
1243
1244 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1244 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1245
1245
1246 return util.chunkbuffer(gengroup())
1246 return util.chunkbuffer(gengroup())
1247
1247
1248 def changegroup(self, basenodes, source):
1248 def changegroup(self, basenodes, source):
1249 """Generate a changegroup of all nodes that we have that a recipient
1249 """Generate a changegroup of all nodes that we have that a recipient
1250 doesn't.
1250 doesn't.
1251
1251
1252 This is much easier than the previous function as we can assume that
1252 This is much easier than the previous function as we can assume that
1253 the recipient has any changenode we aren't sending them."""
1253 the recipient has any changenode we aren't sending them."""
1254
1254
1255 self.hook('preoutgoing', throw=True, source=source)
1255 self.hook('preoutgoing', throw=True, source=source)
1256
1256
1257 cl = self.changelog
1257 cl = self.changelog
1258 nodes = cl.nodesbetween(basenodes, None)[0]
1258 nodes = cl.nodesbetween(basenodes, None)[0]
1259 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1259 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1260
1260
1261 def identity(x):
1261 def identity(x):
1262 return x
1262 return x
1263
1263
1264 def gennodelst(revlog):
1264 def gennodelst(revlog):
1265 for r in xrange(0, revlog.count()):
1265 for r in xrange(0, revlog.count()):
1266 n = revlog.node(r)
1266 n = revlog.node(r)
1267 if revlog.linkrev(n) in revset:
1267 if revlog.linkrev(n) in revset:
1268 yield n
1268 yield n
1269
1269
1270 def changed_file_collector(changedfileset):
1270 def changed_file_collector(changedfileset):
1271 def collect_changed_files(clnode):
1271 def collect_changed_files(clnode):
1272 c = cl.read(clnode)
1272 c = cl.read(clnode)
1273 for fname in c[3]:
1273 for fname in c[3]:
1274 changedfileset[fname] = 1
1274 changedfileset[fname] = 1
1275 return collect_changed_files
1275 return collect_changed_files
1276
1276
1277 def lookuprevlink_func(revlog):
1277 def lookuprevlink_func(revlog):
1278 def lookuprevlink(n):
1278 def lookuprevlink(n):
1279 return cl.node(revlog.linkrev(n))
1279 return cl.node(revlog.linkrev(n))
1280 return lookuprevlink
1280 return lookuprevlink
1281
1281
1282 def gengroup():
1282 def gengroup():
1283 # construct a list of all changed files
1283 # construct a list of all changed files
1284 changedfiles = {}
1284 changedfiles = {}
1285
1285
1286 for chnk in cl.group(nodes, identity,
1286 for chnk in cl.group(nodes, identity,
1287 changed_file_collector(changedfiles)):
1287 changed_file_collector(changedfiles)):
1288 yield chnk
1288 yield chnk
1289 changedfiles = changedfiles.keys()
1289 changedfiles = changedfiles.keys()
1290 changedfiles.sort()
1290 changedfiles.sort()
1291
1291
1292 mnfst = self.manifest
1292 mnfst = self.manifest
1293 nodeiter = gennodelst(mnfst)
1293 nodeiter = gennodelst(mnfst)
1294 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1294 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1295 yield chnk
1295 yield chnk
1296
1296
1297 for fname in changedfiles:
1297 for fname in changedfiles:
1298 filerevlog = self.file(fname)
1298 filerevlog = self.file(fname)
1299 nodeiter = gennodelst(filerevlog)
1299 nodeiter = gennodelst(filerevlog)
1300 nodeiter = list(nodeiter)
1300 nodeiter = list(nodeiter)
1301 if nodeiter:
1301 if nodeiter:
1302 yield struct.pack(">l", len(fname) + 4) + fname
1302 yield struct.pack(">l", len(fname) + 4) + fname
1303 lookup = lookuprevlink_func(filerevlog)
1303 lookup = lookuprevlink_func(filerevlog)
1304 for chnk in filerevlog.group(nodeiter, lookup):
1304 for chnk in filerevlog.group(nodeiter, lookup):
1305 yield chnk
1305 yield chnk
1306
1306
1307 yield struct.pack(">l", 0)
1307 yield struct.pack(">l", 0)
1308 self.hook('outgoing', node=hex(nodes[0]), source=source)
1308 self.hook('outgoing', node=hex(nodes[0]), source=source)
1309
1309
1310 return util.chunkbuffer(gengroup())
1310 return util.chunkbuffer(gengroup())
1311
1311
1312 def addchangegroup(self, source):
1312 def addchangegroup(self, source):
1313
1313
1314 def getchunk():
1314 def getchunk():
1315 d = source.read(4)
1315 d = source.read(4)
1316 if not d:
1316 if not d:
1317 return ""
1317 return ""
1318 l = struct.unpack(">l", d)[0]
1318 l = struct.unpack(">l", d)[0]
1319 if l <= 4:
1319 if l <= 4:
1320 return ""
1320 return ""
1321 d = source.read(l - 4)
1321 d = source.read(l - 4)
1322 if len(d) < l - 4:
1322 if len(d) < l - 4:
1323 raise repo.RepoError(_("premature EOF reading chunk"
1323 raise repo.RepoError(_("premature EOF reading chunk"
1324 " (got %d bytes, expected %d)")
1324 " (got %d bytes, expected %d)")
1325 % (len(d), l - 4))
1325 % (len(d), l - 4))
1326 return d
1326 return d
1327
1327
1328 def getgroup():
1328 def getgroup():
1329 while 1:
1329 while 1:
1330 c = getchunk()
1330 c = getchunk()
1331 if not c:
1331 if not c:
1332 break
1332 break
1333 yield c
1333 yield c
1334
1334
1335 def csmap(x):
1335 def csmap(x):
1336 self.ui.debug(_("add changeset %s\n") % short(x))
1336 self.ui.debug(_("add changeset %s\n") % short(x))
1337 return self.changelog.count()
1337 return self.changelog.count()
1338
1338
1339 def revmap(x):
1339 def revmap(x):
1340 return self.changelog.rev(x)
1340 return self.changelog.rev(x)
1341
1341
1342 if not source:
1342 if not source:
1343 return
1343 return
1344
1344
1345 self.hook('prechangegroup', throw=True)
1345 self.hook('prechangegroup', throw=True)
1346
1346
1347 changesets = files = revisions = 0
1347 changesets = files = revisions = 0
1348
1348
1349 tr = self.transaction()
1349 tr = self.transaction()
1350
1350
1351 oldheads = len(self.changelog.heads())
1351 oldheads = len(self.changelog.heads())
1352
1352
1353 # pull off the changeset group
1353 # pull off the changeset group
1354 self.ui.status(_("adding changesets\n"))
1354 self.ui.status(_("adding changesets\n"))
1355 co = self.changelog.tip()
1355 co = self.changelog.tip()
1356 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1356 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1357 cnr, cor = map(self.changelog.rev, (cn, co))
1357 cnr, cor = map(self.changelog.rev, (cn, co))
1358 if cn == nullid:
1358 if cn == nullid:
1359 cnr = cor
1359 cnr = cor
1360 changesets = cnr - cor
1360 changesets = cnr - cor
1361
1361
1362 # pull off the manifest group
1362 # pull off the manifest group
1363 self.ui.status(_("adding manifests\n"))
1363 self.ui.status(_("adding manifests\n"))
1364 mm = self.manifest.tip()
1364 mm = self.manifest.tip()
1365 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1365 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1366
1366
1367 # process the files
1367 # process the files
1368 self.ui.status(_("adding file changes\n"))
1368 self.ui.status(_("adding file changes\n"))
1369 while 1:
1369 while 1:
1370 f = getchunk()
1370 f = getchunk()
1371 if not f:
1371 if not f:
1372 break
1372 break
1373 self.ui.debug(_("adding %s revisions\n") % f)
1373 self.ui.debug(_("adding %s revisions\n") % f)
1374 fl = self.file(f)
1374 fl = self.file(f)
1375 o = fl.count()
1375 o = fl.count()
1376 n = fl.addgroup(getgroup(), revmap, tr)
1376 n = fl.addgroup(getgroup(), revmap, tr)
1377 revisions += fl.count() - o
1377 revisions += fl.count() - o
1378 files += 1
1378 files += 1
1379
1379
1380 newheads = len(self.changelog.heads())
1380 newheads = len(self.changelog.heads())
1381 heads = ""
1381 heads = ""
1382 if oldheads and newheads > oldheads:
1382 if oldheads and newheads > oldheads:
1383 heads = _(" (+%d heads)") % (newheads - oldheads)
1383 heads = _(" (+%d heads)") % (newheads - oldheads)
1384
1384
1385 self.ui.status(_("added %d changesets"
1385 self.ui.status(_("added %d changesets"
1386 " with %d changes to %d files%s\n")
1386 " with %d changes to %d files%s\n")
1387 % (changesets, revisions, files, heads))
1387 % (changesets, revisions, files, heads))
1388
1388
1389 self.hook('pretxnchangegroup', throw=True,
1389 self.hook('pretxnchangegroup', throw=True,
1390 node=hex(self.changelog.node(cor+1)))
1390 node=hex(self.changelog.node(cor+1)))
1391
1391
1392 tr.close()
1392 tr.close()
1393
1393
1394 if changesets > 0:
1394 if changesets > 0:
1395 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1395 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1396
1396
1397 for i in range(cor + 1, cnr + 1):
1397 for i in range(cor + 1, cnr + 1):
1398 self.hook("incoming", node=hex(self.changelog.node(i)))
1398 self.hook("incoming", node=hex(self.changelog.node(i)))
1399
1399
1400 def update(self, node, allow=False, force=False, choose=None,
1400 def update(self, node, allow=False, force=False, choose=None,
1401 moddirstate=True, forcemerge=False, wlock=None):
1401 moddirstate=True, forcemerge=False, wlock=None):
1402 pl = self.dirstate.parents()
1402 pl = self.dirstate.parents()
1403 if not force and pl[1] != nullid:
1403 if not force and pl[1] != nullid:
1404 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1404 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1405 return 1
1405 return 1
1406
1406
1407 err = False
1407 err = False
1408
1408
1409 p1, p2 = pl[0], node
1409 p1, p2 = pl[0], node
1410 pa = self.changelog.ancestor(p1, p2)
1410 pa = self.changelog.ancestor(p1, p2)
1411 m1n = self.changelog.read(p1)[0]
1411 m1n = self.changelog.read(p1)[0]
1412 m2n = self.changelog.read(p2)[0]
1412 m2n = self.changelog.read(p2)[0]
1413 man = self.manifest.ancestor(m1n, m2n)
1413 man = self.manifest.ancestor(m1n, m2n)
1414 m1 = self.manifest.read(m1n)
1414 m1 = self.manifest.read(m1n)
1415 mf1 = self.manifest.readflags(m1n)
1415 mf1 = self.manifest.readflags(m1n)
1416 m2 = self.manifest.read(m2n).copy()
1416 m2 = self.manifest.read(m2n).copy()
1417 mf2 = self.manifest.readflags(m2n)
1417 mf2 = self.manifest.readflags(m2n)
1418 ma = self.manifest.read(man)
1418 ma = self.manifest.read(man)
1419 mfa = self.manifest.readflags(man)
1419 mfa = self.manifest.readflags(man)
1420
1420
1421 modified, added, removed, deleted, unknown = self.changes()
1421 modified, added, removed, deleted, unknown = self.changes()
1422
1422
1423 # is this a jump, or a merge? i.e. is there a linear path
1423 # is this a jump, or a merge? i.e. is there a linear path
1424 # from p1 to p2?
1424 # from p1 to p2?
1425 linear_path = (pa == p1 or pa == p2)
1425 linear_path = (pa == p1 or pa == p2)
1426
1426
1427 if allow and linear_path:
1427 if allow and linear_path:
1428 raise util.Abort(_("there is nothing to merge, "
1428 raise util.Abort(_("there is nothing to merge, "
1429 "just use 'hg update'"))
1429 "just use 'hg update'"))
1430 if allow and not forcemerge:
1430 if allow and not forcemerge:
1431 if modified or added or removed:
1431 if modified or added or removed:
1432 raise util.Abort(_("outstanding uncommited changes"))
1432 raise util.Abort(_("outstanding uncommited changes"))
1433 if not forcemerge and not force:
1433 if not forcemerge and not force:
1434 for f in unknown:
1434 for f in unknown:
1435 if f in m2:
1435 if f in m2:
1436 t1 = self.wread(f)
1436 t1 = self.wread(f)
1437 t2 = self.file(f).read(m2[f])
1437 t2 = self.file(f).read(m2[f])
1438 if cmp(t1, t2) != 0:
1438 if cmp(t1, t2) != 0:
1439 raise util.Abort(_("'%s' already exists in the working"
1439 raise util.Abort(_("'%s' already exists in the working"
1440 " dir and differs from remote") % f)
1440 " dir and differs from remote") % f)
1441
1441
1442 # resolve the manifest to determine which files
1442 # resolve the manifest to determine which files
1443 # we care about merging
1443 # we care about merging
1444 self.ui.note(_("resolving manifests\n"))
1444 self.ui.note(_("resolving manifests\n"))
1445 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1445 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1446 (force, allow, moddirstate, linear_path))
1446 (force, allow, moddirstate, linear_path))
1447 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1447 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1448 (short(man), short(m1n), short(m2n)))
1448 (short(man), short(m1n), short(m2n)))
1449
1449
1450 merge = {}
1450 merge = {}
1451 get = {}
1451 get = {}
1452 remove = []
1452 remove = []
1453
1453
1454 # construct a working dir manifest
1454 # construct a working dir manifest
1455 mw = m1.copy()
1455 mw = m1.copy()
1456 mfw = mf1.copy()
1456 mfw = mf1.copy()
1457 umap = dict.fromkeys(unknown)
1457 umap = dict.fromkeys(unknown)
1458
1458
1459 for f in added + modified + unknown:
1459 for f in added + modified + unknown:
1460 mw[f] = ""
1460 mw[f] = ""
1461 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1461 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1462
1462
1463 if moddirstate and not wlock:
1463 if moddirstate and not wlock:
1464 wlock = self.wlock()
1464 wlock = self.wlock()
1465
1465
1466 for f in deleted + removed:
1466 for f in deleted + removed:
1467 if f in mw:
1467 if f in mw:
1468 del mw[f]
1468 del mw[f]
1469
1469
1470 # If we're jumping between revisions (as opposed to merging),
1470 # If we're jumping between revisions (as opposed to merging),
1471 # and if neither the working directory nor the target rev has
1471 # and if neither the working directory nor the target rev has
1472 # the file, then we need to remove it from the dirstate, to
1472 # the file, then we need to remove it from the dirstate, to
1473 # prevent the dirstate from listing the file when it is no
1473 # prevent the dirstate from listing the file when it is no
1474 # longer in the manifest.
1474 # longer in the manifest.
1475 if moddirstate and linear_path and f not in m2:
1475 if moddirstate and linear_path and f not in m2:
1476 self.dirstate.forget((f,))
1476 self.dirstate.forget((f,))
1477
1477
1478 # Compare manifests
1478 # Compare manifests
1479 for f, n in mw.iteritems():
1479 for f, n in mw.iteritems():
1480 if choose and not choose(f):
1480 if choose and not choose(f):
1481 continue
1481 continue
1482 if f in m2:
1482 if f in m2:
1483 s = 0
1483 s = 0
1484
1484
1485 # is the wfile new since m1, and match m2?
1485 # is the wfile new since m1, and match m2?
1486 if f not in m1:
1486 if f not in m1:
1487 t1 = self.wread(f)
1487 t1 = self.wread(f)
1488 t2 = self.file(f).read(m2[f])
1488 t2 = self.file(f).read(m2[f])
1489 if cmp(t1, t2) == 0:
1489 if cmp(t1, t2) == 0:
1490 n = m2[f]
1490 n = m2[f]
1491 del t1, t2
1491 del t1, t2
1492
1492
1493 # are files different?
1493 # are files different?
1494 if n != m2[f]:
1494 if n != m2[f]:
1495 a = ma.get(f, nullid)
1495 a = ma.get(f, nullid)
1496 # are both different from the ancestor?
1496 # are both different from the ancestor?
1497 if n != a and m2[f] != a:
1497 if n != a and m2[f] != a:
1498 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1498 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1499 # merge executable bits
1499 # merge executable bits
1500 # "if we changed or they changed, change in merge"
1500 # "if we changed or they changed, change in merge"
1501 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1501 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1502 mode = ((a^b) | (a^c)) ^ a
1502 mode = ((a^b) | (a^c)) ^ a
1503 merge[f] = (m1.get(f, nullid), m2[f], mode)
1503 merge[f] = (m1.get(f, nullid), m2[f], mode)
1504 s = 1
1504 s = 1
1505 # are we clobbering?
1505 # are we clobbering?
1506 # is remote's version newer?
1506 # is remote's version newer?
1507 # or are we going back in time?
1507 # or are we going back in time?
1508 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1508 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1509 self.ui.debug(_(" remote %s is newer, get\n") % f)
1509 self.ui.debug(_(" remote %s is newer, get\n") % f)
1510 get[f] = m2[f]
1510 get[f] = m2[f]
1511 s = 1
1511 s = 1
1512 elif f in umap:
1512 elif f in umap:
1513 # this unknown file is the same as the checkout
1513 # this unknown file is the same as the checkout
1514 get[f] = m2[f]
1514 get[f] = m2[f]
1515
1515
1516 if not s and mfw[f] != mf2[f]:
1516 if not s and mfw[f] != mf2[f]:
1517 if force:
1517 if force:
1518 self.ui.debug(_(" updating permissions for %s\n") % f)
1518 self.ui.debug(_(" updating permissions for %s\n") % f)
1519 util.set_exec(self.wjoin(f), mf2[f])
1519 util.set_exec(self.wjoin(f), mf2[f])
1520 else:
1520 else:
1521 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1521 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1522 mode = ((a^b) | (a^c)) ^ a
1522 mode = ((a^b) | (a^c)) ^ a
1523 if mode != b:
1523 if mode != b:
1524 self.ui.debug(_(" updating permissions for %s\n")
1524 self.ui.debug(_(" updating permissions for %s\n")
1525 % f)
1525 % f)
1526 util.set_exec(self.wjoin(f), mode)
1526 util.set_exec(self.wjoin(f), mode)
1527 del m2[f]
1527 del m2[f]
1528 elif f in ma:
1528 elif f in ma:
1529 if n != ma[f]:
1529 if n != ma[f]:
1530 r = _("d")
1530 r = _("d")
1531 if not force and (linear_path or allow):
1531 if not force and (linear_path or allow):
1532 r = self.ui.prompt(
1532 r = self.ui.prompt(
1533 (_(" local changed %s which remote deleted\n") % f) +
1533 (_(" local changed %s which remote deleted\n") % f) +
1534 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1534 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1535 if r == _("d"):
1535 if r == _("d"):
1536 remove.append(f)
1536 remove.append(f)
1537 else:
1537 else:
1538 self.ui.debug(_("other deleted %s\n") % f)
1538 self.ui.debug(_("other deleted %s\n") % f)
1539 remove.append(f) # other deleted it
1539 remove.append(f) # other deleted it
1540 else:
1540 else:
1541 # file is created on branch or in working directory
1541 # file is created on branch or in working directory
1542 if force and f not in umap:
1542 if force and f not in umap:
1543 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1543 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1544 remove.append(f)
1544 remove.append(f)
1545 elif n == m1.get(f, nullid): # same as parent
1545 elif n == m1.get(f, nullid): # same as parent
1546 if p2 == pa: # going backwards?
1546 if p2 == pa: # going backwards?
1547 self.ui.debug(_("remote deleted %s\n") % f)
1547 self.ui.debug(_("remote deleted %s\n") % f)
1548 remove.append(f)
1548 remove.append(f)
1549 else:
1549 else:
1550 self.ui.debug(_("local modified %s, keeping\n") % f)
1550 self.ui.debug(_("local modified %s, keeping\n") % f)
1551 else:
1551 else:
1552 self.ui.debug(_("working dir created %s, keeping\n") % f)
1552 self.ui.debug(_("working dir created %s, keeping\n") % f)
1553
1553
1554 for f, n in m2.iteritems():
1554 for f, n in m2.iteritems():
1555 if choose and not choose(f):
1555 if choose and not choose(f):
1556 continue
1556 continue
1557 if f[0] == "/":
1557 if f[0] == "/":
1558 continue
1558 continue
1559 if f in ma and n != ma[f]:
1559 if f in ma and n != ma[f]:
1560 r = _("k")
1560 r = _("k")
1561 if not force and (linear_path or allow):
1561 if not force and (linear_path or allow):
1562 r = self.ui.prompt(
1562 r = self.ui.prompt(
1563 (_("remote changed %s which local deleted\n") % f) +
1563 (_("remote changed %s which local deleted\n") % f) +
1564 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1564 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1565 if r == _("k"):
1565 if r == _("k"):
1566 get[f] = n
1566 get[f] = n
1567 elif f not in ma:
1567 elif f not in ma:
1568 self.ui.debug(_("remote created %s\n") % f)
1568 self.ui.debug(_("remote created %s\n") % f)
1569 get[f] = n
1569 get[f] = n
1570 else:
1570 else:
1571 if force or p2 == pa: # going backwards?
1571 if force or p2 == pa: # going backwards?
1572 self.ui.debug(_("local deleted %s, recreating\n") % f)
1572 self.ui.debug(_("local deleted %s, recreating\n") % f)
1573 get[f] = n
1573 get[f] = n
1574 else:
1574 else:
1575 self.ui.debug(_("local deleted %s\n") % f)
1575 self.ui.debug(_("local deleted %s\n") % f)
1576
1576
1577 del mw, m1, m2, ma
1577 del mw, m1, m2, ma
1578
1578
1579 if force:
1579 if force:
1580 for f in merge:
1580 for f in merge:
1581 get[f] = merge[f][1]
1581 get[f] = merge[f][1]
1582 merge = {}
1582 merge = {}
1583
1583
1584 if linear_path or force:
1584 if linear_path or force:
1585 # we don't need to do any magic, just jump to the new rev
1585 # we don't need to do any magic, just jump to the new rev
1586 branch_merge = False
1586 branch_merge = False
1587 p1, p2 = p2, nullid
1587 p1, p2 = p2, nullid
1588 else:
1588 else:
1589 if not allow:
1589 if not allow:
1590 self.ui.status(_("this update spans a branch"
1590 self.ui.status(_("this update spans a branch"
1591 " affecting the following files:\n"))
1591 " affecting the following files:\n"))
1592 fl = merge.keys() + get.keys()
1592 fl = merge.keys() + get.keys()
1593 fl.sort()
1593 fl.sort()
1594 for f in fl:
1594 for f in fl:
1595 cf = ""
1595 cf = ""
1596 if f in merge:
1596 if f in merge:
1597 cf = _(" (resolve)")
1597 cf = _(" (resolve)")
1598 self.ui.status(" %s%s\n" % (f, cf))
1598 self.ui.status(" %s%s\n" % (f, cf))
1599 self.ui.warn(_("aborting update spanning branches!\n"))
1599 self.ui.warn(_("aborting update spanning branches!\n"))
1600 self.ui.status(_("(use update -m to merge across branches"
1600 self.ui.status(_("(use update -m to merge across branches"
1601 " or -C to lose changes)\n"))
1601 " or -C to lose changes)\n"))
1602 return 1
1602 return 1
1603 branch_merge = True
1603 branch_merge = True
1604
1604
1605 # get the files we don't need to change
1605 # get the files we don't need to change
1606 files = get.keys()
1606 files = get.keys()
1607 files.sort()
1607 files.sort()
1608 for f in files:
1608 for f in files:
1609 if f[0] == "/":
1609 if f[0] == "/":
1610 continue
1610 continue
1611 self.ui.note(_("getting %s\n") % f)
1611 self.ui.note(_("getting %s\n") % f)
1612 t = self.file(f).read(get[f])
1612 t = self.file(f).read(get[f])
1613 self.wwrite(f, t)
1613 self.wwrite(f, t)
1614 util.set_exec(self.wjoin(f), mf2[f])
1614 util.set_exec(self.wjoin(f), mf2[f])
1615 if moddirstate:
1615 if moddirstate:
1616 if branch_merge:
1616 if branch_merge:
1617 self.dirstate.update([f], 'n', st_mtime=-1)
1617 self.dirstate.update([f], 'n', st_mtime=-1)
1618 else:
1618 else:
1619 self.dirstate.update([f], 'n')
1619 self.dirstate.update([f], 'n')
1620
1620
1621 # merge the tricky bits
1621 # merge the tricky bits
1622 files = merge.keys()
1622 files = merge.keys()
1623 files.sort()
1623 files.sort()
1624 for f in files:
1624 for f in files:
1625 self.ui.status(_("merging %s\n") % f)
1625 self.ui.status(_("merging %s\n") % f)
1626 my, other, flag = merge[f]
1626 my, other, flag = merge[f]
1627 ret = self.merge3(f, my, other)
1627 ret = self.merge3(f, my, other)
1628 if ret:
1628 if ret:
1629 err = True
1629 err = True
1630 util.set_exec(self.wjoin(f), flag)
1630 util.set_exec(self.wjoin(f), flag)
1631 if moddirstate:
1631 if moddirstate:
1632 if branch_merge:
1632 if branch_merge:
1633 # We've done a branch merge, mark this file as merged
1633 # We've done a branch merge, mark this file as merged
1634 # so that we properly record the merger later
1634 # so that we properly record the merger later
1635 self.dirstate.update([f], 'm')
1635 self.dirstate.update([f], 'm')
1636 else:
1636 else:
1637 # We've update-merged a locally modified file, so
1637 # We've update-merged a locally modified file, so
1638 # we set the dirstate to emulate a normal checkout
1638 # we set the dirstate to emulate a normal checkout
1639 # of that file some time in the past. Thus our
1639 # of that file some time in the past. Thus our
1640 # merge will appear as a normal local file
1640 # merge will appear as a normal local file
1641 # modification.
1641 # modification.
1642 f_len = len(self.file(f).read(other))
1642 f_len = len(self.file(f).read(other))
1643 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1643 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1644
1644
1645 remove.sort()
1645 remove.sort()
1646 for f in remove:
1646 for f in remove:
1647 self.ui.note(_("removing %s\n") % f)
1647 self.ui.note(_("removing %s\n") % f)
1648 try:
1648 try:
1649 util.unlink(self.wjoin(f))
1649 util.unlink(self.wjoin(f))
1650 except OSError, inst:
1650 except OSError, inst:
1651 if inst.errno != errno.ENOENT:
1651 if inst.errno != errno.ENOENT:
1652 self.ui.warn(_("update failed to remove %s: %s!\n") %
1652 self.ui.warn(_("update failed to remove %s: %s!\n") %
1653 (f, inst.strerror))
1653 (f, inst.strerror))
1654 if moddirstate:
1654 if moddirstate:
1655 if branch_merge:
1655 if branch_merge:
1656 self.dirstate.update(remove, 'r')
1656 self.dirstate.update(remove, 'r')
1657 else:
1657 else:
1658 self.dirstate.forget(remove)
1658 self.dirstate.forget(remove)
1659
1659
1660 if moddirstate:
1660 if moddirstate:
1661 self.dirstate.setparents(p1, p2)
1661 self.dirstate.setparents(p1, p2)
1662 return err
1662 return err
1663
1663
1664 def merge3(self, fn, my, other):
1664 def merge3(self, fn, my, other):
1665 """perform a 3-way merge in the working directory"""
1665 """perform a 3-way merge in the working directory"""
1666
1666
1667 def temp(prefix, node):
1667 def temp(prefix, node):
1668 pre = "%s~%s." % (os.path.basename(fn), prefix)
1668 pre = "%s~%s." % (os.path.basename(fn), prefix)
1669 (fd, name) = tempfile.mkstemp("", pre)
1669 (fd, name) = tempfile.mkstemp("", pre)
1670 f = os.fdopen(fd, "wb")
1670 f = os.fdopen(fd, "wb")
1671 self.wwrite(fn, fl.read(node), f)
1671 self.wwrite(fn, fl.read(node), f)
1672 f.close()
1672 f.close()
1673 return name
1673 return name
1674
1674
1675 fl = self.file(fn)
1675 fl = self.file(fn)
1676 base = fl.ancestor(my, other)
1676 base = fl.ancestor(my, other)
1677 a = self.wjoin(fn)
1677 a = self.wjoin(fn)
1678 b = temp("base", base)
1678 b = temp("base", base)
1679 c = temp("other", other)
1679 c = temp("other", other)
1680
1680
1681 self.ui.note(_("resolving %s\n") % fn)
1681 self.ui.note(_("resolving %s\n") % fn)
1682 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1682 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1683 (fn, short(my), short(other), short(base)))
1683 (fn, short(my), short(other), short(base)))
1684
1684
1685 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1685 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1686 or "hgmerge")
1686 or "hgmerge")
1687 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1687 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1688 if r:
1688 if r:
1689 self.ui.warn(_("merging %s failed!\n") % fn)
1689 self.ui.warn(_("merging %s failed!\n") % fn)
1690
1690
1691 os.unlink(b)
1691 os.unlink(b)
1692 os.unlink(c)
1692 os.unlink(c)
1693 return r
1693 return r
1694
1694
1695 def verify(self):
1695 def verify(self):
1696 filelinkrevs = {}
1696 filelinkrevs = {}
1697 filenodes = {}
1697 filenodes = {}
1698 changesets = revisions = files = 0
1698 changesets = revisions = files = 0
1699 errors = [0]
1699 errors = [0]
1700 neededmanifests = {}
1700 neededmanifests = {}
1701
1701
1702 def err(msg):
1702 def err(msg):
1703 self.ui.warn(msg + "\n")
1703 self.ui.warn(msg + "\n")
1704 errors[0] += 1
1704 errors[0] += 1
1705
1705
1706 def checksize(obj, name):
1706 def checksize(obj, name):
1707 d = obj.checksize()
1707 d = obj.checksize()
1708 if d[0]:
1708 if d[0]:
1709 err(_("%s data length off by %d bytes") % (name, d[0]))
1709 err(_("%s data length off by %d bytes") % (name, d[0]))
1710 if d[1]:
1710 if d[1]:
1711 err(_("%s index contains %d extra bytes") % (name, d[1]))
1711 err(_("%s index contains %d extra bytes") % (name, d[1]))
1712
1712
1713 seen = {}
1713 seen = {}
1714 self.ui.status(_("checking changesets\n"))
1714 self.ui.status(_("checking changesets\n"))
1715 checksize(self.changelog, "changelog")
1715 checksize(self.changelog, "changelog")
1716
1716
1717 for i in range(self.changelog.count()):
1717 for i in range(self.changelog.count()):
1718 changesets += 1
1718 changesets += 1
1719 n = self.changelog.node(i)
1719 n = self.changelog.node(i)
1720 l = self.changelog.linkrev(n)
1720 l = self.changelog.linkrev(n)
1721 if l != i:
1721 if l != i:
1722 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1722 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1723 if n in seen:
1723 if n in seen:
1724 err(_("duplicate changeset at revision %d") % i)
1724 err(_("duplicate changeset at revision %d") % i)
1725 seen[n] = 1
1725 seen[n] = 1
1726
1726
1727 for p in self.changelog.parents(n):
1727 for p in self.changelog.parents(n):
1728 if p not in self.changelog.nodemap:
1728 if p not in self.changelog.nodemap:
1729 err(_("changeset %s has unknown parent %s") %
1729 err(_("changeset %s has unknown parent %s") %
1730 (short(n), short(p)))
1730 (short(n), short(p)))
1731 try:
1731 try:
1732 changes = self.changelog.read(n)
1732 changes = self.changelog.read(n)
1733 except KeyboardInterrupt:
1733 except KeyboardInterrupt:
1734 self.ui.warn(_("interrupted"))
1734 self.ui.warn(_("interrupted"))
1735 raise
1735 raise
1736 except Exception, inst:
1736 except Exception, inst:
1737 err(_("unpacking changeset %s: %s") % (short(n), inst))
1737 err(_("unpacking changeset %s: %s") % (short(n), inst))
1738
1738
1739 neededmanifests[changes[0]] = n
1739 neededmanifests[changes[0]] = n
1740
1740
1741 for f in changes[3]:
1741 for f in changes[3]:
1742 filelinkrevs.setdefault(f, []).append(i)
1742 filelinkrevs.setdefault(f, []).append(i)
1743
1743
1744 seen = {}
1744 seen = {}
1745 self.ui.status(_("checking manifests\n"))
1745 self.ui.status(_("checking manifests\n"))
1746 checksize(self.manifest, "manifest")
1746 checksize(self.manifest, "manifest")
1747
1747
1748 for i in range(self.manifest.count()):
1748 for i in range(self.manifest.count()):
1749 n = self.manifest.node(i)
1749 n = self.manifest.node(i)
1750 l = self.manifest.linkrev(n)
1750 l = self.manifest.linkrev(n)
1751
1751
1752 if l < 0 or l >= self.changelog.count():
1752 if l < 0 or l >= self.changelog.count():
1753 err(_("bad manifest link (%d) at revision %d") % (l, i))
1753 err(_("bad manifest link (%d) at revision %d") % (l, i))
1754
1754
1755 if n in neededmanifests:
1755 if n in neededmanifests:
1756 del neededmanifests[n]
1756 del neededmanifests[n]
1757
1757
1758 if n in seen:
1758 if n in seen:
1759 err(_("duplicate manifest at revision %d") % i)
1759 err(_("duplicate manifest at revision %d") % i)
1760
1760
1761 seen[n] = 1
1761 seen[n] = 1
1762
1762
1763 for p in self.manifest.parents(n):
1763 for p in self.manifest.parents(n):
1764 if p not in self.manifest.nodemap:
1764 if p not in self.manifest.nodemap:
1765 err(_("manifest %s has unknown parent %s") %
1765 err(_("manifest %s has unknown parent %s") %
1766 (short(n), short(p)))
1766 (short(n), short(p)))
1767
1767
1768 try:
1768 try:
1769 delta = mdiff.patchtext(self.manifest.delta(n))
1769 delta = mdiff.patchtext(self.manifest.delta(n))
1770 except KeyboardInterrupt:
1770 except KeyboardInterrupt:
1771 self.ui.warn(_("interrupted"))
1771 self.ui.warn(_("interrupted"))
1772 raise
1772 raise
1773 except Exception, inst:
1773 except Exception, inst:
1774 err(_("unpacking manifest %s: %s") % (short(n), inst))
1774 err(_("unpacking manifest %s: %s") % (short(n), inst))
1775
1775
1776 ff = [ l.split('\0') for l in delta.splitlines() ]
1776 ff = [ l.split('\0') for l in delta.splitlines() ]
1777 for f, fn in ff:
1777 for f, fn in ff:
1778 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1778 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1779
1779
1780 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1780 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1781
1781
1782 for m, c in neededmanifests.items():
1782 for m, c in neededmanifests.items():
1783 err(_("Changeset %s refers to unknown manifest %s") %
1783 err(_("Changeset %s refers to unknown manifest %s") %
1784 (short(m), short(c)))
1784 (short(m), short(c)))
1785 del neededmanifests
1785 del neededmanifests
1786
1786
1787 for f in filenodes:
1787 for f in filenodes:
1788 if f not in filelinkrevs:
1788 if f not in filelinkrevs:
1789 err(_("file %s in manifest but not in changesets") % f)
1789 err(_("file %s in manifest but not in changesets") % f)
1790
1790
1791 for f in filelinkrevs:
1791 for f in filelinkrevs:
1792 if f not in filenodes:
1792 if f not in filenodes:
1793 err(_("file %s in changeset but not in manifest") % f)
1793 err(_("file %s in changeset but not in manifest") % f)
1794
1794
1795 self.ui.status(_("checking files\n"))
1795 self.ui.status(_("checking files\n"))
1796 ff = filenodes.keys()
1796 ff = filenodes.keys()
1797 ff.sort()
1797 ff.sort()
1798 for f in ff:
1798 for f in ff:
1799 if f == "/dev/null":
1799 if f == "/dev/null":
1800 continue
1800 continue
1801 files += 1
1801 files += 1
1802 fl = self.file(f)
1802 fl = self.file(f)
1803 checksize(fl, f)
1803 checksize(fl, f)
1804
1804
1805 nodes = {nullid: 1}
1805 nodes = {nullid: 1}
1806 seen = {}
1806 seen = {}
1807 for i in range(fl.count()):
1807 for i in range(fl.count()):
1808 revisions += 1
1808 revisions += 1
1809 n = fl.node(i)
1809 n = fl.node(i)
1810
1810
1811 if n in seen:
1811 if n in seen:
1812 err(_("%s: duplicate revision %d") % (f, i))
1812 err(_("%s: duplicate revision %d") % (f, i))
1813 if n not in filenodes[f]:
1813 if n not in filenodes[f]:
1814 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1814 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1815 else:
1815 else:
1816 del filenodes[f][n]
1816 del filenodes[f][n]
1817
1817
1818 flr = fl.linkrev(n)
1818 flr = fl.linkrev(n)
1819 if flr not in filelinkrevs[f]:
1819 if flr not in filelinkrevs[f]:
1820 err(_("%s:%s points to unexpected changeset %d")
1820 err(_("%s:%s points to unexpected changeset %d")
1821 % (f, short(n), flr))
1821 % (f, short(n), flr))
1822 else:
1822 else:
1823 filelinkrevs[f].remove(flr)
1823 filelinkrevs[f].remove(flr)
1824
1824
1825 # verify contents
1825 # verify contents
1826 try:
1826 try:
1827 t = fl.read(n)
1827 t = fl.read(n)
1828 except KeyboardInterrupt:
1828 except KeyboardInterrupt:
1829 self.ui.warn(_("interrupted"))
1829 self.ui.warn(_("interrupted"))
1830 raise
1830 raise
1831 except Exception, inst:
1831 except Exception, inst:
1832 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1832 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1833
1833
1834 # verify parents
1834 # verify parents
1835 (p1, p2) = fl.parents(n)
1835 (p1, p2) = fl.parents(n)
1836 if p1 not in nodes:
1836 if p1 not in nodes:
1837 err(_("file %s:%s unknown parent 1 %s") %
1837 err(_("file %s:%s unknown parent 1 %s") %
1838 (f, short(n), short(p1)))
1838 (f, short(n), short(p1)))
1839 if p2 not in nodes:
1839 if p2 not in nodes:
1840 err(_("file %s:%s unknown parent 2 %s") %
1840 err(_("file %s:%s unknown parent 2 %s") %
1841 (f, short(n), short(p1)))
1841 (f, short(n), short(p1)))
1842 nodes[n] = 1
1842 nodes[n] = 1
1843
1843
1844 # cross-check
1844 # cross-check
1845 for node in filenodes[f]:
1845 for node in filenodes[f]:
1846 err(_("node %s in manifests not in %s") % (hex(node), f))
1846 err(_("node %s in manifests not in %s") % (hex(node), f))
1847
1847
1848 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1848 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1849 (files, changesets, revisions))
1849 (files, changesets, revisions))
1850
1850
1851 if errors[0]:
1851 if errors[0]:
1852 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1852 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1853 return 1
1853 return 1
@@ -1,52 +1,59 b''
1 # lock.py - simple locking scheme for mercurial
1 # lock.py - simple locking scheme for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, time
8 import errno, os, time
9 import util
9 import util
10
10
11 class LockHeld(Exception):
11 class LockException(Exception):
12 pass
13 class LockHeld(LockException):
14 pass
15 class LockUnavailable(LockException):
12 pass
16 pass
13
17
14 class lock(object):
18 class lock(object):
15 def __init__(self, file, wait=1, releasefn=None):
19 def __init__(self, file, wait=1, releasefn=None):
16 self.f = file
20 self.f = file
17 self.held = 0
21 self.held = 0
18 self.wait = wait
22 self.wait = wait
19 self.releasefn = releasefn
23 self.releasefn = releasefn
20 self.lock()
24 self.lock()
21
25
22 def __del__(self):
26 def __del__(self):
23 self.release()
27 self.release()
24
28
25 def lock(self):
29 def lock(self):
26 while 1:
30 while 1:
27 try:
31 try:
28 self.trylock()
32 self.trylock()
29 return 1
33 return 1
30 except LockHeld, inst:
34 except LockHeld, inst:
31 if self.wait:
35 if self.wait:
32 time.sleep(1)
36 time.sleep(1)
33 continue
37 continue
34 raise inst
38 raise inst
35
39
36 def trylock(self):
40 def trylock(self):
37 pid = os.getpid()
41 pid = os.getpid()
38 try:
42 try:
39 util.makelock(str(pid), self.f)
43 util.makelock(str(pid), self.f)
40 self.held = 1
44 self.held = 1
41 except (OSError, IOError):
45 except (OSError, IOError), why:
42 raise LockHeld(util.readlock(self.f))
46 if why.errno == errno.EEXIST:
47 raise LockHeld(util.readlock(self.f))
48 else:
49 raise LockUnavailable(why)
43
50
44 def release(self):
51 def release(self):
45 if self.held:
52 if self.held:
46 self.held = 0
53 self.held = 0
47 if self.releasefn:
54 if self.releasefn:
48 self.releasefn()
55 self.releasefn()
49 try:
56 try:
50 os.unlink(self.f)
57 os.unlink(self.f)
51 except: pass
58 except: pass
52
59
@@ -1,870 +1,864 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
17
17
18 def hash(text, p1, p2):
18 def hash(text, p1, p2):
19 """generate a hash from the given text and its parent hashes
19 """generate a hash from the given text and its parent hashes
20
20
21 This hash combines both the current file contents and its history
21 This hash combines both the current file contents and its history
22 in a manner that makes it easy to distinguish nodes with the same
22 in a manner that makes it easy to distinguish nodes with the same
23 content in the revision graph.
23 content in the revision graph.
24 """
24 """
25 l = [p1, p2]
25 l = [p1, p2]
26 l.sort()
26 l.sort()
27 s = sha.new(l[0])
27 s = sha.new(l[0])
28 s.update(l[1])
28 s.update(l[1])
29 s.update(text)
29 s.update(text)
30 return s.digest()
30 return s.digest()
31
31
32 def compress(text):
32 def compress(text):
33 """ generate a possibly-compressed representation of text """
33 """ generate a possibly-compressed representation of text """
34 if not text: return ("", text)
34 if not text: return ("", text)
35 if len(text) < 44:
35 if len(text) < 44:
36 if text[0] == '\0': return ("", text)
36 if text[0] == '\0': return ("", text)
37 return ('u', text)
37 return ('u', text)
38 bin = zlib.compress(text)
38 bin = zlib.compress(text)
39 if len(bin) > len(text):
39 if len(bin) > len(text):
40 if text[0] == '\0': return ("", text)
40 if text[0] == '\0': return ("", text)
41 return ('u', text)
41 return ('u', text)
42 return ("", bin)
42 return ("", bin)
43
43
44 def decompress(bin):
44 def decompress(bin):
45 """ decompress the given input """
45 """ decompress the given input """
46 if not bin: return bin
46 if not bin: return bin
47 t = bin[0]
47 t = bin[0]
48 if t == '\0': return bin
48 if t == '\0': return bin
49 if t == 'x': return zlib.decompress(bin)
49 if t == 'x': return zlib.decompress(bin)
50 if t == 'u': return bin[1:]
50 if t == 'u': return bin[1:]
51 raise RevlogError(_("unknown compression type %s") % t)
51 raise RevlogError(_("unknown compression type %s") % t)
52
52
53 indexformat = ">4l20s20s20s"
53 indexformat = ">4l20s20s20s"
54
54
55 class lazyparser(object):
55 class lazyparser(object):
56 """
56 """
57 this class avoids the need to parse the entirety of large indices
57 this class avoids the need to parse the entirety of large indices
58
58
59 By default we parse and load 1000 entries at a time.
59 By default we parse and load 1000 entries at a time.
60
60
61 If no position is specified, we load the whole index, and replace
61 If no position is specified, we load the whole index, and replace
62 the lazy objects in revlog with the underlying objects for
62 the lazy objects in revlog with the underlying objects for
63 efficiency in cases where we look at most of the nodes.
63 efficiency in cases where we look at most of the nodes.
64 """
64 """
65 def __init__(self, data, revlog):
65 def __init__(self, data, revlog):
66 self.data = data
66 self.data = data
67 self.s = struct.calcsize(indexformat)
67 self.s = struct.calcsize(indexformat)
68 self.l = len(data)/self.s
68 self.l = len(data)/self.s
69 self.index = [None] * self.l
69 self.index = [None] * self.l
70 self.map = {nullid: -1}
70 self.map = {nullid: -1}
71 self.all = 0
71 self.all = 0
72 self.revlog = revlog
72 self.revlog = revlog
73
73
74 def trunc(self, pos):
74 def trunc(self, pos):
75 self.l = pos/self.s
75 self.l = pos/self.s
76
76
77 def load(self, pos=None):
77 def load(self, pos=None):
78 if self.all: return
78 if self.all: return
79 if pos is not None:
79 if pos is not None:
80 block = pos / 1000
80 block = pos / 1000
81 i = block * 1000
81 i = block * 1000
82 end = min(self.l, i + 1000)
82 end = min(self.l, i + 1000)
83 else:
83 else:
84 self.all = 1
84 self.all = 1
85 i = 0
85 i = 0
86 end = self.l
86 end = self.l
87 self.revlog.index = self.index
87 self.revlog.index = self.index
88 self.revlog.nodemap = self.map
88 self.revlog.nodemap = self.map
89
89
90 while i < end:
90 while i < end:
91 d = self.data[i * self.s: (i + 1) * self.s]
91 d = self.data[i * self.s: (i + 1) * self.s]
92 e = struct.unpack(indexformat, d)
92 e = struct.unpack(indexformat, d)
93 self.index[i] = e
93 self.index[i] = e
94 self.map[e[6]] = i
94 self.map[e[6]] = i
95 i += 1
95 i += 1
96
96
97 class lazyindex(object):
97 class lazyindex(object):
98 """a lazy version of the index array"""
98 """a lazy version of the index array"""
99 def __init__(self, parser):
99 def __init__(self, parser):
100 self.p = parser
100 self.p = parser
101 def __len__(self):
101 def __len__(self):
102 return len(self.p.index)
102 return len(self.p.index)
103 def load(self, pos):
103 def load(self, pos):
104 if pos < 0:
104 if pos < 0:
105 pos += len(self.p.index)
105 pos += len(self.p.index)
106 self.p.load(pos)
106 self.p.load(pos)
107 return self.p.index[pos]
107 return self.p.index[pos]
108 def __getitem__(self, pos):
108 def __getitem__(self, pos):
109 return self.p.index[pos] or self.load(pos)
109 return self.p.index[pos] or self.load(pos)
110 def __delitem__(self, pos):
110 def __delitem__(self, pos):
111 del self.p.index[pos]
111 del self.p.index[pos]
112 def append(self, e):
112 def append(self, e):
113 self.p.index.append(e)
113 self.p.index.append(e)
114 def trunc(self, pos):
114 def trunc(self, pos):
115 self.p.trunc(pos)
115 self.p.trunc(pos)
116
116
117 class lazymap(object):
117 class lazymap(object):
118 """a lazy version of the node map"""
118 """a lazy version of the node map"""
119 def __init__(self, parser):
119 def __init__(self, parser):
120 self.p = parser
120 self.p = parser
121 def load(self, key):
121 def load(self, key):
122 if self.p.all: return
122 if self.p.all: return
123 n = self.p.data.find(key)
123 n = self.p.data.find(key)
124 if n < 0:
124 if n < 0:
125 raise KeyError(key)
125 raise KeyError(key)
126 pos = n / self.p.s
126 pos = n / self.p.s
127 self.p.load(pos)
127 self.p.load(pos)
128 def __contains__(self, key):
128 def __contains__(self, key):
129 self.p.load()
129 self.p.load()
130 return key in self.p.map
130 return key in self.p.map
131 def __iter__(self):
131 def __iter__(self):
132 yield nullid
132 yield nullid
133 for i in xrange(self.p.l):
133 for i in xrange(self.p.l):
134 try:
134 try:
135 yield self.p.index[i][6]
135 yield self.p.index[i][6]
136 except:
136 except:
137 self.p.load(i)
137 self.p.load(i)
138 yield self.p.index[i][6]
138 yield self.p.index[i][6]
139 def __getitem__(self, key):
139 def __getitem__(self, key):
140 try:
140 try:
141 return self.p.map[key]
141 return self.p.map[key]
142 except KeyError:
142 except KeyError:
143 try:
143 try:
144 self.load(key)
144 self.load(key)
145 return self.p.map[key]
145 return self.p.map[key]
146 except KeyError:
146 except KeyError:
147 raise KeyError("node " + hex(key))
147 raise KeyError("node " + hex(key))
148 def __setitem__(self, key, val):
148 def __setitem__(self, key, val):
149 self.p.map[key] = val
149 self.p.map[key] = val
150 def __delitem__(self, key):
150 def __delitem__(self, key):
151 del self.p.map[key]
151 del self.p.map[key]
152
152
153 class RevlogError(Exception): pass
153 class RevlogError(Exception): pass
154
154
155 class revlog(object):
155 class revlog(object):
156 """
156 """
157 the underlying revision storage object
157 the underlying revision storage object
158
158
159 A revlog consists of two parts, an index and the revision data.
159 A revlog consists of two parts, an index and the revision data.
160
160
161 The index is a file with a fixed record size containing
161 The index is a file with a fixed record size containing
162 information on each revision, includings its nodeid (hash), the
162 information on each revision, includings its nodeid (hash), the
163 nodeids of its parents, the position and offset of its data within
163 nodeids of its parents, the position and offset of its data within
164 the data file, and the revision it's based on. Finally, each entry
164 the data file, and the revision it's based on. Finally, each entry
165 contains a linkrev entry that can serve as a pointer to external
165 contains a linkrev entry that can serve as a pointer to external
166 data.
166 data.
167
167
168 The revision data itself is a linear collection of data chunks.
168 The revision data itself is a linear collection of data chunks.
169 Each chunk represents a revision and is usually represented as a
169 Each chunk represents a revision and is usually represented as a
170 delta against the previous chunk. To bound lookup time, runs of
170 delta against the previous chunk. To bound lookup time, runs of
171 deltas are limited to about 2 times the length of the original
171 deltas are limited to about 2 times the length of the original
172 version data. This makes retrieval of a version proportional to
172 version data. This makes retrieval of a version proportional to
173 its size, or O(1) relative to the number of revisions.
173 its size, or O(1) relative to the number of revisions.
174
174
175 Both pieces of the revlog are written to in an append-only
175 Both pieces of the revlog are written to in an append-only
176 fashion, which means we never need to rewrite a file to insert or
176 fashion, which means we never need to rewrite a file to insert or
177 remove data, and can use some simple techniques to avoid the need
177 remove data, and can use some simple techniques to avoid the need
178 for locking while reading.
178 for locking while reading.
179 """
179 """
180 def __init__(self, opener, indexfile, datafile):
180 def __init__(self, opener, indexfile, datafile):
181 """
181 """
182 create a revlog object
182 create a revlog object
183
183
184 opener is a function that abstracts the file opening operation
184 opener is a function that abstracts the file opening operation
185 and can be used to implement COW semantics or the like.
185 and can be used to implement COW semantics or the like.
186 """
186 """
187 self.indexfile = indexfile
187 self.indexfile = indexfile
188 self.datafile = datafile
188 self.datafile = datafile
189 self.opener = opener
189 self.opener = opener
190 self.cache = None
190 self.cache = None
191 self.chunkcache = None
191 self.chunkcache = None
192
192
193 try:
193 try:
194 i = self.opener(self.indexfile).read()
194 i = self.opener(self.indexfile).read()
195 except IOError, inst:
195 except IOError, inst:
196 if inst.errno != errno.ENOENT:
196 if inst.errno != errno.ENOENT:
197 raise
197 raise
198 i = ""
198 i = ""
199
199
200 if i and i[:4] != "\0\0\0\0":
200 if i and i[:4] != "\0\0\0\0":
201 raise RevlogError(_("incompatible revlog signature on %s") %
201 raise RevlogError(_("incompatible revlog signature on %s") %
202 self.indexfile)
202 self.indexfile)
203
203
204 if len(i) > 10000:
204 if len(i) > 10000:
205 # big index, let's parse it on demand
205 # big index, let's parse it on demand
206 parser = lazyparser(i, self)
206 parser = lazyparser(i, self)
207 self.index = lazyindex(parser)
207 self.index = lazyindex(parser)
208 self.nodemap = lazymap(parser)
208 self.nodemap = lazymap(parser)
209 else:
209 else:
210 s = struct.calcsize(indexformat)
210 s = struct.calcsize(indexformat)
211 l = len(i) / s
211 l = len(i) / s
212 self.index = [None] * l
212 self.index = [None] * l
213 m = [None] * l
213 m = [None] * l
214
214
215 n = 0
215 n = 0
216 for f in xrange(0, l * s, s):
216 for f in xrange(0, l * s, s):
217 # offset, size, base, linkrev, p1, p2, nodeid
217 # offset, size, base, linkrev, p1, p2, nodeid
218 e = struct.unpack(indexformat, i[f:f + s])
218 e = struct.unpack(indexformat, i[f:f + s])
219 m[n] = (e[6], n)
219 m[n] = (e[6], n)
220 self.index[n] = e
220 self.index[n] = e
221 n += 1
221 n += 1
222
222
223 self.nodemap = dict(m)
223 self.nodemap = dict(m)
224 self.nodemap[nullid] = -1
224 self.nodemap[nullid] = -1
225
225
226 def tip(self): return self.node(len(self.index) - 1)
226 def tip(self): return self.node(len(self.index) - 1)
227 def count(self): return len(self.index)
227 def count(self): return len(self.index)
228 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
228 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
229 def rev(self, node):
229 def rev(self, node):
230 try:
230 try:
231 return self.nodemap[node]
231 return self.nodemap[node]
232 except KeyError:
232 except KeyError:
233 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
233 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
234 def linkrev(self, node): return self.index[self.rev(node)][3]
234 def linkrev(self, node): return self.index[self.rev(node)][3]
235 def parents(self, node):
235 def parents(self, node):
236 if node == nullid: return (nullid, nullid)
236 if node == nullid: return (nullid, nullid)
237 return self.index[self.rev(node)][4:6]
237 return self.index[self.rev(node)][4:6]
238
238
239 def start(self, rev): return self.index[rev][0]
239 def start(self, rev): return self.index[rev][0]
240 def length(self, rev): return self.index[rev][1]
240 def length(self, rev): return self.index[rev][1]
241 def end(self, rev): return self.start(rev) + self.length(rev)
241 def end(self, rev): return self.start(rev) + self.length(rev)
242 def base(self, rev): return self.index[rev][2]
242 def base(self, rev): return self.index[rev][2]
243
243
244 def reachable(self, rev, stop=None):
244 def reachable(self, rev, stop=None):
245 reachable = {}
245 reachable = {}
246 visit = [rev]
246 visit = [rev]
247 reachable[rev] = 1
247 reachable[rev] = 1
248 if stop:
248 if stop:
249 stopn = self.rev(stop)
249 stopn = self.rev(stop)
250 else:
250 else:
251 stopn = 0
251 stopn = 0
252 while visit:
252 while visit:
253 n = visit.pop(0)
253 n = visit.pop(0)
254 if n == stop:
254 if n == stop:
255 continue
255 continue
256 if n == nullid:
256 if n == nullid:
257 continue
257 continue
258 for p in self.parents(n):
258 for p in self.parents(n):
259 if self.rev(p) < stopn:
259 if self.rev(p) < stopn:
260 continue
260 continue
261 if p not in reachable:
261 if p not in reachable:
262 reachable[p] = 1
262 reachable[p] = 1
263 visit.append(p)
263 visit.append(p)
264 return reachable
264 return reachable
265
265
266 def nodesbetween(self, roots=None, heads=None):
266 def nodesbetween(self, roots=None, heads=None):
267 """Return a tuple containing three elements. Elements 1 and 2 contain
267 """Return a tuple containing three elements. Elements 1 and 2 contain
268 a final list bases and heads after all the unreachable ones have been
268 a final list bases and heads after all the unreachable ones have been
269 pruned. Element 0 contains a topologically sorted list of all
269 pruned. Element 0 contains a topologically sorted list of all
270
270
271 nodes that satisfy these constraints:
271 nodes that satisfy these constraints:
272 1. All nodes must be descended from a node in roots (the nodes on
272 1. All nodes must be descended from a node in roots (the nodes on
273 roots are considered descended from themselves).
273 roots are considered descended from themselves).
274 2. All nodes must also be ancestors of a node in heads (the nodes in
274 2. All nodes must also be ancestors of a node in heads (the nodes in
275 heads are considered to be their own ancestors).
275 heads are considered to be their own ancestors).
276
276
277 If roots is unspecified, nullid is assumed as the only root.
277 If roots is unspecified, nullid is assumed as the only root.
278 If heads is unspecified, it is taken to be the output of the
278 If heads is unspecified, it is taken to be the output of the
279 heads method (i.e. a list of all nodes in the repository that
279 heads method (i.e. a list of all nodes in the repository that
280 have no children)."""
280 have no children)."""
281 nonodes = ([], [], [])
281 nonodes = ([], [], [])
282 if roots is not None:
282 if roots is not None:
283 roots = list(roots)
283 roots = list(roots)
284 if not roots:
284 if not roots:
285 return nonodes
285 return nonodes
286 lowestrev = min([self.rev(n) for n in roots])
286 lowestrev = min([self.rev(n) for n in roots])
287 else:
287 else:
288 roots = [nullid] # Everybody's a descendent of nullid
288 roots = [nullid] # Everybody's a descendent of nullid
289 lowestrev = -1
289 lowestrev = -1
290 if (lowestrev == -1) and (heads is None):
290 if (lowestrev == -1) and (heads is None):
291 # We want _all_ the nodes!
291 # We want _all_ the nodes!
292 return ([self.node(r) for r in xrange(0, self.count())],
292 return ([self.node(r) for r in xrange(0, self.count())],
293 [nullid], list(self.heads()))
293 [nullid], list(self.heads()))
294 if heads is None:
294 if heads is None:
295 # All nodes are ancestors, so the latest ancestor is the last
295 # All nodes are ancestors, so the latest ancestor is the last
296 # node.
296 # node.
297 highestrev = self.count() - 1
297 highestrev = self.count() - 1
298 # Set ancestors to None to signal that every node is an ancestor.
298 # Set ancestors to None to signal that every node is an ancestor.
299 ancestors = None
299 ancestors = None
300 # Set heads to an empty dictionary for later discovery of heads
300 # Set heads to an empty dictionary for later discovery of heads
301 heads = {}
301 heads = {}
302 else:
302 else:
303 heads = list(heads)
303 heads = list(heads)
304 if not heads:
304 if not heads:
305 return nonodes
305 return nonodes
306 ancestors = {}
306 ancestors = {}
307 # Start at the top and keep marking parents until we're done.
307 # Start at the top and keep marking parents until we're done.
308 nodestotag = heads[:]
308 nodestotag = heads[:]
309 # Turn heads into a dictionary so we can remove 'fake' heads.
309 # Turn heads into a dictionary so we can remove 'fake' heads.
310 # Also, later we will be using it to filter out the heads we can't
310 # Also, later we will be using it to filter out the heads we can't
311 # find from roots.
311 # find from roots.
312 heads = dict.fromkeys(heads, 0)
312 heads = dict.fromkeys(heads, 0)
313 # Remember where the top was so we can use it as a limit later.
313 # Remember where the top was so we can use it as a limit later.
314 highestrev = max([self.rev(n) for n in nodestotag])
314 highestrev = max([self.rev(n) for n in nodestotag])
315 while nodestotag:
315 while nodestotag:
316 # grab a node to tag
316 # grab a node to tag
317 n = nodestotag.pop()
317 n = nodestotag.pop()
318 # Never tag nullid
318 # Never tag nullid
319 if n == nullid:
319 if n == nullid:
320 continue
320 continue
321 # A node's revision number represents its place in a
321 # A node's revision number represents its place in a
322 # topologically sorted list of nodes.
322 # topologically sorted list of nodes.
323 r = self.rev(n)
323 r = self.rev(n)
324 if r >= lowestrev:
324 if r >= lowestrev:
325 if n not in ancestors:
325 if n not in ancestors:
326 # If we are possibly a descendent of one of the roots
326 # If we are possibly a descendent of one of the roots
327 # and we haven't already been marked as an ancestor
327 # and we haven't already been marked as an ancestor
328 ancestors[n] = 1 # Mark as ancestor
328 ancestors[n] = 1 # Mark as ancestor
329 # Add non-nullid parents to list of nodes to tag.
329 # Add non-nullid parents to list of nodes to tag.
330 nodestotag.extend([p for p in self.parents(n) if
330 nodestotag.extend([p for p in self.parents(n) if
331 p != nullid])
331 p != nullid])
332 elif n in heads: # We've seen it before, is it a fake head?
332 elif n in heads: # We've seen it before, is it a fake head?
333 # So it is, real heads should not be the ancestors of
333 # So it is, real heads should not be the ancestors of
334 # any other heads.
334 # any other heads.
335 heads.pop(n)
335 heads.pop(n)
336 if not ancestors:
336 if not ancestors:
337 return nonodes
337 return nonodes
338 # Now that we have our set of ancestors, we want to remove any
338 # Now that we have our set of ancestors, we want to remove any
339 # roots that are not ancestors.
339 # roots that are not ancestors.
340
340
341 # If one of the roots was nullid, everything is included anyway.
341 # If one of the roots was nullid, everything is included anyway.
342 if lowestrev > -1:
342 if lowestrev > -1:
343 # But, since we weren't, let's recompute the lowest rev to not
343 # But, since we weren't, let's recompute the lowest rev to not
344 # include roots that aren't ancestors.
344 # include roots that aren't ancestors.
345
345
346 # Filter out roots that aren't ancestors of heads
346 # Filter out roots that aren't ancestors of heads
347 roots = [n for n in roots if n in ancestors]
347 roots = [n for n in roots if n in ancestors]
348 # Recompute the lowest revision
348 # Recompute the lowest revision
349 if roots:
349 if roots:
350 lowestrev = min([self.rev(n) for n in roots])
350 lowestrev = min([self.rev(n) for n in roots])
351 else:
351 else:
352 # No more roots? Return empty list
352 # No more roots? Return empty list
353 return nonodes
353 return nonodes
354 else:
354 else:
355 # We are descending from nullid, and don't need to care about
355 # We are descending from nullid, and don't need to care about
356 # any other roots.
356 # any other roots.
357 lowestrev = -1
357 lowestrev = -1
358 roots = [nullid]
358 roots = [nullid]
359 # Transform our roots list into a 'set' (i.e. a dictionary where the
359 # Transform our roots list into a 'set' (i.e. a dictionary where the
360 # values don't matter.
360 # values don't matter.
361 descendents = dict.fromkeys(roots, 1)
361 descendents = dict.fromkeys(roots, 1)
362 # Also, keep the original roots so we can filter out roots that aren't
362 # Also, keep the original roots so we can filter out roots that aren't
363 # 'real' roots (i.e. are descended from other roots).
363 # 'real' roots (i.e. are descended from other roots).
364 roots = descendents.copy()
364 roots = descendents.copy()
365 # Our topologically sorted list of output nodes.
365 # Our topologically sorted list of output nodes.
366 orderedout = []
366 orderedout = []
367 # Don't start at nullid since we don't want nullid in our output list,
367 # Don't start at nullid since we don't want nullid in our output list,
368 # and if nullid shows up in descedents, empty parents will look like
368 # and if nullid shows up in descedents, empty parents will look like
369 # they're descendents.
369 # they're descendents.
370 for r in xrange(max(lowestrev, 0), highestrev + 1):
370 for r in xrange(max(lowestrev, 0), highestrev + 1):
371 n = self.node(r)
371 n = self.node(r)
372 isdescendent = False
372 isdescendent = False
373 if lowestrev == -1: # Everybody is a descendent of nullid
373 if lowestrev == -1: # Everybody is a descendent of nullid
374 isdescendent = True
374 isdescendent = True
375 elif n in descendents:
375 elif n in descendents:
376 # n is already a descendent
376 # n is already a descendent
377 isdescendent = True
377 isdescendent = True
378 # This check only needs to be done here because all the roots
378 # This check only needs to be done here because all the roots
379 # will start being marked is descendents before the loop.
379 # will start being marked is descendents before the loop.
380 if n in roots:
380 if n in roots:
381 # If n was a root, check if it's a 'real' root.
381 # If n was a root, check if it's a 'real' root.
382 p = tuple(self.parents(n))
382 p = tuple(self.parents(n))
383 # If any of its parents are descendents, it's not a root.
383 # If any of its parents are descendents, it's not a root.
384 if (p[0] in descendents) or (p[1] in descendents):
384 if (p[0] in descendents) or (p[1] in descendents):
385 roots.pop(n)
385 roots.pop(n)
386 else:
386 else:
387 p = tuple(self.parents(n))
387 p = tuple(self.parents(n))
388 # A node is a descendent if either of its parents are
388 # A node is a descendent if either of its parents are
389 # descendents. (We seeded the dependents list with the roots
389 # descendents. (We seeded the dependents list with the roots
390 # up there, remember?)
390 # up there, remember?)
391 if (p[0] in descendents) or (p[1] in descendents):
391 if (p[0] in descendents) or (p[1] in descendents):
392 descendents[n] = 1
392 descendents[n] = 1
393 isdescendent = True
393 isdescendent = True
394 if isdescendent and ((ancestors is None) or (n in ancestors)):
394 if isdescendent and ((ancestors is None) or (n in ancestors)):
395 # Only include nodes that are both descendents and ancestors.
395 # Only include nodes that are both descendents and ancestors.
396 orderedout.append(n)
396 orderedout.append(n)
397 if (ancestors is not None) and (n in heads):
397 if (ancestors is not None) and (n in heads):
398 # We're trying to figure out which heads are reachable
398 # We're trying to figure out which heads are reachable
399 # from roots.
399 # from roots.
400 # Mark this head as having been reached
400 # Mark this head as having been reached
401 heads[n] = 1
401 heads[n] = 1
402 elif ancestors is None:
402 elif ancestors is None:
403 # Otherwise, we're trying to discover the heads.
403 # Otherwise, we're trying to discover the heads.
404 # Assume this is a head because if it isn't, the next step
404 # Assume this is a head because if it isn't, the next step
405 # will eventually remove it.
405 # will eventually remove it.
406 heads[n] = 1
406 heads[n] = 1
407 # But, obviously its parents aren't.
407 # But, obviously its parents aren't.
408 for p in self.parents(n):
408 for p in self.parents(n):
409 heads.pop(p, None)
409 heads.pop(p, None)
410 heads = [n for n in heads.iterkeys() if heads[n] != 0]
410 heads = [n for n in heads.iterkeys() if heads[n] != 0]
411 roots = roots.keys()
411 roots = roots.keys()
412 assert orderedout
412 assert orderedout
413 assert roots
413 assert roots
414 assert heads
414 assert heads
415 return (orderedout, roots, heads)
415 return (orderedout, roots, heads)
416
416
417 def heads(self, start=None):
417 def heads(self, start=None):
418 """return the list of all nodes that have no children
418 """return the list of all nodes that have no children
419
419
420 if start is specified, only heads that are descendants of
420 if start is specified, only heads that are descendants of
421 start will be returned
421 start will be returned
422
422
423 """
423 """
424 if start is None:
424 if start is None:
425 start = nullid
425 start = nullid
426 reachable = {start: 1}
426 reachable = {start: 1}
427 heads = {start: 1}
427 heads = {start: 1}
428 startrev = self.rev(start)
428 startrev = self.rev(start)
429
429
430 for r in xrange(startrev + 1, self.count()):
430 for r in xrange(startrev + 1, self.count()):
431 n = self.node(r)
431 n = self.node(r)
432 for pn in self.parents(n):
432 for pn in self.parents(n):
433 if pn in reachable:
433 if pn in reachable:
434 reachable[n] = 1
434 reachable[n] = 1
435 heads[n] = 1
435 heads[n] = 1
436 if pn in heads:
436 if pn in heads:
437 del heads[pn]
437 del heads[pn]
438 return heads.keys()
438 return heads.keys()
439
439
440 def children(self, node):
440 def children(self, node):
441 """find the children of a given node"""
441 """find the children of a given node"""
442 c = []
442 c = []
443 p = self.rev(node)
443 p = self.rev(node)
444 for r in range(p + 1, self.count()):
444 for r in range(p + 1, self.count()):
445 n = self.node(r)
445 n = self.node(r)
446 for pn in self.parents(n):
446 for pn in self.parents(n):
447 if pn == node:
447 if pn == node:
448 c.append(n)
448 c.append(n)
449 continue
449 continue
450 elif pn == nullid:
450 elif pn == nullid:
451 continue
451 continue
452 return c
452 return c
453
453
454 def lookup(self, id):
454 def lookup(self, id):
455 """locate a node based on revision number or subset of hex nodeid"""
455 """locate a node based on revision number or subset of hex nodeid"""
456 try:
456 try:
457 rev = int(id)
457 rev = int(id)
458 if str(rev) != id: raise ValueError
458 if str(rev) != id: raise ValueError
459 if rev < 0: rev = self.count() + rev
459 if rev < 0: rev = self.count() + rev
460 if rev < 0 or rev >= self.count(): raise ValueError
460 if rev < 0 or rev >= self.count(): raise ValueError
461 return self.node(rev)
461 return self.node(rev)
462 except (ValueError, OverflowError):
462 except (ValueError, OverflowError):
463 c = []
463 c = []
464 for n in self.nodemap:
464 for n in self.nodemap:
465 if hex(n).startswith(id):
465 if hex(n).startswith(id):
466 c.append(n)
466 c.append(n)
467 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
467 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
468 if len(c) < 1: raise RevlogError(_("No match found"))
468 if len(c) < 1: raise RevlogError(_("No match found"))
469 return c[0]
469 return c[0]
470
470
471 return None
471 return None
472
472
473 def diff(self, a, b):
473 def diff(self, a, b):
474 """return a delta between two revisions"""
474 """return a delta between two revisions"""
475 return mdiff.textdiff(a, b)
475 return mdiff.textdiff(a, b)
476
476
477 def patches(self, t, pl):
477 def patches(self, t, pl):
478 """apply a list of patches to a string"""
478 """apply a list of patches to a string"""
479 return mdiff.patches(t, pl)
479 return mdiff.patches(t, pl)
480
480
481 def chunk(self, rev):
481 def chunk(self, rev):
482 start, length = self.start(rev), self.length(rev)
482 start, length = self.start(rev), self.length(rev)
483 end = start + length
483 end = start + length
484
484
485 def loadcache():
485 def loadcache():
486 cache_length = max(4096 * 1024, length) # 4Mo
486 cache_length = max(4096 * 1024, length) # 4Mo
487 df = self.opener(self.datafile)
487 df = self.opener(self.datafile)
488 df.seek(start)
488 df.seek(start)
489 self.chunkcache = (start, df.read(cache_length))
489 self.chunkcache = (start, df.read(cache_length))
490
490
491 if not self.chunkcache:
491 if not self.chunkcache:
492 loadcache()
492 loadcache()
493
493
494 cache_start = self.chunkcache[0]
494 cache_start = self.chunkcache[0]
495 cache_end = cache_start + len(self.chunkcache[1])
495 cache_end = cache_start + len(self.chunkcache[1])
496 if start >= cache_start and end <= cache_end:
496 if start >= cache_start and end <= cache_end:
497 # it is cached
497 # it is cached
498 offset = start - cache_start
498 offset = start - cache_start
499 else:
499 else:
500 loadcache()
500 loadcache()
501 offset = 0
501 offset = 0
502
502
503 #def checkchunk():
503 #def checkchunk():
504 # df = self.opener(self.datafile)
504 # df = self.opener(self.datafile)
505 # df.seek(start)
505 # df.seek(start)
506 # return df.read(length)
506 # return df.read(length)
507 #assert s == checkchunk()
507 #assert s == checkchunk()
508 return decompress(self.chunkcache[1][offset:offset + length])
508 return decompress(self.chunkcache[1][offset:offset + length])
509
509
510 def delta(self, node):
510 def delta(self, node):
511 """return or calculate a delta between a node and its predecessor"""
511 """return or calculate a delta between a node and its predecessor"""
512 r = self.rev(node)
512 r = self.rev(node)
513 b = self.base(r)
513 b = self.base(r)
514 if r == b:
514 if r == b:
515 return self.diff(self.revision(self.node(r - 1)),
515 return self.diff(self.revision(self.node(r - 1)),
516 self.revision(node))
516 self.revision(node))
517 else:
517 else:
518 return self.chunk(r)
518 return self.chunk(r)
519
519
520 def revision(self, node):
520 def revision(self, node):
521 """return an uncompressed revision of a given"""
521 """return an uncompressed revision of a given"""
522 if node == nullid: return ""
522 if node == nullid: return ""
523 if self.cache and self.cache[0] == node: return self.cache[2]
523 if self.cache and self.cache[0] == node: return self.cache[2]
524
524
525 # look up what we need to read
525 # look up what we need to read
526 text = None
526 text = None
527 rev = self.rev(node)
527 rev = self.rev(node)
528 base = self.base(rev)
528 base = self.base(rev)
529
529
530 # do we have useful data cached?
530 # do we have useful data cached?
531 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
531 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
532 base = self.cache[1]
532 base = self.cache[1]
533 text = self.cache[2]
533 text = self.cache[2]
534 else:
534 else:
535 text = self.chunk(base)
535 text = self.chunk(base)
536
536
537 bins = []
537 bins = []
538 for r in xrange(base + 1, rev + 1):
538 for r in xrange(base + 1, rev + 1):
539 bins.append(self.chunk(r))
539 bins.append(self.chunk(r))
540
540
541 text = mdiff.patches(text, bins)
541 text = mdiff.patches(text, bins)
542
542
543 p1, p2 = self.parents(node)
543 p1, p2 = self.parents(node)
544 if node != hash(text, p1, p2):
544 if node != hash(text, p1, p2):
545 raise RevlogError(_("integrity check failed on %s:%d")
545 raise RevlogError(_("integrity check failed on %s:%d")
546 % (self.datafile, rev))
546 % (self.datafile, rev))
547
547
548 self.cache = (node, rev, text)
548 self.cache = (node, rev, text)
549 return text
549 return text
550
550
551 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
551 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
552 """add a revision to the log
552 """add a revision to the log
553
553
554 text - the revision data to add
554 text - the revision data to add
555 transaction - the transaction object used for rollback
555 transaction - the transaction object used for rollback
556 link - the linkrev data to add
556 link - the linkrev data to add
557 p1, p2 - the parent nodeids of the revision
557 p1, p2 - the parent nodeids of the revision
558 d - an optional precomputed delta
558 d - an optional precomputed delta
559 """
559 """
560 if text is None: text = ""
560 if text is None: text = ""
561 if p1 is None: p1 = self.tip()
561 if p1 is None: p1 = self.tip()
562 if p2 is None: p2 = nullid
562 if p2 is None: p2 = nullid
563
563
564 node = hash(text, p1, p2)
564 node = hash(text, p1, p2)
565
565
566 if node in self.nodemap:
566 if node in self.nodemap:
567 return node
567 return node
568
568
569 n = self.count()
569 n = self.count()
570 t = n - 1
570 t = n - 1
571
571
572 if n:
572 if n:
573 base = self.base(t)
573 base = self.base(t)
574 start = self.start(base)
574 start = self.start(base)
575 end = self.end(t)
575 end = self.end(t)
576 if not d:
576 if not d:
577 prev = self.revision(self.tip())
577 prev = self.revision(self.tip())
578 d = self.diff(prev, str(text))
578 d = self.diff(prev, str(text))
579 data = compress(d)
579 data = compress(d)
580 l = len(data[1]) + len(data[0])
580 l = len(data[1]) + len(data[0])
581 dist = end - start + l
581 dist = end - start + l
582
582
583 # full versions are inserted when the needed deltas
583 # full versions are inserted when the needed deltas
584 # become comparable to the uncompressed text
584 # become comparable to the uncompressed text
585 if not n or dist > len(text) * 2:
585 if not n or dist > len(text) * 2:
586 data = compress(text)
586 data = compress(text)
587 l = len(data[1]) + len(data[0])
587 l = len(data[1]) + len(data[0])
588 base = n
588 base = n
589 else:
589 else:
590 base = self.base(t)
590 base = self.base(t)
591
591
592 offset = 0
592 offset = 0
593 if t >= 0:
593 if t >= 0:
594 offset = self.end(t)
594 offset = self.end(t)
595
595
596 e = (offset, l, base, link, p1, p2, node)
596 e = (offset, l, base, link, p1, p2, node)
597
597
598 self.index.append(e)
598 self.index.append(e)
599 self.nodemap[node] = n
599 self.nodemap[node] = n
600 entry = struct.pack(indexformat, *e)
600 entry = struct.pack(indexformat, *e)
601
601
602 transaction.add(self.datafile, e[0])
602 transaction.add(self.datafile, e[0])
603 f = self.opener(self.datafile, "a")
603 f = self.opener(self.datafile, "a")
604 if data[0]:
604 if data[0]:
605 f.write(data[0])
605 f.write(data[0])
606 f.write(data[1])
606 f.write(data[1])
607 transaction.add(self.indexfile, n * len(entry))
607 transaction.add(self.indexfile, n * len(entry))
608 self.opener(self.indexfile, "a").write(entry)
608 self.opener(self.indexfile, "a").write(entry)
609
609
610 self.cache = (node, n, text)
610 self.cache = (node, n, text)
611 return node
611 return node
612
612
613 def ancestor(self, a, b):
613 def ancestor(self, a, b):
614 """calculate the least common ancestor of nodes a and b"""
614 """calculate the least common ancestor of nodes a and b"""
615 # calculate the distance of every node from root
615 # calculate the distance of every node from root
616 dist = {nullid: 0}
616 dist = {nullid: 0}
617 for i in xrange(self.count()):
617 for i in xrange(self.count()):
618 n = self.node(i)
618 n = self.node(i)
619 p1, p2 = self.parents(n)
619 p1, p2 = self.parents(n)
620 dist[n] = max(dist[p1], dist[p2]) + 1
620 dist[n] = max(dist[p1], dist[p2]) + 1
621
621
622 # traverse ancestors in order of decreasing distance from root
622 # traverse ancestors in order of decreasing distance from root
623 def ancestors(node):
623 def ancestors(node):
624 # we store negative distances because heap returns smallest member
624 # we store negative distances because heap returns smallest member
625 h = [(-dist[node], node)]
625 h = [(-dist[node], node)]
626 seen = {}
626 seen = {}
627 earliest = self.count()
628 while h:
627 while h:
629 d, n = heapq.heappop(h)
628 d, n = heapq.heappop(h)
630 if n not in seen:
629 if n not in seen:
631 seen[n] = 1
630 seen[n] = 1
632 r = self.rev(n)
633 yield (-d, n)
631 yield (-d, n)
634 for p in self.parents(n):
632 for p in self.parents(n):
635 heapq.heappush(h, (-dist[p], p))
633 heapq.heappush(h, (-dist[p], p))
636
634
637 def generations(node):
635 def generations(node):
638 sg, s = None, {}
636 sg, s = None, {}
639 for g,n in ancestors(node):
637 for g,n in ancestors(node):
640 if g != sg:
638 if g != sg:
641 if sg:
639 if sg:
642 yield sg, s
640 yield sg, s
643 sg, s = g, {n:1}
641 sg, s = g, {n:1}
644 else:
642 else:
645 s[n] = 1
643 s[n] = 1
646 yield sg, s
644 yield sg, s
647
645
648 x = generations(a)
646 x = generations(a)
649 y = generations(b)
647 y = generations(b)
650 gx = x.next()
648 gx = x.next()
651 gy = y.next()
649 gy = y.next()
652
650
653 # increment each ancestor list until it is closer to root than
651 # increment each ancestor list until it is closer to root than
654 # the other, or they match
652 # the other, or they match
655 while 1:
653 while 1:
656 #print "ancestor gen %s %s" % (gx[0], gy[0])
654 #print "ancestor gen %s %s" % (gx[0], gy[0])
657 if gx[0] == gy[0]:
655 if gx[0] == gy[0]:
658 # find the intersection
656 # find the intersection
659 i = [ n for n in gx[1] if n in gy[1] ]
657 i = [ n for n in gx[1] if n in gy[1] ]
660 if i:
658 if i:
661 return i[0]
659 return i[0]
662 else:
660 else:
663 #print "next"
661 #print "next"
664 gy = y.next()
662 gy = y.next()
665 gx = x.next()
663 gx = x.next()
666 elif gx[0] < gy[0]:
664 elif gx[0] < gy[0]:
667 #print "next y"
665 #print "next y"
668 gy = y.next()
666 gy = y.next()
669 else:
667 else:
670 #print "next x"
668 #print "next x"
671 gx = x.next()
669 gx = x.next()
672
670
673 def group(self, nodelist, lookup, infocollect=None):
671 def group(self, nodelist, lookup, infocollect=None):
674 """calculate a delta group
672 """calculate a delta group
675
673
676 Given a list of changeset revs, return a set of deltas and
674 Given a list of changeset revs, return a set of deltas and
677 metadata corresponding to nodes. the first delta is
675 metadata corresponding to nodes. the first delta is
678 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
676 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
679 have this parent as it has all history before these
677 have this parent as it has all history before these
680 changesets. parent is parent[0]
678 changesets. parent is parent[0]
681 """
679 """
682 revs = [self.rev(n) for n in nodelist]
680 revs = [self.rev(n) for n in nodelist]
683
681
684 # if we don't have any revisions touched by these changesets, bail
682 # if we don't have any revisions touched by these changesets, bail
685 if not revs:
683 if not revs:
686 yield struct.pack(">l", 0)
684 yield struct.pack(">l", 0)
687 return
685 return
688
686
689 # add the parent of the first rev
687 # add the parent of the first rev
690 p = self.parents(self.node(revs[0]))[0]
688 p = self.parents(self.node(revs[0]))[0]
691 revs.insert(0, self.rev(p))
689 revs.insert(0, self.rev(p))
692
690
693 # helper to reconstruct intermediate versions
694 def construct(text, base, rev):
695 bins = [self.chunk(r) for r in xrange(base + 1, rev + 1)]
696 return mdiff.patches(text, bins)
697
698 # build deltas
691 # build deltas
699 for d in xrange(0, len(revs) - 1):
692 for d in xrange(0, len(revs) - 1):
700 a, b = revs[d], revs[d + 1]
693 a, b = revs[d], revs[d + 1]
701 na = self.node(a)
694 na = self.node(a)
702 nb = self.node(b)
695 nb = self.node(b)
703
696
704 if infocollect is not None:
697 if infocollect is not None:
705 infocollect(nb)
698 infocollect(nb)
706
699
707 # do we need to construct a new delta?
700 # do we need to construct a new delta?
708 if a + 1 != b or self.base(b) == b:
701 if a + 1 != b or self.base(b) == b:
709 ta = self.revision(na)
702 ta = self.revision(na)
710 tb = self.revision(nb)
703 tb = self.revision(nb)
711 d = self.diff(ta, tb)
704 d = self.diff(ta, tb)
712 else:
705 else:
713 d = self.chunk(b)
706 d = self.chunk(b)
714
707
715 p = self.parents(nb)
708 p = self.parents(nb)
716 meta = nb + p[0] + p[1] + lookup(nb)
709 meta = nb + p[0] + p[1] + lookup(nb)
717 l = struct.pack(">l", len(meta) + len(d) + 4)
710 l = struct.pack(">l", len(meta) + len(d) + 4)
718 yield l
711 yield l
719 yield meta
712 yield meta
720 yield d
713 yield d
721
714
722 yield struct.pack(">l", 0)
715 yield struct.pack(">l", 0)
723
716
724 def addgroup(self, revs, linkmapper, transaction, unique=0):
717 def addgroup(self, revs, linkmapper, transaction, unique=0):
725 """
718 """
726 add a delta group
719 add a delta group
727
720
728 given a set of deltas, add them to the revision log. the
721 given a set of deltas, add them to the revision log. the
729 first delta is against its parent, which should be in our
722 first delta is against its parent, which should be in our
730 log, the rest are against the previous delta.
723 log, the rest are against the previous delta.
731 """
724 """
732
725
733 #track the base of the current delta log
726 #track the base of the current delta log
734 r = self.count()
727 r = self.count()
735 t = r - 1
728 t = r - 1
736 node = nullid
729 node = nullid
737
730
738 base = prev = -1
731 base = prev = -1
739 start = end = measure = 0
732 start = end = measure = 0
740 if r:
733 if r:
741 start = self.start(self.base(t))
734 base = self.base(t)
735 start = self.start(base)
742 end = self.end(t)
736 end = self.end(t)
743 measure = self.length(self.base(t))
737 measure = self.length(base)
744 base = self.base(t)
745 prev = self.tip()
738 prev = self.tip()
746
739
747 transaction.add(self.datafile, end)
740 transaction.add(self.datafile, end)
748 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
741 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
749 dfh = self.opener(self.datafile, "a")
742 dfh = self.opener(self.datafile, "a")
750 ifh = self.opener(self.indexfile, "a")
743 ifh = self.opener(self.indexfile, "a")
751
744
752 # loop through our set of deltas
745 # loop through our set of deltas
753 chain = None
746 chain = None
754 for chunk in revs:
747 for chunk in revs:
755 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
748 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
756 link = linkmapper(cs)
749 link = linkmapper(cs)
757 if node in self.nodemap:
750 if node in self.nodemap:
758 # this can happen if two branches make the same change
751 # this can happen if two branches make the same change
759 # if unique:
752 # if unique:
760 # raise RevlogError(_("already have %s") % hex(node[:4]))
753 # raise RevlogError(_("already have %s") % hex(node[:4]))
761 chain = node
754 chain = node
762 continue
755 continue
763 delta = chunk[80:]
756 delta = chunk[80:]
764
757
765 for p in (p1, p2):
758 for p in (p1, p2):
766 if not p in self.nodemap:
759 if not p in self.nodemap:
767 raise RevlogError(_("unknown parent %s") % short(p1))
760 raise RevlogError(_("unknown parent %s") % short(p1))
768
761
769 if not chain:
762 if not chain:
770 # retrieve the parent revision of the delta chain
763 # retrieve the parent revision of the delta chain
771 chain = p1
764 chain = p1
772 if not chain in self.nodemap:
765 if not chain in self.nodemap:
773 raise RevlogError(_("unknown base %s") % short(chain[:4]))
766 raise RevlogError(_("unknown base %s") % short(chain[:4]))
774
767
775 # full versions are inserted when the needed deltas become
768 # full versions are inserted when the needed deltas become
776 # comparable to the uncompressed text or when the previous
769 # comparable to the uncompressed text or when the previous
777 # version is not the one we have a delta against. We use
770 # version is not the one we have a delta against. We use
778 # the size of the previous full rev as a proxy for the
771 # the size of the previous full rev as a proxy for the
779 # current size.
772 # current size.
780
773
781 if chain == prev:
774 if chain == prev:
782 tempd = compress(delta)
775 tempd = compress(delta)
783 cdelta = tempd[0] + tempd[1]
776 cdelta = tempd[0] + tempd[1]
784
777
785 if chain != prev or (end - start + len(cdelta)) > measure * 2:
778 if chain != prev or (end - start + len(cdelta)) > measure * 2:
786 # flush our writes here so we can read it in revision
779 # flush our writes here so we can read it in revision
787 dfh.flush()
780 dfh.flush()
788 ifh.flush()
781 ifh.flush()
789 text = self.revision(chain)
782 text = self.revision(chain)
790 text = self.patches(text, [delta])
783 text = self.patches(text, [delta])
791 chk = self.addrevision(text, transaction, link, p1, p2)
784 chk = self.addrevision(text, transaction, link, p1, p2)
792 if chk != node:
785 if chk != node:
793 raise RevlogError(_("consistency error adding group"))
786 raise RevlogError(_("consistency error adding group"))
794 measure = len(text)
787 measure = len(text)
795 else:
788 else:
796 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
789 e = (end, len(cdelta), base, link, p1, p2, node)
797 self.index.append(e)
790 self.index.append(e)
798 self.nodemap[node] = r
791 self.nodemap[node] = r
799 dfh.write(cdelta)
792 dfh.write(cdelta)
800 ifh.write(struct.pack(indexformat, *e))
793 ifh.write(struct.pack(indexformat, *e))
801
794
802 t, r, chain, prev = r, r + 1, node, node
795 t, r, chain, prev = r, r + 1, node, node
803 start = self.start(self.base(t))
796 base = self.base(t)
797 start = self.start(base)
804 end = self.end(t)
798 end = self.end(t)
805
799
806 dfh.close()
800 dfh.close()
807 ifh.close()
801 ifh.close()
808 return node
802 return node
809
803
810 def strip(self, rev, minlink):
804 def strip(self, rev, minlink):
811 if self.count() == 0 or rev >= self.count():
805 if self.count() == 0 or rev >= self.count():
812 return
806 return
813
807
814 # When stripping away a revision, we need to make sure it
808 # When stripping away a revision, we need to make sure it
815 # does not actually belong to an older changeset.
809 # does not actually belong to an older changeset.
816 # The minlink parameter defines the oldest revision
810 # The minlink parameter defines the oldest revision
817 # we're allowed to strip away.
811 # we're allowed to strip away.
818 while minlink > self.index[rev][3]:
812 while minlink > self.index[rev][3]:
819 rev += 1
813 rev += 1
820 if rev >= self.count():
814 if rev >= self.count():
821 return
815 return
822
816
823 # first truncate the files on disk
817 # first truncate the files on disk
824 end = self.start(rev)
818 end = self.start(rev)
825 self.opener(self.datafile, "a").truncate(end)
819 self.opener(self.datafile, "a").truncate(end)
826 end = rev * struct.calcsize(indexformat)
820 end = rev * struct.calcsize(indexformat)
827 self.opener(self.indexfile, "a").truncate(end)
821 self.opener(self.indexfile, "a").truncate(end)
828
822
829 # then reset internal state in memory to forget those revisions
823 # then reset internal state in memory to forget those revisions
830 self.cache = None
824 self.cache = None
831 self.chunkcache = None
825 self.chunkcache = None
832 for p in self.index[rev:]:
826 for p in self.index[rev:]:
833 del self.nodemap[p[6]]
827 del self.nodemap[p[6]]
834 del self.index[rev:]
828 del self.index[rev:]
835
829
836 # truncating the lazyindex also truncates the lazymap.
830 # truncating the lazyindex also truncates the lazymap.
837 if isinstance(self.index, lazyindex):
831 if isinstance(self.index, lazyindex):
838 self.index.trunc(end)
832 self.index.trunc(end)
839
833
840
834
841 def checksize(self):
835 def checksize(self):
842 expected = 0
836 expected = 0
843 if self.count():
837 if self.count():
844 expected = self.end(self.count() - 1)
838 expected = self.end(self.count() - 1)
845
839
846 try:
840 try:
847 f = self.opener(self.datafile)
841 f = self.opener(self.datafile)
848 f.seek(0, 2)
842 f.seek(0, 2)
849 actual = f.tell()
843 actual = f.tell()
850 dd = actual - expected
844 dd = actual - expected
851 except IOError, inst:
845 except IOError, inst:
852 if inst.errno != errno.ENOENT:
846 if inst.errno != errno.ENOENT:
853 raise
847 raise
854 dd = 0
848 dd = 0
855
849
856 try:
850 try:
857 f = self.opener(self.indexfile)
851 f = self.opener(self.indexfile)
858 f.seek(0, 2)
852 f.seek(0, 2)
859 actual = f.tell()
853 actual = f.tell()
860 s = struct.calcsize(indexformat)
854 s = struct.calcsize(indexformat)
861 i = actual / s
855 i = actual / s
862 di = actual - (i * s)
856 di = actual - (i * s)
863 except IOError, inst:
857 except IOError, inst:
864 if inst.errno != errno.ENOENT:
858 if inst.errno != errno.ENOENT:
865 raise
859 raise
866 di = 0
860 di = 0
867
861
868 return (dd, di)
862 return (dd, di)
869
863
870
864
@@ -1,20 +1,19 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 mkdir a
3 mkdir a
4 cd a
4 cd a
5 hg init
5 hg init
6 echo foo > b
6 echo foo > b
7 hg add b
7 hg add b
8 hg ci -m "b" -d "0 0"
8 hg ci -m "b" -d "0 0"
9
9
10 chmod -w .hg
10 chmod -w .hg
11
11
12 cd ..
12 cd ..
13
13
14 hg clone a b
14 hg clone a b
15
16 chmod +w a/.hg # let test clean up
17
15 cd b
18 cd b
16 hg verify
19 hg verify
17
18 cd ..
19
20 chmod +w a/.hg # let test clean up
General Comments 0
You need to be logged in to leave comments. Login now