dirstate: break update into separate functions
Matt Mackall
r4904:6fd953d5 default
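
This changeset moves callers off the old catch-all dirstate.update(files, state) entry point and onto the per-state methods it introduces. Condensed from the hunks below (the convert extension's hg sink and mq's queue.py, using the variable names that appear there), the migration looks roughly like this:

    # before: one entry point, with the target state passed as a flag
    repo.dirstate.update([f], "a")        # mark f as added
    repo.dirstate.update(removed, "r")    # mark files as removed
    repo.dirstate.update(merged, "m")     # mark files as merged
    repo.dirstate.update(m + r, "n")      # mark files as normal (clean)
    repo.dirstate.forget(a)               # forget a list of files

    # after: one method per state transition, applied file by file
    repo.dirstate.add(f)
    for f in removed:
        repo.dirstate.remove(f)
    for f in merged:
        repo.dirstate.merge(f)
    for f in m + r:
        repo.dirstate.normal(f)
    for f in a:
        repo.dirstate.forget(f)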
@@ -1,97 +1,97 @@
1 # hg backend for convert extension
1 # hg backend for convert extension
2
2
3 import os, time
3 import os, time
4 from mercurial import hg
4 from mercurial import hg
5
5
6 from common import NoRepo, converter_sink
6 from common import NoRepo, converter_sink
7
7
8 class convert_mercurial(converter_sink):
8 class convert_mercurial(converter_sink):
9 def __init__(self, ui, path):
9 def __init__(self, ui, path):
10 self.path = path
10 self.path = path
11 self.ui = ui
11 self.ui = ui
12 try:
12 try:
13 self.repo = hg.repository(self.ui, path)
13 self.repo = hg.repository(self.ui, path)
14 except:
14 except:
15         raise NoRepo("could not open hg repo %s" % path)
15         raise NoRepo("could not open hg repo %s" % path)
16
16
17 def mapfile(self):
17 def mapfile(self):
18 return os.path.join(self.path, ".hg", "shamap")
18 return os.path.join(self.path, ".hg", "shamap")
19
19
20 def authorfile(self):
20 def authorfile(self):
21 return os.path.join(self.path, ".hg", "authormap")
21 return os.path.join(self.path, ".hg", "authormap")
22
22
23 def getheads(self):
23 def getheads(self):
24 h = self.repo.changelog.heads()
24 h = self.repo.changelog.heads()
25 return [ hg.hex(x) for x in h ]
25 return [ hg.hex(x) for x in h ]
26
26
27 def putfile(self, f, e, data):
27 def putfile(self, f, e, data):
28 self.repo.wwrite(f, data, e)
28 self.repo.wwrite(f, data, e)
29 if self.repo.dirstate.state(f) == '?':
29 if self.repo.dirstate.state(f) == '?':
30 self.repo.dirstate.update([f], "a")
30 self.repo.dirstate.add(f)
31
31
32 def copyfile(self, source, dest):
32 def copyfile(self, source, dest):
33 self.repo.copy(source, dest)
33 self.repo.copy(source, dest)
34
34
35 def delfile(self, f):
35 def delfile(self, f):
36 try:
36 try:
37 os.unlink(self.repo.wjoin(f))
37 os.unlink(self.repo.wjoin(f))
38 #self.repo.remove([f])
38 #self.repo.remove([f])
39 except:
39 except:
40 pass
40 pass
41
41
42 def putcommit(self, files, parents, commit):
42 def putcommit(self, files, parents, commit):
43 seen = {}
43 seen = {}
44 pl = []
44 pl = []
45 for p in parents:
45 for p in parents:
46 if p not in seen:
46 if p not in seen:
47 pl.append(p)
47 pl.append(p)
48 seen[p] = 1
48 seen[p] = 1
49 parents = pl
49 parents = pl
50
50
51 if len(parents) < 2: parents.append("0" * 40)
51 if len(parents) < 2: parents.append("0" * 40)
52 if len(parents) < 2: parents.append("0" * 40)
52 if len(parents) < 2: parents.append("0" * 40)
53 p2 = parents.pop(0)
53 p2 = parents.pop(0)
54
54
55 text = commit.desc
55 text = commit.desc
56 extra = {}
56 extra = {}
57 if commit.branch:
57 if commit.branch:
58 extra['branch'] = commit.branch
58 extra['branch'] = commit.branch
59 if commit.rev:
59 if commit.rev:
60 extra['convert_revision'] = commit.rev
60 extra['convert_revision'] = commit.rev
61
61
62 while parents:
62 while parents:
63 p1 = p2
63 p1 = p2
64 p2 = parents.pop(0)
64 p2 = parents.pop(0)
65 a = self.repo.rawcommit(files, text, commit.author, commit.date,
65 a = self.repo.rawcommit(files, text, commit.author, commit.date,
66 hg.bin(p1), hg.bin(p2), extra=extra)
66 hg.bin(p1), hg.bin(p2), extra=extra)
67 text = "(octopus merge fixup)\n"
67 text = "(octopus merge fixup)\n"
68 p2 = hg.hex(self.repo.changelog.tip())
68 p2 = hg.hex(self.repo.changelog.tip())
69
69
70 return p2
70 return p2
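
For commits with more than two parents, putcommit() above reduces the octopus merge to a chain of pairwise merges. A hypothetical trace for a source commit with parents [A, B, C]:

    # rawcommit(files, commit.desc, ..., bin(A), bin(B))   -> real changeset, original message
    # rawcommit(files, "(octopus merge fixup)\n", ..., bin(tip), bin(C))
    # putcommit() returns the hex of the last changeset created (p2).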
71
71
72 def puttags(self, tags):
72 def puttags(self, tags):
73 try:
73 try:
74 old = self.repo.wfile(".hgtags").read()
74 old = self.repo.wfile(".hgtags").read()
75 oldlines = old.splitlines(1)
75 oldlines = old.splitlines(1)
76 oldlines.sort()
76 oldlines.sort()
77 except:
77 except:
78 oldlines = []
78 oldlines = []
79
79
80 k = tags.keys()
80 k = tags.keys()
81 k.sort()
81 k.sort()
82 newlines = []
82 newlines = []
83 for tag in k:
83 for tag in k:
84 newlines.append("%s %s\n" % (tags[tag], tag))
84 newlines.append("%s %s\n" % (tags[tag], tag))
85
85
86 newlines.sort()
86 newlines.sort()
87
87
88 if newlines != oldlines:
88 if newlines != oldlines:
89 self.ui.status("updating tags\n")
89 self.ui.status("updating tags\n")
90 f = self.repo.wfile(".hgtags", "w")
90 f = self.repo.wfile(".hgtags", "w")
91 f.write("".join(newlines))
91 f.write("".join(newlines))
92 f.close()
92 f.close()
93 if not oldlines: self.repo.add([".hgtags"])
93 if not oldlines: self.repo.add([".hgtags"])
94 date = "%s 0" % int(time.mktime(time.gmtime()))
94 date = "%s 0" % int(time.mktime(time.gmtime()))
95 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
95 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
96 date, self.repo.changelog.tip(), hg.nullid)
96 date, self.repo.changelog.tip(), hg.nullid)
97 return hg.hex(self.repo.changelog.tip())
97 return hg.hex(self.repo.changelog.tip())
@@ -1,2229 +1,2235 @@
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
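
A minimal sketch of where this state lives on disk, as implied by the docstring above and by queue.__init__ and statusentry further down (assumes the current directory is a repository root with mq initialized):

    import os

    patchdir = os.path.join(".hg", "patches")
    # 'series': one patch name per line, optionally followed by #guard annotations
    series = open(os.path.join(patchdir, "series")).read().splitlines()
    # 'status': one "<changeset hex>:<patch name>" line per applied patch
    applied = open(os.path.join(patchdir, "status")).read().splitlines()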
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 from mercurial import repair
34 from mercurial import repair
35 import os, sys, re, errno
35 import os, sys, re, errno
36
36
37 commands.norepo += " qclone qversion"
37 commands.norepo += " qclone qversion"
38
38
39 # Patch names look like unix file names.
39 # Patch names look like unix file names.
40 # They must be joinable with the queue directory and result in the patch path.
40 # They must be joinable with the queue directory and result in the patch path.
41 normname = util.normpath
41 normname = util.normpath
42
42
43 class statusentry:
43 class statusentry:
44 def __init__(self, rev, name=None):
44 def __init__(self, rev, name=None):
45 if not name:
45 if not name:
46 fields = rev.split(':', 1)
46 fields = rev.split(':', 1)
47 if len(fields) == 2:
47 if len(fields) == 2:
48 self.rev, self.name = fields
48 self.rev, self.name = fields
49 else:
49 else:
50 self.rev, self.name = None, None
50 self.rev, self.name = None, None
51 else:
51 else:
52 self.rev, self.name = rev, name
52 self.rev, self.name = rev, name
53
53
54 def __str__(self):
54 def __str__(self):
55 return self.rev + ':' + self.name
55 return self.rev + ':' + self.name
56
56
57 class queue:
57 class queue:
58 def __init__(self, ui, path, patchdir=None):
58 def __init__(self, ui, path, patchdir=None):
59 self.basepath = path
59 self.basepath = path
60 self.path = patchdir or os.path.join(path, "patches")
60 self.path = patchdir or os.path.join(path, "patches")
61 self.opener = util.opener(self.path)
61 self.opener = util.opener(self.path)
62 self.ui = ui
62 self.ui = ui
63 self.applied = []
63 self.applied = []
64 self.full_series = []
64 self.full_series = []
65 self.applied_dirty = 0
65 self.applied_dirty = 0
66 self.series_dirty = 0
66 self.series_dirty = 0
67 self.series_path = "series"
67 self.series_path = "series"
68 self.status_path = "status"
68 self.status_path = "status"
69 self.guards_path = "guards"
69 self.guards_path = "guards"
70 self.active_guards = None
70 self.active_guards = None
71 self.guards_dirty = False
71 self.guards_dirty = False
72 self._diffopts = None
72 self._diffopts = None
73
73
74 if os.path.exists(self.join(self.series_path)):
74 if os.path.exists(self.join(self.series_path)):
75 self.full_series = self.opener(self.series_path).read().splitlines()
75 self.full_series = self.opener(self.series_path).read().splitlines()
76 self.parse_series()
76 self.parse_series()
77
77
78 if os.path.exists(self.join(self.status_path)):
78 if os.path.exists(self.join(self.status_path)):
79 lines = self.opener(self.status_path).read().splitlines()
79 lines = self.opener(self.status_path).read().splitlines()
80 self.applied = [statusentry(l) for l in lines]
80 self.applied = [statusentry(l) for l in lines]
81
81
82 def diffopts(self):
82 def diffopts(self):
83 if self._diffopts is None:
83 if self._diffopts is None:
84 self._diffopts = patch.diffopts(self.ui)
84 self._diffopts = patch.diffopts(self.ui)
85 return self._diffopts
85 return self._diffopts
86
86
87 def join(self, *p):
87 def join(self, *p):
88 return os.path.join(self.path, *p)
88 return os.path.join(self.path, *p)
89
89
90 def find_series(self, patch):
90 def find_series(self, patch):
91 pre = re.compile("(\s*)([^#]+)")
91 pre = re.compile("(\s*)([^#]+)")
92 index = 0
92 index = 0
93 for l in self.full_series:
93 for l in self.full_series:
94 m = pre.match(l)
94 m = pre.match(l)
95 if m:
95 if m:
96 s = m.group(2)
96 s = m.group(2)
97 s = s.rstrip()
97 s = s.rstrip()
98 if s == patch:
98 if s == patch:
99 return index
99 return index
100 index += 1
100 index += 1
101 return None
101 return None
102
102
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104
104
105 def parse_series(self):
105 def parse_series(self):
106 self.series = []
106 self.series = []
107 self.series_guards = []
107 self.series_guards = []
108 for l in self.full_series:
108 for l in self.full_series:
109 h = l.find('#')
109 h = l.find('#')
110 if h == -1:
110 if h == -1:
111 patch = l
111 patch = l
112 comment = ''
112 comment = ''
113 elif h == 0:
113 elif h == 0:
114 continue
114 continue
115 else:
115 else:
116 patch = l[:h]
116 patch = l[:h]
117 comment = l[h:]
117 comment = l[h:]
118 patch = patch.strip()
118 patch = patch.strip()
119 if patch:
119 if patch:
120 if patch in self.series:
120 if patch in self.series:
121 raise util.Abort(_('%s appears more than once in %s') %
121 raise util.Abort(_('%s appears more than once in %s') %
122 (patch, self.join(self.series_path)))
122 (patch, self.join(self.series_path)))
123 self.series.append(patch)
123 self.series.append(patch)
124 self.series_guards.append(self.guard_re.findall(comment))
124 self.series_guards.append(self.guard_re.findall(comment))
125
125
126 def check_guard(self, guard):
126 def check_guard(self, guard):
127 bad_chars = '# \t\r\n\f'
127 bad_chars = '# \t\r\n\f'
128 first = guard[0]
128 first = guard[0]
129 for c in '-+':
129 for c in '-+':
130 if first == c:
130 if first == c:
131 return (_('guard %r starts with invalid character: %r') %
131 return (_('guard %r starts with invalid character: %r') %
132 (guard, c))
132 (guard, c))
133 for c in bad_chars:
133 for c in bad_chars:
134 if c in guard:
134 if c in guard:
135 return _('invalid character in guard %r: %r') % (guard, c)
135 return _('invalid character in guard %r: %r') % (guard, c)
136
136
137 def set_active(self, guards):
137 def set_active(self, guards):
138 for guard in guards:
138 for guard in guards:
139 bad = self.check_guard(guard)
139 bad = self.check_guard(guard)
140 if bad:
140 if bad:
141 raise util.Abort(bad)
141 raise util.Abort(bad)
142 guards = dict.fromkeys(guards).keys()
142 guards = dict.fromkeys(guards).keys()
143 guards.sort()
143 guards.sort()
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 self.active_guards = guards
145 self.active_guards = guards
146 self.guards_dirty = True
146 self.guards_dirty = True
147
147
148 def active(self):
148 def active(self):
149 if self.active_guards is None:
149 if self.active_guards is None:
150 self.active_guards = []
150 self.active_guards = []
151 try:
151 try:
152 guards = self.opener(self.guards_path).read().split()
152 guards = self.opener(self.guards_path).read().split()
153 except IOError, err:
153 except IOError, err:
154 if err.errno != errno.ENOENT: raise
154 if err.errno != errno.ENOENT: raise
155 guards = []
155 guards = []
156 for i, guard in enumerate(guards):
156 for i, guard in enumerate(guards):
157 bad = self.check_guard(guard)
157 bad = self.check_guard(guard)
158 if bad:
158 if bad:
159 self.ui.warn('%s:%d: %s\n' %
159 self.ui.warn('%s:%d: %s\n' %
160 (self.join(self.guards_path), i + 1, bad))
160 (self.join(self.guards_path), i + 1, bad))
161 else:
161 else:
162 self.active_guards.append(guard)
162 self.active_guards.append(guard)
163 return self.active_guards
163 return self.active_guards
164
164
165 def set_guards(self, idx, guards):
165 def set_guards(self, idx, guards):
166 for g in guards:
166 for g in guards:
167 if len(g) < 2:
167 if len(g) < 2:
168 raise util.Abort(_('guard %r too short') % g)
168 raise util.Abort(_('guard %r too short') % g)
169 if g[0] not in '-+':
169 if g[0] not in '-+':
170 raise util.Abort(_('guard %r starts with invalid char') % g)
170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 bad = self.check_guard(g[1:])
171 bad = self.check_guard(g[1:])
172 if bad:
172 if bad:
173 raise util.Abort(bad)
173 raise util.Abort(bad)
174 drop = self.guard_re.sub('', self.full_series[idx])
174 drop = self.guard_re.sub('', self.full_series[idx])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 self.parse_series()
176 self.parse_series()
177 self.series_dirty = True
177 self.series_dirty = True
178
178
179 def pushable(self, idx):
179 def pushable(self, idx):
180 if isinstance(idx, str):
180 if isinstance(idx, str):
181 idx = self.series.index(idx)
181 idx = self.series.index(idx)
182 patchguards = self.series_guards[idx]
182 patchguards = self.series_guards[idx]
183 if not patchguards:
183 if not patchguards:
184 return True, None
184 return True, None
185 default = False
185 default = False
186 guards = self.active()
186 guards = self.active()
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 if exactneg:
188 if exactneg:
189 return False, exactneg[0]
189 return False, exactneg[0]
190 pos = [g for g in patchguards if g[0] == '+']
190 pos = [g for g in patchguards if g[0] == '+']
191 exactpos = [g for g in pos if g[1:] in guards]
191 exactpos = [g for g in pos if g[1:] in guards]
192 if pos:
192 if pos:
193 if exactpos:
193 if exactpos:
194 return True, exactpos[0]
194 return True, exactpos[0]
195 return False, pos
195 return False, pos
196 return True, ''
196 return True, ''
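
To illustrate the guard rules pushable() above implements, take a hypothetical series entry guarded with "#+win32 #-broken":

    # active guards ['win32']  -> (True,  '+win32')    pushed: a positive guard matches
    # active guards ['broken'] -> (False, '-broken')   skipped: a negative guard matches
    # no active guards         -> (False, ['+win32'])  skipped: a +guard needs a
    #                                                  matching active guard
    # an entry with no guards at all is always (True, None)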
197
197
198 def explain_pushable(self, idx, all_patches=False):
198 def explain_pushable(self, idx, all_patches=False):
199 write = all_patches and self.ui.write or self.ui.warn
199 write = all_patches and self.ui.write or self.ui.warn
200 if all_patches or self.ui.verbose:
200 if all_patches or self.ui.verbose:
201 if isinstance(idx, str):
201 if isinstance(idx, str):
202 idx = self.series.index(idx)
202 idx = self.series.index(idx)
203 pushable, why = self.pushable(idx)
203 pushable, why = self.pushable(idx)
204 if all_patches and pushable:
204 if all_patches and pushable:
205 if why is None:
205 if why is None:
206 write(_('allowing %s - no guards in effect\n') %
206 write(_('allowing %s - no guards in effect\n') %
207 self.series[idx])
207 self.series[idx])
208 else:
208 else:
209 if not why:
209 if not why:
210 write(_('allowing %s - no matching negative guards\n') %
210 write(_('allowing %s - no matching negative guards\n') %
211 self.series[idx])
211 self.series[idx])
212 else:
212 else:
213 write(_('allowing %s - guarded by %r\n') %
213 write(_('allowing %s - guarded by %r\n') %
214 (self.series[idx], why))
214 (self.series[idx], why))
215 if not pushable:
215 if not pushable:
216 if why:
216 if why:
217 write(_('skipping %s - guarded by %r\n') %
217 write(_('skipping %s - guarded by %r\n') %
218 (self.series[idx], why))
218 (self.series[idx], why))
219 else:
219 else:
220 write(_('skipping %s - no matching guards\n') %
220 write(_('skipping %s - no matching guards\n') %
221 self.series[idx])
221 self.series[idx])
222
222
223 def save_dirty(self):
223 def save_dirty(self):
224 def write_list(items, path):
224 def write_list(items, path):
225 fp = self.opener(path, 'w')
225 fp = self.opener(path, 'w')
226 for i in items:
226 for i in items:
227 print >> fp, i
227 print >> fp, i
228 fp.close()
228 fp.close()
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232
232
233 def readheaders(self, patch):
233 def readheaders(self, patch):
234 def eatdiff(lines):
234 def eatdiff(lines):
235 while lines:
235 while lines:
236 l = lines[-1]
236 l = lines[-1]
237 if (l.startswith("diff -") or
237 if (l.startswith("diff -") or
238 l.startswith("Index:") or
238 l.startswith("Index:") or
239 l.startswith("===========")):
239 l.startswith("===========")):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243 def eatempty(lines):
243 def eatempty(lines):
244 while lines:
244 while lines:
245 l = lines[-1]
245 l = lines[-1]
246 if re.match('\s*$', l):
246 if re.match('\s*$', l):
247 del lines[-1]
247 del lines[-1]
248 else:
248 else:
249 break
249 break
250
250
251 pf = self.join(patch)
251 pf = self.join(patch)
252 message = []
252 message = []
253 comments = []
253 comments = []
254 user = None
254 user = None
255 date = None
255 date = None
256 format = None
256 format = None
257 subject = None
257 subject = None
258 diffstart = 0
258 diffstart = 0
259
259
260 for line in file(pf):
260 for line in file(pf):
261 line = line.rstrip()
261 line = line.rstrip()
262 if line.startswith('diff --git'):
262 if line.startswith('diff --git'):
263 diffstart = 2
263 diffstart = 2
264 break
264 break
265 if diffstart:
265 if diffstart:
266 if line.startswith('+++ '):
266 if line.startswith('+++ '):
267 diffstart = 2
267 diffstart = 2
268 break
268 break
269 if line.startswith("--- "):
269 if line.startswith("--- "):
270 diffstart = 1
270 diffstart = 1
271 continue
271 continue
272 elif format == "hgpatch":
272 elif format == "hgpatch":
273 # parse values when importing the result of an hg export
273 # parse values when importing the result of an hg export
274 if line.startswith("# User "):
274 if line.startswith("# User "):
275 user = line[7:]
275 user = line[7:]
276 elif line.startswith("# Date "):
276 elif line.startswith("# Date "):
277 date = line[7:]
277 date = line[7:]
278 elif not line.startswith("# ") and line:
278 elif not line.startswith("# ") and line:
279 message.append(line)
279 message.append(line)
280 format = None
280 format = None
281 elif line == '# HG changeset patch':
281 elif line == '# HG changeset patch':
282 format = "hgpatch"
282 format = "hgpatch"
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 line.startswith("subject: "))):
284 line.startswith("subject: "))):
285 subject = line[9:]
285 subject = line[9:]
286 format = "tag"
286 format = "tag"
287 elif (format != "tagdone" and (line.startswith("From: ") or
287 elif (format != "tagdone" and (line.startswith("From: ") or
288 line.startswith("from: "))):
288 line.startswith("from: "))):
289 user = line[6:]
289 user = line[6:]
290 format = "tag"
290 format = "tag"
291 elif format == "tag" and line == "":
291 elif format == "tag" and line == "":
292 # when looking for tags (subject: from: etc) they
292 # when looking for tags (subject: from: etc) they
293 # end once you find a blank line in the source
293 # end once you find a blank line in the source
294 format = "tagdone"
294 format = "tagdone"
295 elif message or line:
295 elif message or line:
296 message.append(line)
296 message.append(line)
297 comments.append(line)
297 comments.append(line)
298
298
299 eatdiff(message)
299 eatdiff(message)
300 eatdiff(comments)
300 eatdiff(comments)
301 eatempty(message)
301 eatempty(message)
302 eatempty(comments)
302 eatempty(comments)
303
303
304 # make sure message isn't empty
304 # make sure message isn't empty
305 if format and format.startswith("tag") and subject:
305 if format and format.startswith("tag") and subject:
306 message.insert(0, "")
306 message.insert(0, "")
307 message.insert(0, subject)
307 message.insert(0, subject)
308 return (message, comments, user, date, diffstart > 1)
308 return (message, comments, user, date, diffstart > 1)
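
For reference, a hypothetical patch file in the 'hg export' style that readheaders() above recognizes (the user, date and hashes are made up):

    # HG changeset patch
    # User Jane Doe <jane@example.com>
    # Date 1183000000 0
    fix the frobnicator

    diff -r 000000000000 -r 111111111111 frob.py
    --- a/frob.py
    +++ b/frob.py

readheaders() takes user and date from the "# User" and "# Date" lines, collects the following non-"#" lines as the message, and reports the patch as found once the diff body starts.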
309
309
310 def removeundo(self, repo):
310 def removeundo(self, repo):
311 undo = repo.sjoin('undo')
311 undo = repo.sjoin('undo')
312 if not os.path.exists(undo):
312 if not os.path.exists(undo):
313 return
313 return
314 try:
314 try:
315 os.unlink(undo)
315 os.unlink(undo)
316 except OSError, inst:
316 except OSError, inst:
317 self.ui.warn('error removing undo: %s\n' % str(inst))
317 self.ui.warn('error removing undo: %s\n' % str(inst))
318
318
319 def printdiff(self, repo, node1, node2=None, files=None,
319 def printdiff(self, repo, node1, node2=None, files=None,
320 fp=None, changes=None, opts={}):
320 fp=None, changes=None, opts={}):
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322
322
323 patch.diff(repo, node1, node2, fns, match=matchfn,
323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 fp=fp, changes=changes, opts=self.diffopts())
324 fp=fp, changes=changes, opts=self.diffopts())
325
325
326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
327 # first try just applying the patch
327 # first try just applying the patch
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 strict=True, merge=rev, wlock=wlock)
329 strict=True, merge=rev, wlock=wlock)
330
330
331 if err == 0:
331 if err == 0:
332 return (err, n)
332 return (err, n)
333
333
334 if n is None:
334 if n is None:
335 raise util.Abort(_("apply failed for patch %s") % patch)
335 raise util.Abort(_("apply failed for patch %s") % patch)
336
336
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338
338
339 # apply failed, strip away that rev and merge.
339 # apply failed, strip away that rev and merge.
340 hg.clean(repo, head, wlock=wlock)
340 hg.clean(repo, head, wlock=wlock)
341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
342
342
343 ctx = repo.changectx(rev)
343 ctx = repo.changectx(rev)
344 ret = hg.merge(repo, rev, wlock=wlock)
344 ret = hg.merge(repo, rev, wlock=wlock)
345 if ret:
345 if ret:
346 raise util.Abort(_("update returned %d") % ret)
346 raise util.Abort(_("update returned %d") % ret)
347 n = repo.commit(None, ctx.description(), ctx.user(),
347 n = repo.commit(None, ctx.description(), ctx.user(),
348 force=1, wlock=wlock)
348 force=1, wlock=wlock)
349 if n == None:
349 if n == None:
350 raise util.Abort(_("repo commit failed"))
350 raise util.Abort(_("repo commit failed"))
351 try:
351 try:
352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 except:
353 except:
354 raise util.Abort(_("unable to read %s") % patch)
354 raise util.Abort(_("unable to read %s") % patch)
355
355
356 patchf = self.opener(patch, "w")
356 patchf = self.opener(patch, "w")
357 if comments:
357 if comments:
358 comments = "\n".join(comments) + '\n\n'
358 comments = "\n".join(comments) + '\n\n'
359 patchf.write(comments)
359 patchf.write(comments)
360 self.printdiff(repo, head, n, fp=patchf)
360 self.printdiff(repo, head, n, fp=patchf)
361 patchf.close()
361 patchf.close()
362 self.removeundo(repo)
362 self.removeundo(repo)
363 return (0, n)
363 return (0, n)
364
364
365 def qparents(self, repo, rev=None):
365 def qparents(self, repo, rev=None):
366 if rev is None:
366 if rev is None:
367 (p1, p2) = repo.dirstate.parents()
367 (p1, p2) = repo.dirstate.parents()
368 if p2 == revlog.nullid:
368 if p2 == revlog.nullid:
369 return p1
369 return p1
370 if len(self.applied) == 0:
370 if len(self.applied) == 0:
371 return None
371 return None
372 return revlog.bin(self.applied[-1].rev)
372 return revlog.bin(self.applied[-1].rev)
373 pp = repo.changelog.parents(rev)
373 pp = repo.changelog.parents(rev)
374 if pp[1] != revlog.nullid:
374 if pp[1] != revlog.nullid:
375 arevs = [ x.rev for x in self.applied ]
375 arevs = [ x.rev for x in self.applied ]
376 p0 = revlog.hex(pp[0])
376 p0 = revlog.hex(pp[0])
377 p1 = revlog.hex(pp[1])
377 p1 = revlog.hex(pp[1])
378 if p0 in arevs:
378 if p0 in arevs:
379 return pp[0]
379 return pp[0]
380 if p1 in arevs:
380 if p1 in arevs:
381 return pp[1]
381 return pp[1]
382 return pp[0]
382 return pp[0]
383
383
384 def mergepatch(self, repo, mergeq, series, wlock):
384 def mergepatch(self, repo, mergeq, series, wlock):
385 if len(self.applied) == 0:
385 if len(self.applied) == 0:
386 # each of the patches merged in will have two parents. This
386 # each of the patches merged in will have two parents. This
387 # can confuse the qrefresh, qdiff, and strip code because it
387 # can confuse the qrefresh, qdiff, and strip code because it
388 # needs to know which parent is actually in the patch queue.
388 # needs to know which parent is actually in the patch queue.
389 # so, we insert a merge marker with only one parent. This way
389 # so, we insert a merge marker with only one parent. This way
390 # the first patch in the queue is never a merge patch
390 # the first patch in the queue is never a merge patch
391 #
391 #
392 pname = ".hg.patches.merge.marker"
392 pname = ".hg.patches.merge.marker"
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
394 wlock=wlock)
394 wlock=wlock)
395 self.removeundo(repo)
395 self.removeundo(repo)
396 self.applied.append(statusentry(revlog.hex(n), pname))
396 self.applied.append(statusentry(revlog.hex(n), pname))
397 self.applied_dirty = 1
397 self.applied_dirty = 1
398
398
399 head = self.qparents(repo)
399 head = self.qparents(repo)
400
400
401 for patch in series:
401 for patch in series:
402 patch = mergeq.lookup(patch, strict=True)
402 patch = mergeq.lookup(patch, strict=True)
403 if not patch:
403 if not patch:
404 self.ui.warn("patch %s does not exist\n" % patch)
404 self.ui.warn("patch %s does not exist\n" % patch)
405 return (1, None)
405 return (1, None)
406 pushable, reason = self.pushable(patch)
406 pushable, reason = self.pushable(patch)
407 if not pushable:
407 if not pushable:
408 self.explain_pushable(patch, all_patches=True)
408 self.explain_pushable(patch, all_patches=True)
409 continue
409 continue
410 info = mergeq.isapplied(patch)
410 info = mergeq.isapplied(patch)
411 if not info:
411 if not info:
412 self.ui.warn("patch %s is not applied\n" % patch)
412 self.ui.warn("patch %s is not applied\n" % patch)
413 return (1, None)
413 return (1, None)
414 rev = revlog.bin(info[1])
414 rev = revlog.bin(info[1])
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
416 if head:
416 if head:
417 self.applied.append(statusentry(revlog.hex(head), patch))
417 self.applied.append(statusentry(revlog.hex(head), patch))
418 self.applied_dirty = 1
418 self.applied_dirty = 1
419 if err:
419 if err:
420 return (err, head)
420 return (err, head)
421 self.save_dirty()
421 self.save_dirty()
422 return (0, head)
422 return (0, head)
423
423
424 def patch(self, repo, patchfile):
424 def patch(self, repo, patchfile):
425 '''Apply patchfile to the working directory.
425 '''Apply patchfile to the working directory.
426 patchfile: file name of patch'''
426 patchfile: file name of patch'''
427 files = {}
427 files = {}
428 try:
428 try:
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 files=files)
430 files=files)
431 except Exception, inst:
431 except Exception, inst:
432 self.ui.note(str(inst) + '\n')
432 self.ui.note(str(inst) + '\n')
433 if not self.ui.verbose:
433 if not self.ui.verbose:
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 return (False, files, False)
435 return (False, files, False)
436
436
437 return (True, files, fuzz)
437 return (True, files, fuzz)
438
438
439 def apply(self, repo, series, list=False, update_status=True,
439 def apply(self, repo, series, list=False, update_status=True,
440 strict=False, patchdir=None, merge=None, wlock=None,
440 strict=False, patchdir=None, merge=None, wlock=None,
441 all_files={}):
441 all_files={}):
442 if not wlock:
442 if not wlock:
443 wlock = repo.wlock()
443 wlock = repo.wlock()
444 lock = repo.lock()
444 lock = repo.lock()
445 tr = repo.transaction()
445 tr = repo.transaction()
446 try:
446 try:
447 ret = self._apply(tr, repo, series, list, update_status,
447 ret = self._apply(tr, repo, series, list, update_status,
448 strict, patchdir, merge, wlock,
448 strict, patchdir, merge, wlock,
449 lock=lock, all_files=all_files)
449 lock=lock, all_files=all_files)
450 tr.close()
450 tr.close()
451 self.save_dirty()
451 self.save_dirty()
452 return ret
452 return ret
453 except:
453 except:
454 try:
454 try:
455 tr.abort()
455 tr.abort()
456 finally:
456 finally:
457 repo.invalidate()
457 repo.invalidate()
458 repo.dirstate.invalidate()
458 repo.dirstate.invalidate()
459 raise
459 raise
460
460
461 def _apply(self, tr, repo, series, list=False, update_status=True,
461 def _apply(self, tr, repo, series, list=False, update_status=True,
462 strict=False, patchdir=None, merge=None, wlock=None,
462 strict=False, patchdir=None, merge=None, wlock=None,
463 lock=None, all_files={}):
463 lock=None, all_files={}):
464 # TODO unify with commands.py
464 # TODO unify with commands.py
465 if not patchdir:
465 if not patchdir:
466 patchdir = self.path
466 patchdir = self.path
467 err = 0
467 err = 0
468 n = None
468 n = None
469 for patchname in series:
469 for patchname in series:
470 pushable, reason = self.pushable(patchname)
470 pushable, reason = self.pushable(patchname)
471 if not pushable:
471 if not pushable:
472 self.explain_pushable(patchname, all_patches=True)
472 self.explain_pushable(patchname, all_patches=True)
473 continue
473 continue
474 self.ui.warn("applying %s\n" % patchname)
474 self.ui.warn("applying %s\n" % patchname)
475 pf = os.path.join(patchdir, patchname)
475 pf = os.path.join(patchdir, patchname)
476
476
477 try:
477 try:
478 message, comments, user, date, patchfound = self.readheaders(patchname)
478 message, comments, user, date, patchfound = self.readheaders(patchname)
479 except:
479 except:
480 self.ui.warn("Unable to read %s\n" % patchname)
480 self.ui.warn("Unable to read %s\n" % patchname)
481 err = 1
481 err = 1
482 break
482 break
483
483
484 if not message:
484 if not message:
485 message = "imported patch %s\n" % patchname
485 message = "imported patch %s\n" % patchname
486 else:
486 else:
487 if list:
487 if list:
488 message.append("\nimported patch %s" % patchname)
488 message.append("\nimported patch %s" % patchname)
489 message = '\n'.join(message)
489 message = '\n'.join(message)
490
490
491 (patcherr, files, fuzz) = self.patch(repo, pf)
491 (patcherr, files, fuzz) = self.patch(repo, pf)
492 all_files.update(files)
492 all_files.update(files)
493 patcherr = not patcherr
493 patcherr = not patcherr
494
494
495 if merge and files:
495 if merge and files:
496 # Mark as removed/merged and update dirstate parent info
496 # Mark as removed/merged and update dirstate parent info
497 removed = []
497 removed = []
498 merged = []
498 merged = []
499 for f in files:
499 for f in files:
500 if os.path.exists(repo.dirstate.wjoin(f)):
500 if os.path.exists(repo.dirstate.wjoin(f)):
501 merged.append(f)
501 merged.append(f)
502 else:
502 else:
503 removed.append(f)
503 removed.append(f)
504 repo.dirstate.update(repo.dirstate.filterfiles(removed), 'r')
504 for f in removed:
505 repo.dirstate.update(repo.dirstate.filterfiles(merged), 'm')
505 repo.dirstate.remove(f)
506 for f in merged:
507 repo.dirstate.merge(f)
506 p1, p2 = repo.dirstate.parents()
508 p1, p2 = repo.dirstate.parents()
507 repo.dirstate.setparents(p1, merge)
509 repo.dirstate.setparents(p1, merge)
508 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
510 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
509 n = repo.commit(files, message, user, date, force=1, lock=lock,
511 n = repo.commit(files, message, user, date, force=1, lock=lock,
510 wlock=wlock)
512 wlock=wlock)
511
513
512 if n == None:
514 if n == None:
513 raise util.Abort(_("repo commit failed"))
515 raise util.Abort(_("repo commit failed"))
514
516
515 if update_status:
517 if update_status:
516 self.applied.append(statusentry(revlog.hex(n), patchname))
518 self.applied.append(statusentry(revlog.hex(n), patchname))
517
519
518 if patcherr:
520 if patcherr:
519 if not patchfound:
521 if not patchfound:
520 self.ui.warn("patch %s is empty\n" % patchname)
522 self.ui.warn("patch %s is empty\n" % patchname)
521 err = 0
523 err = 0
522 else:
524 else:
523 self.ui.warn("patch failed, rejects left in working dir\n")
525 self.ui.warn("patch failed, rejects left in working dir\n")
524 err = 1
526 err = 1
525 break
527 break
526
528
527 if fuzz and strict:
529 if fuzz and strict:
528 self.ui.warn("fuzz found when applying patch, stopping\n")
530 self.ui.warn("fuzz found when applying patch, stopping\n")
529 err = 1
531 err = 1
530 break
532 break
531 self.removeundo(repo)
533 self.removeundo(repo)
532 return (err, n)
534 return (err, n)
533
535
534 def delete(self, repo, patches, opts):
536 def delete(self, repo, patches, opts):
535 if not patches and not opts.get('rev'):
537 if not patches and not opts.get('rev'):
536 raise util.Abort(_('qdelete requires at least one revision or '
538 raise util.Abort(_('qdelete requires at least one revision or '
537 'patch name'))
539 'patch name'))
538
540
539 realpatches = []
541 realpatches = []
540 for patch in patches:
542 for patch in patches:
541 patch = self.lookup(patch, strict=True)
543 patch = self.lookup(patch, strict=True)
542 info = self.isapplied(patch)
544 info = self.isapplied(patch)
543 if info:
545 if info:
544 raise util.Abort(_("cannot delete applied patch %s") % patch)
546 raise util.Abort(_("cannot delete applied patch %s") % patch)
545 if patch not in self.series:
547 if patch not in self.series:
546 raise util.Abort(_("patch %s not in series file") % patch)
548 raise util.Abort(_("patch %s not in series file") % patch)
547 realpatches.append(patch)
549 realpatches.append(patch)
548
550
549 appliedbase = 0
551 appliedbase = 0
550 if opts.get('rev'):
552 if opts.get('rev'):
551 if not self.applied:
553 if not self.applied:
552 raise util.Abort(_('no patches applied'))
554 raise util.Abort(_('no patches applied'))
553 revs = cmdutil.revrange(repo, opts['rev'])
555 revs = cmdutil.revrange(repo, opts['rev'])
554 if len(revs) > 1 and revs[0] > revs[1]:
556 if len(revs) > 1 and revs[0] > revs[1]:
555 revs.reverse()
557 revs.reverse()
556 for rev in revs:
558 for rev in revs:
557 if appliedbase >= len(self.applied):
559 if appliedbase >= len(self.applied):
558 raise util.Abort(_("revision %d is not managed") % rev)
560 raise util.Abort(_("revision %d is not managed") % rev)
559
561
560 base = revlog.bin(self.applied[appliedbase].rev)
562 base = revlog.bin(self.applied[appliedbase].rev)
561 node = repo.changelog.node(rev)
563 node = repo.changelog.node(rev)
562 if node != base:
564 if node != base:
563 raise util.Abort(_("cannot delete revision %d above "
565 raise util.Abort(_("cannot delete revision %d above "
564 "applied patches") % rev)
566 "applied patches") % rev)
565 realpatches.append(self.applied[appliedbase].name)
567 realpatches.append(self.applied[appliedbase].name)
566 appliedbase += 1
568 appliedbase += 1
567
569
568 if not opts.get('keep'):
570 if not opts.get('keep'):
569 r = self.qrepo()
571 r = self.qrepo()
570 if r:
572 if r:
571 r.remove(realpatches, True)
573 r.remove(realpatches, True)
572 else:
574 else:
573 for p in realpatches:
575 for p in realpatches:
574 os.unlink(self.join(p))
576 os.unlink(self.join(p))
575
577
576 if appliedbase:
578 if appliedbase:
577 del self.applied[:appliedbase]
579 del self.applied[:appliedbase]
578 self.applied_dirty = 1
580 self.applied_dirty = 1
579 indices = [self.find_series(p) for p in realpatches]
581 indices = [self.find_series(p) for p in realpatches]
580 indices.sort()
582 indices.sort()
581 for i in indices[-1::-1]:
583 for i in indices[-1::-1]:
582 del self.full_series[i]
584 del self.full_series[i]
583 self.parse_series()
585 self.parse_series()
584 self.series_dirty = 1
586 self.series_dirty = 1
585
587
586 def check_toppatch(self, repo):
588 def check_toppatch(self, repo):
587 if len(self.applied) > 0:
589 if len(self.applied) > 0:
588 top = revlog.bin(self.applied[-1].rev)
590 top = revlog.bin(self.applied[-1].rev)
589 pp = repo.dirstate.parents()
591 pp = repo.dirstate.parents()
590 if top not in pp:
592 if top not in pp:
591 raise util.Abort(_("queue top not at same revision as working directory"))
593 raise util.Abort(_("queue top not at same revision as working directory"))
592 return top
594 return top
593 return None
595 return None
594 def check_localchanges(self, repo, force=False, refresh=True):
596 def check_localchanges(self, repo, force=False, refresh=True):
595 m, a, r, d = repo.status()[:4]
597 m, a, r, d = repo.status()[:4]
596 if m or a or r or d:
598 if m or a or r or d:
597 if not force:
599 if not force:
598 if refresh:
600 if refresh:
599 raise util.Abort(_("local changes found, refresh first"))
601 raise util.Abort(_("local changes found, refresh first"))
600 else:
602 else:
601 raise util.Abort(_("local changes found"))
603 raise util.Abort(_("local changes found"))
602 return m, a, r, d
604 return m, a, r, d
603
605
604 def new(self, repo, patch, *pats, **opts):
606 def new(self, repo, patch, *pats, **opts):
605 msg = opts.get('msg')
607 msg = opts.get('msg')
606 force = opts.get('force')
608 force = opts.get('force')
607 if os.path.exists(self.join(patch)):
609 if os.path.exists(self.join(patch)):
608 raise util.Abort(_('patch "%s" already exists') % patch)
610 raise util.Abort(_('patch "%s" already exists') % patch)
609 if opts.get('include') or opts.get('exclude') or pats:
611 if opts.get('include') or opts.get('exclude') or pats:
610 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
612 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
611 m, a, r, d = repo.status(files=fns, match=match)[:4]
613 m, a, r, d = repo.status(files=fns, match=match)[:4]
612 else:
614 else:
613 m, a, r, d = self.check_localchanges(repo, force)
615 m, a, r, d = self.check_localchanges(repo, force)
614 commitfiles = m + a + r
616 commitfiles = m + a + r
615 self.check_toppatch(repo)
617 self.check_toppatch(repo)
616 wlock = repo.wlock()
618 wlock = repo.wlock()
617 insert = self.full_series_end()
619 insert = self.full_series_end()
618 if msg:
620 if msg:
619 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
621 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
620 else:
622 else:
621 n = repo.commit(commitfiles,
623 n = repo.commit(commitfiles,
622 "[mq]: %s" % patch, force=True, wlock=wlock)
624 "[mq]: %s" % patch, force=True, wlock=wlock)
623 if n == None:
625 if n == None:
624 raise util.Abort(_("repo commit failed"))
626 raise util.Abort(_("repo commit failed"))
625 self.full_series[insert:insert] = [patch]
627 self.full_series[insert:insert] = [patch]
626 self.applied.append(statusentry(revlog.hex(n), patch))
628 self.applied.append(statusentry(revlog.hex(n), patch))
627 self.parse_series()
629 self.parse_series()
628 self.series_dirty = 1
630 self.series_dirty = 1
629 self.applied_dirty = 1
631 self.applied_dirty = 1
630 p = self.opener(patch, "w")
632 p = self.opener(patch, "w")
631 if msg:
633 if msg:
632 msg = msg + "\n"
634 msg = msg + "\n"
633 p.write(msg)
635 p.write(msg)
634 p.close()
636 p.close()
635 wlock = None
637 wlock = None
636 r = self.qrepo()
638 r = self.qrepo()
637 if r: r.add([patch])
639 if r: r.add([patch])
638 if commitfiles:
640 if commitfiles:
639 self.refresh(repo, short=True)
641 self.refresh(repo, short=True)
640 self.removeundo(repo)
642 self.removeundo(repo)
641
643
642 def strip(self, repo, rev, update=True, backup="all", wlock=None):
644 def strip(self, repo, rev, update=True, backup="all", wlock=None):
643 if not wlock:
645 if not wlock:
644 wlock = repo.wlock()
646 wlock = repo.wlock()
645 lock = repo.lock()
647 lock = repo.lock()
646
648
647 if update:
649 if update:
648 self.check_localchanges(repo, refresh=False)
650 self.check_localchanges(repo, refresh=False)
649 urev = self.qparents(repo, rev)
651 urev = self.qparents(repo, rev)
650 hg.clean(repo, urev, wlock=wlock)
652 hg.clean(repo, urev, wlock=wlock)
651 repo.dirstate.write()
653 repo.dirstate.write()
652
654
653 self.removeundo(repo)
655 self.removeundo(repo)
654 repair.strip(self.ui, repo, rev, backup)
656 repair.strip(self.ui, repo, rev, backup)
655
657
656 def isapplied(self, patch):
658 def isapplied(self, patch):
657 """returns (index, rev, patch)"""
659 """returns (index, rev, patch)"""
658 for i in xrange(len(self.applied)):
660 for i in xrange(len(self.applied)):
659 a = self.applied[i]
661 a = self.applied[i]
660 if a.name == patch:
662 if a.name == patch:
661 return (i, a.rev, a.name)
663 return (i, a.rev, a.name)
662 return None
664 return None
663
665
664 # if the exact patch name does not exist, we try a few
666 # if the exact patch name does not exist, we try a few
665 # variations. If strict is passed, we try only #1
667 # variations. If strict is passed, we try only #1
666 #
668 #
667 # 1) a number to indicate an offset in the series file
669 # 1) a number to indicate an offset in the series file
668 # 2) a unique substring of the patch name was given
670 # 2) a unique substring of the patch name was given
669 # 3) patchname[-+]num to indicate an offset in the series file
671 # 3) patchname[-+]num to indicate an offset in the series file
670 def lookup(self, patch, strict=False):
672 def lookup(self, patch, strict=False):
671 patch = patch and str(patch)
673 patch = patch and str(patch)
672
674
673 def partial_name(s):
675 def partial_name(s):
674 if s in self.series:
676 if s in self.series:
675 return s
677 return s
676 matches = [x for x in self.series if s in x]
678 matches = [x for x in self.series if s in x]
677 if len(matches) > 1:
679 if len(matches) > 1:
678 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
680 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
679 for m in matches:
681 for m in matches:
680 self.ui.warn(' %s\n' % m)
682 self.ui.warn(' %s\n' % m)
681 return None
683 return None
682 if matches:
684 if matches:
683 return matches[0]
685 return matches[0]
684 if len(self.series) > 0 and len(self.applied) > 0:
686 if len(self.series) > 0 and len(self.applied) > 0:
685 if s == 'qtip':
687 if s == 'qtip':
686 return self.series[self.series_end(True)-1]
688 return self.series[self.series_end(True)-1]
687 if s == 'qbase':
689 if s == 'qbase':
688 return self.series[0]
690 return self.series[0]
689 return None
691 return None
690 if patch == None:
692 if patch == None:
691 return None
693 return None
692
694
693 # we don't want to return a partial match until we make
695 # we don't want to return a partial match until we make
694 # sure the file name passed in does not exist (checked below)
696 # sure the file name passed in does not exist (checked below)
695 res = partial_name(patch)
697 res = partial_name(patch)
696 if res and res == patch:
698 if res and res == patch:
697 return res
699 return res
698
700
699 if not os.path.isfile(self.join(patch)):
701 if not os.path.isfile(self.join(patch)):
700 try:
702 try:
701 sno = int(patch)
703 sno = int(patch)
702 except(ValueError, OverflowError):
704 except(ValueError, OverflowError):
703 pass
705 pass
704 else:
706 else:
705 if sno < len(self.series):
707 if sno < len(self.series):
706 return self.series[sno]
708 return self.series[sno]
707 if not strict:
709 if not strict:
708 # return any partial match made above
710 # return any partial match made above
709 if res:
711 if res:
710 return res
712 return res
711 minus = patch.rfind('-')
713 minus = patch.rfind('-')
712 if minus >= 0:
714 if minus >= 0:
713 res = partial_name(patch[:minus])
715 res = partial_name(patch[:minus])
714 if res:
716 if res:
715 i = self.series.index(res)
717 i = self.series.index(res)
716 try:
718 try:
717 off = int(patch[minus+1:] or 1)
719 off = int(patch[minus+1:] or 1)
718 except(ValueError, OverflowError):
720 except(ValueError, OverflowError):
719 pass
721 pass
720 else:
722 else:
721 if i - off >= 0:
723 if i - off >= 0:
722 return self.series[i - off]
724 return self.series[i - off]
723 plus = patch.rfind('+')
725 plus = patch.rfind('+')
724 if plus >= 0:
726 if plus >= 0:
725 res = partial_name(patch[:plus])
727 res = partial_name(patch[:plus])
726 if res:
728 if res:
727 i = self.series.index(res)
729 i = self.series.index(res)
728 try:
730 try:
729 off = int(patch[plus+1:] or 1)
731 off = int(patch[plus+1:] or 1)
730 except(ValueError, OverflowError):
732 except(ValueError, OverflowError):
731 pass
733 pass
732 else:
734 else:
733 if i + off < len(self.series):
735 if i + off < len(self.series):
734 return self.series[i + off]
736 return self.series[i + off]
735 raise util.Abort(_("patch %s not in series") % patch)
737 raise util.Abort(_("patch %s not in series") % patch)
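
A few hypothetical resolutions, illustrating the variations described in the comment above lookup() (assume series = ['first.patch', 'second.patch'], both applied, and that none of these arguments name a file in the patch directory):

    # lookup('second.patch') -> 'second.patch'   exact name in the series
    # lookup('sec')          -> 'second.patch'   unique substring
    # lookup('0')            -> 'first.patch'    numeric offset into the series file
    # lookup('second-1')     -> 'first.patch'    one position before 'second.patch'
    # lookup('qtip')         -> the topmost applied patch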
736
738
737 def push(self, repo, patch=None, force=False, list=False,
739 def push(self, repo, patch=None, force=False, list=False,
738 mergeq=None, wlock=None):
740 mergeq=None, wlock=None):
739 if not wlock:
741 if not wlock:
740 wlock = repo.wlock()
742 wlock = repo.wlock()
741 patch = self.lookup(patch)
743 patch = self.lookup(patch)
742 # Suppose our series file is: A B C and the current 'top' patch is B.
744 # Suppose our series file is: A B C and the current 'top' patch is B.
743 # qpush C should be performed (moving forward)
745 # qpush C should be performed (moving forward)
744 # qpush B is a NOP (no change)
746 # qpush B is a NOP (no change)
745 # qpush A is an error (can't go backwards with qpush)
747 # qpush A is an error (can't go backwards with qpush)
746 if patch:
748 if patch:
747 info = self.isapplied(patch)
749 info = self.isapplied(patch)
748 if info:
750 if info:
749 if info[0] < len(self.applied) - 1:
751 if info[0] < len(self.applied) - 1:
750 raise util.Abort(_("cannot push to a previous patch: %s") %
752 raise util.Abort(_("cannot push to a previous patch: %s") %
751 patch)
753 patch)
752 if info[0] < len(self.series) - 1:
754 if info[0] < len(self.series) - 1:
753 self.ui.warn(_('qpush: %s is already at the top\n') % patch)
755 self.ui.warn(_('qpush: %s is already at the top\n') % patch)
754 else:
756 else:
755 self.ui.warn(_('all patches are currently applied\n'))
757 self.ui.warn(_('all patches are currently applied\n'))
756 return
758 return
757
759
758 # Following the above example, starting at 'top' of B:
760 # Following the above example, starting at 'top' of B:
759 # qpush should be performed (pushes C), but a subsequent qpush without
761 # qpush should be performed (pushes C), but a subsequent qpush without
760 # an argument is an error (nothing to apply). This allows a loop
762 # an argument is an error (nothing to apply). This allows a loop
761 # of "...while hg qpush..." to work as it detects an error when done
763 # of "...while hg qpush..." to work as it detects an error when done
762 if self.series_end() == len(self.series):
764 if self.series_end() == len(self.series):
763 self.ui.warn(_('patch series already fully applied\n'))
765 self.ui.warn(_('patch series already fully applied\n'))
764 return 1
766 return 1
765 if not force:
767 if not force:
766 self.check_localchanges(repo)
768 self.check_localchanges(repo)
767
769
768 self.applied_dirty = 1;
770 self.applied_dirty = 1;
769 start = self.series_end()
771 start = self.series_end()
770 if start > 0:
772 if start > 0:
771 self.check_toppatch(repo)
773 self.check_toppatch(repo)
772 if not patch:
774 if not patch:
773 patch = self.series[start]
775 patch = self.series[start]
774 end = start + 1
776 end = start + 1
775 else:
777 else:
776 end = self.series.index(patch, start) + 1
778 end = self.series.index(patch, start) + 1
777 s = self.series[start:end]
779 s = self.series[start:end]
778 all_files = {}
780 all_files = {}
779 try:
781 try:
780 if mergeq:
782 if mergeq:
781 ret = self.mergepatch(repo, mergeq, s, wlock)
783 ret = self.mergepatch(repo, mergeq, s, wlock)
782 else:
784 else:
783 ret = self.apply(repo, s, list, wlock=wlock,
785 ret = self.apply(repo, s, list, wlock=wlock,
784 all_files=all_files)
786 all_files=all_files)
785 except:
787 except:
786 self.ui.warn(_('cleaning up working directory...'))
788 self.ui.warn(_('cleaning up working directory...'))
787 node = repo.dirstate.parents()[0]
789 node = repo.dirstate.parents()[0]
788 hg.revert(repo, node, None, wlock)
790 hg.revert(repo, node, None, wlock)
789 unknown = repo.status(wlock=wlock)[4]
791 unknown = repo.status(wlock=wlock)[4]
790 # only remove unknown files that we know we touched or
792 # only remove unknown files that we know we touched or
791 # created while patching
793 # created while patching
792 for f in unknown:
794 for f in unknown:
793 if f in all_files:
795 if f in all_files:
794 util.unlink(repo.wjoin(f))
796 util.unlink(repo.wjoin(f))
795 self.ui.warn(_('done\n'))
797 self.ui.warn(_('done\n'))
796 raise
798 raise
797 top = self.applied[-1].name
799 top = self.applied[-1].name
798 if ret[0]:
800 if ret[0]:
799 self.ui.write("Errors during apply, please fix and refresh %s\n" %
801 self.ui.write("Errors during apply, please fix and refresh %s\n" %
800 top)
802 top)
801 else:
803 else:
802 self.ui.write("Now at: %s\n" % top)
804 self.ui.write("Now at: %s\n" % top)
803 return ret[0]
805 return ret[0]
804
806
805 def pop(self, repo, patch=None, force=False, update=True, all=False,
807 def pop(self, repo, patch=None, force=False, update=True, all=False,
806 wlock=None):
808 wlock=None):
807 def getfile(f, rev):
809 def getfile(f, rev):
808 t = repo.file(f).read(rev)
810 t = repo.file(f).read(rev)
809 repo.wfile(f, "w").write(t)
811 repo.wfile(f, "w").write(t)
810
812
811 if not wlock:
813 if not wlock:
812 wlock = repo.wlock()
814 wlock = repo.wlock()
813 if patch:
815 if patch:
814 # index, rev, patch
816 # index, rev, patch
815 info = self.isapplied(patch)
817 info = self.isapplied(patch)
816 if not info:
818 if not info:
817 patch = self.lookup(patch)
819 patch = self.lookup(patch)
818 info = self.isapplied(patch)
820 info = self.isapplied(patch)
819 if not info:
821 if not info:
820 raise util.Abort(_("patch %s is not applied") % patch)
822 raise util.Abort(_("patch %s is not applied") % patch)
821
823
822 if len(self.applied) == 0:
824 if len(self.applied) == 0:
823 # Allow qpop -a to work repeatedly,
825 # Allow qpop -a to work repeatedly,
824 # but not qpop without an argument
826 # but not qpop without an argument
825 self.ui.warn(_("no patches applied\n"))
827 self.ui.warn(_("no patches applied\n"))
826 return not all
828 return not all
827
829
828 if not update:
830 if not update:
829 parents = repo.dirstate.parents()
831 parents = repo.dirstate.parents()
830 rr = [ revlog.bin(x.rev) for x in self.applied ]
832 rr = [ revlog.bin(x.rev) for x in self.applied ]
831 for p in parents:
833 for p in parents:
832 if p in rr:
834 if p in rr:
833 self.ui.warn("qpop: forcing dirstate update\n")
835 self.ui.warn("qpop: forcing dirstate update\n")
834 update = True
836 update = True
835
837
836 if not force and update:
838 if not force and update:
837 self.check_localchanges(repo)
839 self.check_localchanges(repo)
838
840
839 self.applied_dirty = 1;
841 self.applied_dirty = 1;
840 end = len(self.applied)
842 end = len(self.applied)
841 if not patch:
843 if not patch:
842 if all:
844 if all:
843 popi = 0
845 popi = 0
844 else:
846 else:
845 popi = len(self.applied) - 1
847 popi = len(self.applied) - 1
846 else:
848 else:
847 popi = info[0] + 1
849 popi = info[0] + 1
848 if popi >= end:
850 if popi >= end:
849 self.ui.warn("qpop: %s is already at the top\n" % patch)
851 self.ui.warn("qpop: %s is already at the top\n" % patch)
850 return
852 return
851 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
853 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
852
854
853 start = info[0]
855 start = info[0]
854 rev = revlog.bin(info[1])
856 rev = revlog.bin(info[1])
855
857
856 # we know there are no local changes, so we can make a simplified
858 # we know there are no local changes, so we can make a simplified
857 # form of hg.update.
859 # form of hg.update.
858 if update:
860 if update:
859 top = self.check_toppatch(repo)
861 top = self.check_toppatch(repo)
860 qp = self.qparents(repo, rev)
862 qp = self.qparents(repo, rev)
861 changes = repo.changelog.read(qp)
863 changes = repo.changelog.read(qp)
862 mmap = repo.manifest.read(changes[0])
864 mmap = repo.manifest.read(changes[0])
863 m, a, r, d, u = repo.status(qp, top)[:5]
865 m, a, r, d, u = repo.status(qp, top)[:5]
864 if d:
866 if d:
865 raise util.Abort("deletions found between repo revs")
867 raise util.Abort("deletions found between repo revs")
866 for f in m:
868 for f in m:
867 getfile(f, mmap[f])
869 getfile(f, mmap[f])
868 for f in r:
870 for f in r:
869 getfile(f, mmap[f])
871 getfile(f, mmap[f])
870 util.set_exec(repo.wjoin(f), mmap.execf(f))
872 util.set_exec(repo.wjoin(f), mmap.execf(f))
871 repo.dirstate.update(m + r, 'n')
873 for f in m + r:
874 repo.dirstate.normal(f)
872 for f in a:
875 for f in a:
873 try:
876 try:
874 os.unlink(repo.wjoin(f))
877 os.unlink(repo.wjoin(f))
875 except OSError, e:
878 except OSError, e:
876 if e.errno != errno.ENOENT:
879 if e.errno != errno.ENOENT:
877 raise
880 raise
878 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
881 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
879 except: pass
882 except: pass
880 if a:
883 repo.dirstate.forget(f)
881 repo.dirstate.forget(a)
882 repo.dirstate.setparents(qp, revlog.nullid)
884 repo.dirstate.setparents(qp, revlog.nullid)
883 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
885 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
884 del self.applied[start:end]
886 del self.applied[start:end]
885 if len(self.applied):
887 if len(self.applied):
886 self.ui.write("Now at: %s\n" % self.applied[-1].name)
888 self.ui.write("Now at: %s\n" % self.applied[-1].name)
887 else:
889 else:
888 self.ui.write("Patch queue now empty\n")
890 self.ui.write("Patch queue now empty\n")
889
891
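The hunks above are the heart of this changeset: the bulk dirstate.update(files, state) and dirstate.forget(list) calls are replaced with per-file methods. A minimal sketch of that mapping, assuming only a dirstate-like object with the new methods (illustration only, not mq code):

# illustration only: how the removed bulk calls map onto the new per-file API
def mark_clean(dirstate, files):
    # was: dirstate.update(files, 'n')
    for f in files:
        dirstate.normal(f)       # record the file as tracked and clean

def untrack(dirstate, files):
    # was: dirstate.forget(files)
    for f in files:
        dirstate.forget(f)       # drop from tracking without scheduling removal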
890 def diff(self, repo, pats, opts):
892 def diff(self, repo, pats, opts):
891 top = self.check_toppatch(repo)
893 top = self.check_toppatch(repo)
892 if not top:
894 if not top:
893 self.ui.write("No patches applied\n")
895 self.ui.write("No patches applied\n")
894 return
896 return
895 qp = self.qparents(repo, top)
897 qp = self.qparents(repo, top)
896 if opts.get('git'):
898 if opts.get('git'):
897 self.diffopts().git = True
899 self.diffopts().git = True
898 self.printdiff(repo, qp, files=pats, opts=opts)
900 self.printdiff(repo, qp, files=pats, opts=opts)
899
901
900 def refresh(self, repo, pats=None, **opts):
902 def refresh(self, repo, pats=None, **opts):
901 if len(self.applied) == 0:
903 if len(self.applied) == 0:
902 self.ui.write("No patches applied\n")
904 self.ui.write("No patches applied\n")
903 return 1
905 return 1
904 wlock = repo.wlock()
906 wlock = repo.wlock()
905 self.check_toppatch(repo)
907 self.check_toppatch(repo)
906 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
908 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
907 top = revlog.bin(top)
909 top = revlog.bin(top)
908 cparents = repo.changelog.parents(top)
910 cparents = repo.changelog.parents(top)
909 patchparent = self.qparents(repo, top)
911 patchparent = self.qparents(repo, top)
910 message, comments, user, date, patchfound = self.readheaders(patchfn)
912 message, comments, user, date, patchfound = self.readheaders(patchfn)
911
913
912 patchf = self.opener(patchfn, 'r+')
914 patchf = self.opener(patchfn, 'r+')
913
915
914 # if the patch was a git patch, refresh it as a git patch
916 # if the patch was a git patch, refresh it as a git patch
915 for line in patchf:
917 for line in patchf:
916 if line.startswith('diff --git'):
918 if line.startswith('diff --git'):
917 self.diffopts().git = True
919 self.diffopts().git = True
918 break
920 break
919 patchf.seek(0)
921 patchf.seek(0)
920 patchf.truncate()
922 patchf.truncate()
921
923
922 msg = opts.get('msg', '').rstrip()
924 msg = opts.get('msg', '').rstrip()
923 if msg:
925 if msg:
924 if comments:
926 if comments:
925 # Remove existing message.
927 # Remove existing message.
926 ci = 0
928 ci = 0
927 subj = None
929 subj = None
928 for mi in xrange(len(message)):
930 for mi in xrange(len(message)):
929 if comments[ci].lower().startswith('subject: '):
931 if comments[ci].lower().startswith('subject: '):
930 subj = comments[ci][9:]
932 subj = comments[ci][9:]
931 while message[mi] != comments[ci] and message[mi] != subj:
933 while message[mi] != comments[ci] and message[mi] != subj:
932 ci += 1
934 ci += 1
933 del comments[ci]
935 del comments[ci]
934 comments.append(msg)
936 comments.append(msg)
935 if comments:
937 if comments:
936 comments = "\n".join(comments) + '\n\n'
938 comments = "\n".join(comments) + '\n\n'
937 patchf.write(comments)
939 patchf.write(comments)
938
940
939 if opts.get('git'):
941 if opts.get('git'):
940 self.diffopts().git = True
942 self.diffopts().git = True
941 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
943 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
942 tip = repo.changelog.tip()
944 tip = repo.changelog.tip()
943 if top == tip:
945 if top == tip:
944 # if the top of our patch queue is also the tip, there is an
946 # if the top of our patch queue is also the tip, there is an
945 # optimization here. We update the dirstate in place and strip
947 # optimization here. We update the dirstate in place and strip
946 # off the tip commit. Then just commit the current directory
948 # off the tip commit. Then just commit the current directory
947 # tree. We can also send repo.commit the list of files
949 # tree. We can also send repo.commit the list of files
948 # changed to speed up the diff
950 # changed to speed up the diff
949 #
951 #
950 # in short mode, we only diff the files included in the
952 # in short mode, we only diff the files included in the
951 # patch already
953 # patch already
952 #
954 #
953 # this should really read:
955 # this should really read:
954 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
956 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
955 # but we do it backwards to take advantage of manifest/chlog
957 # but we do it backwards to take advantage of manifest/chlog
956 # caching against the next repo.status call
958 # caching against the next repo.status call
957 #
959 #
958 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
960 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
959 changes = repo.changelog.read(tip)
961 changes = repo.changelog.read(tip)
960 man = repo.manifest.read(changes[0])
962 man = repo.manifest.read(changes[0])
961 aaa = aa[:]
963 aaa = aa[:]
962 if opts.get('short'):
964 if opts.get('short'):
963 filelist = mm + aa + dd
965 filelist = mm + aa + dd
964 match = dict.fromkeys(filelist).__contains__
966 match = dict.fromkeys(filelist).__contains__
965 else:
967 else:
966 filelist = None
968 filelist = None
967 match = util.always
969 match = util.always
968 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
970 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
969
971
970 # we might end up with files that were added between tip and
972 # we might end up with files that were added between tip and
971 # the dirstate parent, but then changed in the local dirstate.
973 # the dirstate parent, but then changed in the local dirstate.
972 # in this case, we want them to only show up in the added section
974 # in this case, we want them to only show up in the added section
973 for x in m:
975 for x in m:
974 if x not in aa:
976 if x not in aa:
975 mm.append(x)
977 mm.append(x)
976 # we might end up with files added by the local dirstate that
978 # we might end up with files added by the local dirstate that
977 # were deleted by the patch. In this case, they should only
979 # were deleted by the patch. In this case, they should only
978 # show up in the changed section.
980 # show up in the changed section.
979 for x in a:
981 for x in a:
980 if x in dd:
982 if x in dd:
981 del dd[dd.index(x)]
983 del dd[dd.index(x)]
982 mm.append(x)
984 mm.append(x)
983 else:
985 else:
984 aa.append(x)
986 aa.append(x)
985 # make sure any files deleted in the local dirstate
987 # make sure any files deleted in the local dirstate
986 # are not in the add or change column of the patch
988 # are not in the add or change column of the patch
987 forget = []
989 forget = []
988 for x in d + r:
990 for x in d + r:
989 if x in aa:
991 if x in aa:
990 del aa[aa.index(x)]
992 del aa[aa.index(x)]
991 forget.append(x)
993 forget.append(x)
992 continue
994 continue
993 elif x in mm:
995 elif x in mm:
994 del mm[mm.index(x)]
996 del mm[mm.index(x)]
995 dd.append(x)
997 dd.append(x)
996
998
997 m = util.unique(mm)
999 m = util.unique(mm)
998 r = util.unique(dd)
1000 r = util.unique(dd)
999 a = util.unique(aa)
1001 a = util.unique(aa)
1000 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1002 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1001 filelist = util.unique(c[0] + c[1] + c[2])
1003 filelist = util.unique(c[0] + c[1] + c[2])
1002 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1004 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1003 fp=patchf, changes=c, opts=self.diffopts())
1005 fp=patchf, changes=c, opts=self.diffopts())
1004 patchf.close()
1006 patchf.close()
1005
1007
1006 repo.dirstate.setparents(*cparents)
1008 repo.dirstate.setparents(*cparents)
1007 copies = {}
1009 copies = {}
1008 for dst in a:
1010 for dst in a:
1009 src = repo.dirstate.copied(dst)
1011 src = repo.dirstate.copied(dst)
1010 if src is None:
1012 if src is None:
1011 continue
1013 continue
1012 copies.setdefault(src, []).append(dst)
1014 copies.setdefault(src, []).append(dst)
1013 repo.dirstate.update(a, 'a')
1015 repo.dirstate.add(dst)
1014 # remember the copies between patchparent and tip
1016 # remember the copies between patchparent and tip
1015 # this may be slow, so don't do it if we're not tracking copies
1017 # this may be slow, so don't do it if we're not tracking copies
1016 if self.diffopts().git:
1018 if self.diffopts().git:
1017 for dst in aaa:
1019 for dst in aaa:
1018 f = repo.file(dst)
1020 f = repo.file(dst)
1019 src = f.renamed(man[dst])
1021 src = f.renamed(man[dst])
1020 if src:
1022 if src:
1021 copies[src[0]] = copies.get(dst, [])
1023 copies[src[0]] = copies.get(dst, [])
1022 if dst in a:
1024 if dst in a:
1023 copies[src[0]].append(dst)
1025 copies[src[0]].append(dst)
1024 # we can't copy a file created by the patch itself
1026 # we can't copy a file created by the patch itself
1025 if dst in copies:
1027 if dst in copies:
1026 del copies[dst]
1028 del copies[dst]
1027 for src, dsts in copies.iteritems():
1029 for src, dsts in copies.iteritems():
1028 for dst in dsts:
1030 for dst in dsts:
1029 repo.dirstate.copy(src, dst)
1031 repo.dirstate.copy(src, dst)
1030 repo.dirstate.update(r, 'r')
1032 for f in r:
1033 repo.dirstate.remove(f)
1031 # if the patch excludes a modified file, mark that file with mtime=0
1034 # if the patch excludes a modified file, mark that file with mtime=0
1032 # so status can see it.
1035 # so status can see it.
1033 mm = []
1036 mm = []
1034 for i in xrange(len(m)-1, -1, -1):
1037 for i in xrange(len(m)-1, -1, -1):
1035 if not matchfn(m[i]):
1038 if not matchfn(m[i]):
1036 mm.append(m[i])
1039 mm.append(m[i])
1037 del m[i]
1040 del m[i]
1038 repo.dirstate.update(m, 'n')
1039 repo.dirstate.update(mm, 'n', st_mtime=-1, st_size=-1)
1040 repo.dirstate.forget(forget)
1041 for f in m:
1042 repo.dirstate.normal(f)
1043 for f in mm:
1044 repo.dirstate.normaldirty(f)
1045 for f in forget:
1046 repo.dirstate.forget(f)
1041
1047
1042 if not msg:
1048 if not msg:
1043 if not message:
1049 if not message:
1044 message = "[mq]: %s\n" % patchfn
1050 message = "[mq]: %s\n" % patchfn
1045 else:
1051 else:
1046 message = "\n".join(message)
1052 message = "\n".join(message)
1047 else:
1053 else:
1048 message = msg
1054 message = msg
1049
1055
1050 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1056 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1051 n = repo.commit(filelist, message, changes[1], match=matchfn,
1057 n = repo.commit(filelist, message, changes[1], match=matchfn,
1052 force=1, wlock=wlock)
1058 force=1, wlock=wlock)
1053 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1059 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1054 self.applied_dirty = 1
1060 self.applied_dirty = 1
1055 self.removeundo(repo)
1061 self.removeundo(repo)
1056 else:
1062 else:
1057 self.printdiff(repo, patchparent, fp=patchf)
1063 self.printdiff(repo, patchparent, fp=patchf)
1058 patchf.close()
1064 patchf.close()
1059 added = repo.status()[1]
1065 added = repo.status()[1]
1060 for a in added:
1066 for a in added:
1061 f = repo.wjoin(a)
1067 f = repo.wjoin(a)
1062 try:
1068 try:
1063 os.unlink(f)
1069 os.unlink(f)
1064 except OSError, e:
1070 except OSError, e:
1065 if e.errno != errno.ENOENT:
1071 if e.errno != errno.ENOENT:
1066 raise
1072 raise
1067 try: os.removedirs(os.path.dirname(f))
1073 try: os.removedirs(os.path.dirname(f))
1068 except: pass
1074 except: pass
1069 # forget the file copies in the dirstate
1075 # forget the file copies in the dirstate
1070 # push should readd the files later on
1076 # push should readd the files later on
1071 repo.dirstate.forget(added)
1077 repo.dirstate.forget(a)
1072 self.pop(repo, force=True, wlock=wlock)
1078 self.pop(repo, force=True, wlock=wlock)
1073 self.push(repo, force=True, wlock=wlock)
1079 self.push(repo, force=True, wlock=wlock)
1074
1080
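The comments above describe how refresh() merges the patch's own status (patchparent to tip) with the local working-directory status when the top patch is also the tip. A standalone sketch of those reclassification rules, using hypothetical names and plain lists (not mq code):

def combine_status(patch_status, local_status):
    # patch_status: (modified, added, removed) between patchparent and tip
    # local_status: (modified, added, removed) between tip and the working dir
    mm, aa, dd = (list(x) for x in patch_status)
    m, a, r = (list(x) for x in local_status)
    forget = []
    # locally modified files the patch did not add stay in the modified column
    for x in m:
        if x not in aa:
            mm.append(x)
    # locally added files the patch deleted go back to modified; others stay added
    for x in a:
        if x in dd:
            dd.remove(x)
            mm.append(x)
        else:
            aa.append(x)
    # locally deleted files leave the patch's added/modified columns
    for x in r:
        if x in aa:
            aa.remove(x)
            forget.append(x)    # the dirstate should forget these
        elif x in mm:
            mm.remove(x)
            dd.append(x)
    return mm, aa, dd, forget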
1075 def init(self, repo, create=False):
1081 def init(self, repo, create=False):
1076 if not create and os.path.isdir(self.path):
1082 if not create and os.path.isdir(self.path):
1077 raise util.Abort(_("patch queue directory already exists"))
1083 raise util.Abort(_("patch queue directory already exists"))
1078 try:
1084 try:
1079 os.mkdir(self.path)
1085 os.mkdir(self.path)
1080 except OSError, inst:
1086 except OSError, inst:
1081 if inst.errno != errno.EEXIST or not create:
1087 if inst.errno != errno.EEXIST or not create:
1082 raise
1088 raise
1083 if create:
1089 if create:
1084 return self.qrepo(create=True)
1090 return self.qrepo(create=True)
1085
1091
1086 def unapplied(self, repo, patch=None):
1092 def unapplied(self, repo, patch=None):
1087 if patch and patch not in self.series:
1093 if patch and patch not in self.series:
1088 raise util.Abort(_("patch %s is not in series file") % patch)
1094 raise util.Abort(_("patch %s is not in series file") % patch)
1089 if not patch:
1095 if not patch:
1090 start = self.series_end()
1096 start = self.series_end()
1091 else:
1097 else:
1092 start = self.series.index(patch) + 1
1098 start = self.series.index(patch) + 1
1093 unapplied = []
1099 unapplied = []
1094 for i in xrange(start, len(self.series)):
1100 for i in xrange(start, len(self.series)):
1095 pushable, reason = self.pushable(i)
1101 pushable, reason = self.pushable(i)
1096 if pushable:
1102 if pushable:
1097 unapplied.append((i, self.series[i]))
1103 unapplied.append((i, self.series[i]))
1098 self.explain_pushable(i)
1104 self.explain_pushable(i)
1099 return unapplied
1105 return unapplied
1100
1106
1101 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1107 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1102 summary=False):
1108 summary=False):
1103 def displayname(patchname):
1109 def displayname(patchname):
1104 if summary:
1110 if summary:
1105 msg = self.readheaders(patchname)[0]
1111 msg = self.readheaders(patchname)[0]
1106 msg = msg and ': ' + msg[0] or ': '
1112 msg = msg and ': ' + msg[0] or ': '
1107 else:
1113 else:
1108 msg = ''
1114 msg = ''
1109 return '%s%s' % (patchname, msg)
1115 return '%s%s' % (patchname, msg)
1110
1116
1111 applied = dict.fromkeys([p.name for p in self.applied])
1117 applied = dict.fromkeys([p.name for p in self.applied])
1112 if length is None:
1118 if length is None:
1113 length = len(self.series) - start
1119 length = len(self.series) - start
1114 if not missing:
1120 if not missing:
1115 for i in xrange(start, start+length):
1121 for i in xrange(start, start+length):
1116 patch = self.series[i]
1122 patch = self.series[i]
1117 if patch in applied:
1123 if patch in applied:
1118 stat = 'A'
1124 stat = 'A'
1119 elif self.pushable(i)[0]:
1125 elif self.pushable(i)[0]:
1120 stat = 'U'
1126 stat = 'U'
1121 else:
1127 else:
1122 stat = 'G'
1128 stat = 'G'
1123 pfx = ''
1129 pfx = ''
1124 if self.ui.verbose:
1130 if self.ui.verbose:
1125 pfx = '%d %s ' % (i, stat)
1131 pfx = '%d %s ' % (i, stat)
1126 elif status and status != stat:
1132 elif status and status != stat:
1127 continue
1133 continue
1128 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1134 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1129 else:
1135 else:
1130 msng_list = []
1136 msng_list = []
1131 for root, dirs, files in os.walk(self.path):
1137 for root, dirs, files in os.walk(self.path):
1132 d = root[len(self.path) + 1:]
1138 d = root[len(self.path) + 1:]
1133 for f in files:
1139 for f in files:
1134 fl = os.path.join(d, f)
1140 fl = os.path.join(d, f)
1135 if (fl not in self.series and
1141 if (fl not in self.series and
1136 fl not in (self.status_path, self.series_path,
1142 fl not in (self.status_path, self.series_path,
1137 self.guards_path)
1143 self.guards_path)
1138 and not fl.startswith('.')):
1144 and not fl.startswith('.')):
1139 msng_list.append(fl)
1145 msng_list.append(fl)
1140 msng_list.sort()
1146 msng_list.sort()
1141 for x in msng_list:
1147 for x in msng_list:
1142 pfx = self.ui.verbose and ('D ') or ''
1148 pfx = self.ui.verbose and ('D ') or ''
1143 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1149 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1144
1150
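Given the prefix format built above ('%d %s ') and the status letters A (applied), U (unapplied), G (guarded) and D (missing, with --missing), verbose output would look roughly like this; the patch names are placeholders:

hg qseries -v
0 A first.patch
1 A second.patch
2 U pending.patch
3 G guarded.patch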
1145 def issaveline(self, l):
1151 def issaveline(self, l):
1146 if l.name == '.hg.patches.save.line':
1152 if l.name == '.hg.patches.save.line':
1147 return True
1153 return True
1148
1154
1149 def qrepo(self, create=False):
1155 def qrepo(self, create=False):
1150 if create or os.path.isdir(self.join(".hg")):
1156 if create or os.path.isdir(self.join(".hg")):
1151 return hg.repository(self.ui, path=self.path, create=create)
1157 return hg.repository(self.ui, path=self.path, create=create)
1152
1158
1153 def restore(self, repo, rev, delete=None, qupdate=None):
1159 def restore(self, repo, rev, delete=None, qupdate=None):
1154 c = repo.changelog.read(rev)
1160 c = repo.changelog.read(rev)
1155 desc = c[4].strip()
1161 desc = c[4].strip()
1156 lines = desc.splitlines()
1162 lines = desc.splitlines()
1157 i = 0
1163 i = 0
1158 datastart = None
1164 datastart = None
1159 series = []
1165 series = []
1160 applied = []
1166 applied = []
1161 qpp = None
1167 qpp = None
1162 for i in xrange(0, len(lines)):
1168 for i in xrange(0, len(lines)):
1163 if lines[i] == 'Patch Data:':
1169 if lines[i] == 'Patch Data:':
1164 datastart = i + 1
1170 datastart = i + 1
1165 elif lines[i].startswith('Dirstate:'):
1171 elif lines[i].startswith('Dirstate:'):
1166 l = lines[i].rstrip()
1172 l = lines[i].rstrip()
1167 l = l[10:].split(' ')
1173 l = l[10:].split(' ')
1168 qpp = [ hg.bin(x) for x in l ]
1174 qpp = [ hg.bin(x) for x in l ]
1169 elif datastart != None:
1175 elif datastart != None:
1170 l = lines[i].rstrip()
1176 l = lines[i].rstrip()
1171 se = statusentry(l)
1177 se = statusentry(l)
1172 file_ = se.name
1178 file_ = se.name
1173 if se.rev:
1179 if se.rev:
1174 applied.append(se)
1180 applied.append(se)
1175 else:
1181 else:
1176 series.append(file_)
1182 series.append(file_)
1177 if datastart == None:
1183 if datastart == None:
1178 self.ui.warn("No saved patch data found\n")
1184 self.ui.warn("No saved patch data found\n")
1179 return 1
1185 return 1
1180 self.ui.warn("restoring status: %s\n" % lines[0])
1186 self.ui.warn("restoring status: %s\n" % lines[0])
1181 self.full_series = series
1187 self.full_series = series
1182 self.applied = applied
1188 self.applied = applied
1183 self.parse_series()
1189 self.parse_series()
1184 self.series_dirty = 1
1190 self.series_dirty = 1
1185 self.applied_dirty = 1
1191 self.applied_dirty = 1
1186 heads = repo.changelog.heads()
1192 heads = repo.changelog.heads()
1187 if delete:
1193 if delete:
1188 if rev not in heads:
1194 if rev not in heads:
1189 self.ui.warn("save entry has children, leaving it alone\n")
1195 self.ui.warn("save entry has children, leaving it alone\n")
1190 else:
1196 else:
1191 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1197 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1192 pp = repo.dirstate.parents()
1198 pp = repo.dirstate.parents()
1193 if rev in pp:
1199 if rev in pp:
1194 update = True
1200 update = True
1195 else:
1201 else:
1196 update = False
1202 update = False
1197 self.strip(repo, rev, update=update, backup='strip')
1203 self.strip(repo, rev, update=update, backup='strip')
1198 if qpp:
1204 if qpp:
1199 self.ui.warn("saved queue repository parents: %s %s\n" %
1205 self.ui.warn("saved queue repository parents: %s %s\n" %
1200 (hg.short(qpp[0]), hg.short(qpp[1])))
1206 (hg.short(qpp[0]), hg.short(qpp[1])))
1201 if qupdate:
1207 if qupdate:
1202 print "queue directory updating"
1208 print "queue directory updating"
1203 r = self.qrepo()
1209 r = self.qrepo()
1204 if not r:
1210 if not r:
1205 self.ui.warn("Unable to load queue repository\n")
1211 self.ui.warn("Unable to load queue repository\n")
1206 return 1
1212 return 1
1207 hg.clean(r, qpp[0])
1213 hg.clean(r, qpp[0])
1208
1214
1209 def save(self, repo, msg=None):
1215 def save(self, repo, msg=None):
1210 if len(self.applied) == 0:
1216 if len(self.applied) == 0:
1211 self.ui.warn("save: no patches applied, exiting\n")
1217 self.ui.warn("save: no patches applied, exiting\n")
1212 return 1
1218 return 1
1213 if self.issaveline(self.applied[-1]):
1219 if self.issaveline(self.applied[-1]):
1214 self.ui.warn("status is already saved\n")
1220 self.ui.warn("status is already saved\n")
1215 return 1
1221 return 1
1216
1222
1217 ar = [ ':' + x for x in self.full_series ]
1223 ar = [ ':' + x for x in self.full_series ]
1218 if not msg:
1224 if not msg:
1219 msg = "hg patches saved state"
1225 msg = "hg patches saved state"
1220 else:
1226 else:
1221 msg = "hg patches: " + msg.rstrip('\r\n')
1227 msg = "hg patches: " + msg.rstrip('\r\n')
1222 r = self.qrepo()
1228 r = self.qrepo()
1223 if r:
1229 if r:
1224 pp = r.dirstate.parents()
1230 pp = r.dirstate.parents()
1225 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1231 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1226 msg += "\n\nPatch Data:\n"
1232 msg += "\n\nPatch Data:\n"
1227 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1233 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1228 "\n".join(ar) + '\n' or "")
1234 "\n".join(ar) + '\n' or "")
1229 n = repo.commit(None, text, user=None, force=1)
1235 n = repo.commit(None, text, user=None, force=1)
1230 if not n:
1236 if not n:
1231 self.ui.warn("repo commit failed\n")
1237 self.ui.warn("repo commit failed\n")
1232 return 1
1238 return 1
1233 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1239 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1234 self.applied_dirty = 1
1240 self.applied_dirty = 1
1235 self.removeundo(repo)
1241 self.removeundo(repo)
1236
1242
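Assembling the message built in save() above, the description of a save changeset would look roughly like the following (hashes and patch names are placeholders; the Dirstate line only appears when a versioned patch repository exists). restore() parses exactly this layout:

hg patches saved state
Dirstate: 8c3d8a1bd2... 0000000000...

Patch Data:
f0a1b2c3d4...:applied-fix.patch
:unapplied-feature.patch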
1237 def full_series_end(self):
1243 def full_series_end(self):
1238 if len(self.applied) > 0:
1244 if len(self.applied) > 0:
1239 p = self.applied[-1].name
1245 p = self.applied[-1].name
1240 end = self.find_series(p)
1246 end = self.find_series(p)
1241 if end == None:
1247 if end == None:
1242 return len(self.full_series)
1248 return len(self.full_series)
1243 return end + 1
1249 return end + 1
1244 return 0
1250 return 0
1245
1251
1246 def series_end(self, all_patches=False):
1252 def series_end(self, all_patches=False):
1247 """If all_patches is False, return the index of the next pushable patch
1253 """If all_patches is False, return the index of the next pushable patch
1248 in the series, or the series length. If all_patches is True, return the
1254 in the series, or the series length. If all_patches is True, return the
1249 index of the first patch past the last applied one.
1255 index of the first patch past the last applied one.
1250 """
1256 """
1251 end = 0
1257 end = 0
1252 def next(start):
1258 def next(start):
1253 if all_patches:
1259 if all_patches:
1254 return start
1260 return start
1255 i = start
1261 i = start
1256 while i < len(self.series):
1262 while i < len(self.series):
1257 p, reason = self.pushable(i)
1263 p, reason = self.pushable(i)
1258 if p:
1264 if p:
1259 break
1265 break
1260 self.explain_pushable(i)
1266 self.explain_pushable(i)
1261 i += 1
1267 i += 1
1262 return i
1268 return i
1263 if len(self.applied) > 0:
1269 if len(self.applied) > 0:
1264 p = self.applied[-1].name
1270 p = self.applied[-1].name
1265 try:
1271 try:
1266 end = self.series.index(p)
1272 end = self.series.index(p)
1267 except ValueError:
1273 except ValueError:
1268 return 0
1274 return 0
1269 return next(end + 1)
1275 return next(end + 1)
1270 return next(end)
1276 return next(end)
1271
1277
1272 def appliedname(self, index):
1278 def appliedname(self, index):
1273 pname = self.applied[index].name
1279 pname = self.applied[index].name
1274 if not self.ui.verbose:
1280 if not self.ui.verbose:
1275 p = pname
1281 p = pname
1276 else:
1282 else:
1277 p = str(self.series.index(pname)) + " " + pname
1283 p = str(self.series.index(pname)) + " " + pname
1278 return p
1284 return p
1279
1285
1280 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1286 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1281 force=None, git=False):
1287 force=None, git=False):
1282 def checkseries(patchname):
1288 def checkseries(patchname):
1283 if patchname in self.series:
1289 if patchname in self.series:
1284 raise util.Abort(_('patch %s is already in the series file')
1290 raise util.Abort(_('patch %s is already in the series file')
1285 % patchname)
1291 % patchname)
1286 def checkfile(patchname):
1292 def checkfile(patchname):
1287 if not force and os.path.exists(self.join(patchname)):
1293 if not force and os.path.exists(self.join(patchname)):
1288 raise util.Abort(_('patch "%s" already exists')
1294 raise util.Abort(_('patch "%s" already exists')
1289 % patchname)
1295 % patchname)
1290
1296
1291 if rev:
1297 if rev:
1292 if files:
1298 if files:
1293 raise util.Abort(_('option "-r" not valid when importing '
1299 raise util.Abort(_('option "-r" not valid when importing '
1294 'files'))
1300 'files'))
1295 rev = cmdutil.revrange(repo, rev)
1301 rev = cmdutil.revrange(repo, rev)
1296 rev.sort(lambda x, y: cmp(y, x))
1302 rev.sort(lambda x, y: cmp(y, x))
1297 if (len(files) > 1 or len(rev) > 1) and patchname:
1303 if (len(files) > 1 or len(rev) > 1) and patchname:
1298 raise util.Abort(_('option "-n" not valid when importing multiple '
1304 raise util.Abort(_('option "-n" not valid when importing multiple '
1299 'patches'))
1305 'patches'))
1300 i = 0
1306 i = 0
1301 added = []
1307 added = []
1302 if rev:
1308 if rev:
1303 # If mq patches are applied, we can only import revisions
1309 # If mq patches are applied, we can only import revisions
1304 # that form a linear path to qbase.
1310 # that form a linear path to qbase.
1305 # Otherwise, they should form a linear path to a head.
1311 # Otherwise, they should form a linear path to a head.
1306 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1312 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1307 if len(heads) > 1:
1313 if len(heads) > 1:
1308 raise util.Abort(_('revision %d is the root of more than one '
1314 raise util.Abort(_('revision %d is the root of more than one '
1309 'branch') % rev[-1])
1315 'branch') % rev[-1])
1310 if self.applied:
1316 if self.applied:
1311 base = revlog.hex(repo.changelog.node(rev[0]))
1317 base = revlog.hex(repo.changelog.node(rev[0]))
1312 if base in [n.rev for n in self.applied]:
1318 if base in [n.rev for n in self.applied]:
1313 raise util.Abort(_('revision %d is already managed')
1319 raise util.Abort(_('revision %d is already managed')
1314 % rev[0])
1320 % rev[0])
1315 if heads != [revlog.bin(self.applied[-1].rev)]:
1321 if heads != [revlog.bin(self.applied[-1].rev)]:
1316 raise util.Abort(_('revision %d is not the parent of '
1322 raise util.Abort(_('revision %d is not the parent of '
1317 'the queue') % rev[0])
1323 'the queue') % rev[0])
1318 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1324 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1319 lastparent = repo.changelog.parentrevs(base)[0]
1325 lastparent = repo.changelog.parentrevs(base)[0]
1320 else:
1326 else:
1321 if heads != [repo.changelog.node(rev[0])]:
1327 if heads != [repo.changelog.node(rev[0])]:
1322 raise util.Abort(_('revision %d has unmanaged children')
1328 raise util.Abort(_('revision %d has unmanaged children')
1323 % rev[0])
1329 % rev[0])
1324 lastparent = None
1330 lastparent = None
1325
1331
1326 if git:
1332 if git:
1327 self.diffopts().git = True
1333 self.diffopts().git = True
1328
1334
1329 for r in rev:
1335 for r in rev:
1330 p1, p2 = repo.changelog.parentrevs(r)
1336 p1, p2 = repo.changelog.parentrevs(r)
1331 n = repo.changelog.node(r)
1337 n = repo.changelog.node(r)
1332 if p2 != revlog.nullrev:
1338 if p2 != revlog.nullrev:
1333 raise util.Abort(_('cannot import merge revision %d') % r)
1339 raise util.Abort(_('cannot import merge revision %d') % r)
1334 if lastparent and lastparent != r:
1340 if lastparent and lastparent != r:
1335 raise util.Abort(_('revision %d is not the parent of %d')
1341 raise util.Abort(_('revision %d is not the parent of %d')
1336 % (r, lastparent))
1342 % (r, lastparent))
1337 lastparent = p1
1343 lastparent = p1
1338
1344
1339 if not patchname:
1345 if not patchname:
1340 patchname = normname('%d.diff' % r)
1346 patchname = normname('%d.diff' % r)
1341 checkseries(patchname)
1347 checkseries(patchname)
1342 checkfile(patchname)
1348 checkfile(patchname)
1343 self.full_series.insert(0, patchname)
1349 self.full_series.insert(0, patchname)
1344
1350
1345 patchf = self.opener(patchname, "w")
1351 patchf = self.opener(patchname, "w")
1346 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1352 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1347 patchf.close()
1353 patchf.close()
1348
1354
1349 se = statusentry(revlog.hex(n), patchname)
1355 se = statusentry(revlog.hex(n), patchname)
1350 self.applied.insert(0, se)
1356 self.applied.insert(0, se)
1351
1357
1352 added.append(patchname)
1358 added.append(patchname)
1353 patchname = None
1359 patchname = None
1354 self.parse_series()
1360 self.parse_series()
1355 self.applied_dirty = 1
1361 self.applied_dirty = 1
1356
1362
1357 for filename in files:
1363 for filename in files:
1358 if existing:
1364 if existing:
1359 if filename == '-':
1365 if filename == '-':
1360 raise util.Abort(_('-e is incompatible with import from -'))
1366 raise util.Abort(_('-e is incompatible with import from -'))
1361 if not patchname:
1367 if not patchname:
1362 patchname = normname(filename)
1368 patchname = normname(filename)
1363 if not os.path.isfile(self.join(patchname)):
1369 if not os.path.isfile(self.join(patchname)):
1364 raise util.Abort(_("patch %s does not exist") % patchname)
1370 raise util.Abort(_("patch %s does not exist") % patchname)
1365 else:
1371 else:
1366 try:
1372 try:
1367 if filename == '-':
1373 if filename == '-':
1368 if not patchname:
1374 if not patchname:
1369 raise util.Abort(_('need --name to import a patch from -'))
1375 raise util.Abort(_('need --name to import a patch from -'))
1370 text = sys.stdin.read()
1376 text = sys.stdin.read()
1371 else:
1377 else:
1372 text = file(filename).read()
1378 text = file(filename).read()
1373 except IOError:
1379 except IOError:
1374 raise util.Abort(_("unable to read %s") % patchname)
1380 raise util.Abort(_("unable to read %s") % patchname)
1375 if not patchname:
1381 if not patchname:
1376 patchname = normname(os.path.basename(filename))
1382 patchname = normname(os.path.basename(filename))
1377 checkfile(patchname)
1383 checkfile(patchname)
1378 patchf = self.opener(patchname, "w")
1384 patchf = self.opener(patchname, "w")
1379 patchf.write(text)
1385 patchf.write(text)
1380 checkseries(patchname)
1386 checkseries(patchname)
1381 index = self.full_series_end() + i
1387 index = self.full_series_end() + i
1382 self.full_series[index:index] = [patchname]
1388 self.full_series[index:index] = [patchname]
1383 self.parse_series()
1389 self.parse_series()
1384 self.ui.warn("adding %s to series file\n" % patchname)
1390 self.ui.warn("adding %s to series file\n" % patchname)
1385 i += 1
1391 i += 1
1386 added.append(patchname)
1392 added.append(patchname)
1387 patchname = None
1393 patchname = None
1388 self.series_dirty = 1
1394 self.series_dirty = 1
1389 qrepo = self.qrepo()
1395 qrepo = self.qrepo()
1390 if qrepo:
1396 if qrepo:
1391 qrepo.add(added)
1397 qrepo.add(added)
1392
1398
1393 def delete(ui, repo, *patches, **opts):
1399 def delete(ui, repo, *patches, **opts):
1394 """remove patches from queue
1400 """remove patches from queue
1395
1401
1396 The patches must not be applied, unless they are arguments to
1402 The patches must not be applied, unless they are arguments to
1397 the --rev parameter. At least one patch or revision is required.
1403 the --rev parameter. At least one patch or revision is required.
1398
1404
1399 With --rev, mq will stop managing the named revisions (converting
1405 With --rev, mq will stop managing the named revisions (converting
1400 them to regular mercurial changesets). The patches must be applied
1406 them to regular mercurial changesets). The patches must be applied
1401 and at the base of the stack. This option is useful when the patches
1407 and at the base of the stack. This option is useful when the patches
1402 have been applied upstream.
1408 have been applied upstream.
1403
1409
1404 With --keep, the patch files are preserved in the patch directory."""
1410 With --keep, the patch files are preserved in the patch directory."""
1405 q = repo.mq
1411 q = repo.mq
1406 q.delete(repo, patches, opts)
1412 q.delete(repo, patches, opts)
1407 q.save_dirty()
1413 q.save_dirty()
1408 return 0
1414 return 0
1409
1415
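A few hedged usage examples for the behaviour described in the docstring; the patch names and revision number are placeholders:

hg qdelete obsolete.patch            # must not be applied
hg qdelete --keep experiment.patch   # drop from the series, keep the patch file
hg qdelete --rev 1234                # stop managing an applied changeset at the stack base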
1410 def applied(ui, repo, patch=None, **opts):
1416 def applied(ui, repo, patch=None, **opts):
1411 """print the patches already applied"""
1417 """print the patches already applied"""
1412 q = repo.mq
1418 q = repo.mq
1413 if patch:
1419 if patch:
1414 if patch not in q.series:
1420 if patch not in q.series:
1415 raise util.Abort(_("patch %s is not in series file") % patch)
1421 raise util.Abort(_("patch %s is not in series file") % patch)
1416 end = q.series.index(patch) + 1
1422 end = q.series.index(patch) + 1
1417 else:
1423 else:
1418 end = q.series_end(True)
1424 end = q.series_end(True)
1419 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1425 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1420
1426
1421 def unapplied(ui, repo, patch=None, **opts):
1427 def unapplied(ui, repo, patch=None, **opts):
1422 """print the patches not yet applied"""
1428 """print the patches not yet applied"""
1423 q = repo.mq
1429 q = repo.mq
1424 if patch:
1430 if patch:
1425 if patch not in q.series:
1431 if patch not in q.series:
1426 raise util.Abort(_("patch %s is not in series file") % patch)
1432 raise util.Abort(_("patch %s is not in series file") % patch)
1427 start = q.series.index(patch) + 1
1433 start = q.series.index(patch) + 1
1428 else:
1434 else:
1429 start = q.series_end(True)
1435 start = q.series_end(True)
1430 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1436 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1431
1437
1432 def qimport(ui, repo, *filename, **opts):
1438 def qimport(ui, repo, *filename, **opts):
1433 """import a patch
1439 """import a patch
1434
1440
1435 The patch will have the same name as its source file unless you
1441 The patch will have the same name as its source file unless you
1436 give it a new one with --name.
1442 give it a new one with --name.
1437
1443
1438 You can register an existing patch inside the patch directory
1444 You can register an existing patch inside the patch directory
1439 with the --existing flag.
1445 with the --existing flag.
1440
1446
1441 With --force, an existing patch of the same name will be overwritten.
1447 With --force, an existing patch of the same name will be overwritten.
1442
1448
1443 An existing changeset may be placed under mq control with --rev
1449 An existing changeset may be placed under mq control with --rev
1444 (e.g. qimport --rev tip -n patch will place tip under mq control).
1450 (e.g. qimport --rev tip -n patch will place tip under mq control).
1445 With --git, patches imported with --rev will use the git diff
1451 With --git, patches imported with --rev will use the git diff
1446 format.
1452 format.
1447 """
1453 """
1448 q = repo.mq
1454 q = repo.mq
1449 q.qimport(repo, filename, patchname=opts['name'],
1455 q.qimport(repo, filename, patchname=opts['name'],
1450 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1456 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1451 git=opts['git'])
1457 git=opts['git'])
1452 q.save_dirty()
1458 q.save_dirty()
1453 return 0
1459 return 0
1454
1460
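Hedged examples of the import modes handled above; the file and patch names are placeholders:

hg qimport ../bugfix.diff -n bugfix.patch   # copy a patch file into the queue
hg qimport -e already-there.patch           # register a file already in .hg/patches
hg qimport --rev tip -n tip.patch --git     # place tip under mq control, git-style diff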
1455 def init(ui, repo, **opts):
1461 def init(ui, repo, **opts):
1456 """init a new queue repository
1462 """init a new queue repository
1457
1463
1458 The queue repository is unversioned by default. If -c is
1464 The queue repository is unversioned by default. If -c is
1459 specified, qinit will create a separate nested repository
1465 specified, qinit will create a separate nested repository
1460 for patches (qinit -c may also be run later to convert
1466 for patches (qinit -c may also be run later to convert
1461 an unversioned patch repository into a versioned one).
1467 an unversioned patch repository into a versioned one).
1462 You can use qcommit to commit changes to this queue repository."""
1468 You can use qcommit to commit changes to this queue repository."""
1463 q = repo.mq
1469 q = repo.mq
1464 r = q.init(repo, create=opts['create_repo'])
1470 r = q.init(repo, create=opts['create_repo'])
1465 q.save_dirty()
1471 q.save_dirty()
1466 if r:
1472 if r:
1467 if not os.path.exists(r.wjoin('.hgignore')):
1473 if not os.path.exists(r.wjoin('.hgignore')):
1468 fp = r.wopener('.hgignore', 'w')
1474 fp = r.wopener('.hgignore', 'w')
1469 fp.write('syntax: glob\n')
1475 fp.write('syntax: glob\n')
1470 fp.write('status\n')
1476 fp.write('status\n')
1471 fp.write('guards\n')
1477 fp.write('guards\n')
1472 fp.close()
1478 fp.close()
1473 if not os.path.exists(r.wjoin('series')):
1479 if not os.path.exists(r.wjoin('series')):
1474 r.wopener('series', 'w').close()
1480 r.wopener('series', 'w').close()
1475 r.add(['.hgignore', 'series'])
1481 r.add(['.hgignore', 'series'])
1476 commands.add(ui, r)
1482 commands.add(ui, r)
1477 return 0
1483 return 0
1478
1484
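A hedged example of the versioned-queue workflow the docstring describes; the commit message is a placeholder:

hg qinit -c                           # nested repository in .hg/patches
hg qcommit -m "snapshot patch queue"  # commit queue changes via qcommit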
1479 def clone(ui, source, dest=None, **opts):
1485 def clone(ui, source, dest=None, **opts):
1480 '''clone main and patch repository at same time
1486 '''clone main and patch repository at same time
1481
1487
1482 If source is local, destination will have no patches applied. If
1488 If source is local, destination will have no patches applied. If
1483 source is remote, this command cannot check whether patches are
1489 source is remote, this command cannot check whether patches are
1484 applied there, so it cannot guarantee that no patches are applied
1490 applied there, so it cannot guarantee that no patches are applied
1485 in the destination. If you clone a remote repository, make sure
1491 in the destination. If you clone a remote repository, make sure
1486 beforehand that it has no patches applied.
1492 beforehand that it has no patches applied.
1487
1493
1488 Source patch repository is looked for in <src>/.hg/patches by
1494 Source patch repository is looked for in <src>/.hg/patches by
1489 default. Use -p <url> to change.
1495 default. Use -p <url> to change.
1490
1496
1491 The patch directory must be a nested mercurial repository, as
1497 The patch directory must be a nested mercurial repository, as
1492 would be created by qinit -c.
1498 would be created by qinit -c.
1493 '''
1499 '''
1494 cmdutil.setremoteconfig(ui, opts)
1500 cmdutil.setremoteconfig(ui, opts)
1495 if dest is None:
1501 if dest is None:
1496 dest = hg.defaultdest(source)
1502 dest = hg.defaultdest(source)
1497 sr = hg.repository(ui, ui.expandpath(source))
1503 sr = hg.repository(ui, ui.expandpath(source))
1498 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1504 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1499 try:
1505 try:
1500 pr = hg.repository(ui, patchdir)
1506 pr = hg.repository(ui, patchdir)
1501 except hg.RepoError:
1507 except hg.RepoError:
1502 raise util.Abort(_('versioned patch repository not found'
1508 raise util.Abort(_('versioned patch repository not found'
1503 ' (see qinit -c)'))
1509 ' (see qinit -c)'))
1504 qbase, destrev = None, None
1510 qbase, destrev = None, None
1505 if sr.local():
1511 if sr.local():
1506 if sr.mq.applied:
1512 if sr.mq.applied:
1507 qbase = revlog.bin(sr.mq.applied[0].rev)
1513 qbase = revlog.bin(sr.mq.applied[0].rev)
1508 if not hg.islocal(dest):
1514 if not hg.islocal(dest):
1509 heads = dict.fromkeys(sr.heads())
1515 heads = dict.fromkeys(sr.heads())
1510 for h in sr.heads(qbase):
1516 for h in sr.heads(qbase):
1511 del heads[h]
1517 del heads[h]
1512 destrev = heads.keys()
1518 destrev = heads.keys()
1513 destrev.append(sr.changelog.parents(qbase)[0])
1519 destrev.append(sr.changelog.parents(qbase)[0])
1514 ui.note(_('cloning main repo\n'))
1520 ui.note(_('cloning main repo\n'))
1515 sr, dr = hg.clone(ui, sr.url(), dest,
1521 sr, dr = hg.clone(ui, sr.url(), dest,
1516 pull=opts['pull'],
1522 pull=opts['pull'],
1517 rev=destrev,
1523 rev=destrev,
1518 update=False,
1524 update=False,
1519 stream=opts['uncompressed'])
1525 stream=opts['uncompressed'])
1520 ui.note(_('cloning patch repo\n'))
1526 ui.note(_('cloning patch repo\n'))
1521 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1527 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1522 dr.url() + '/.hg/patches',
1528 dr.url() + '/.hg/patches',
1523 pull=opts['pull'],
1529 pull=opts['pull'],
1524 update=not opts['noupdate'],
1530 update=not opts['noupdate'],
1525 stream=opts['uncompressed'])
1531 stream=opts['uncompressed'])
1526 if dr.local():
1532 if dr.local():
1527 if qbase:
1533 if qbase:
1528 ui.note(_('stripping applied patches from destination repo\n'))
1534 ui.note(_('stripping applied patches from destination repo\n'))
1529 dr.mq.strip(dr, qbase, update=False, backup=None)
1535 dr.mq.strip(dr, qbase, update=False, backup=None)
1530 if not opts['noupdate']:
1536 if not opts['noupdate']:
1531 ui.note(_('updating destination repo\n'))
1537 ui.note(_('updating destination repo\n'))
1532 hg.update(dr, dr.changelog.tip())
1538 hg.update(dr, dr.changelog.tip())
1533
1539
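Hedged examples; the URLs and destination names are placeholders:

hg qclone http://example.com/repo repo-with-patches
hg qclone -p http://example.com/patches http://example.com/repo local-copy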
1534 def commit(ui, repo, *pats, **opts):
1540 def commit(ui, repo, *pats, **opts):
1535 """commit changes in the queue repository"""
1541 """commit changes in the queue repository"""
1536 q = repo.mq
1542 q = repo.mq
1537 r = q.qrepo()
1543 r = q.qrepo()
1538 if not r: raise util.Abort('no queue repository')
1544 if not r: raise util.Abort('no queue repository')
1539 commands.commit(r.ui, r, *pats, **opts)
1545 commands.commit(r.ui, r, *pats, **opts)
1540
1546
1541 def series(ui, repo, **opts):
1547 def series(ui, repo, **opts):
1542 """print the entire series file"""
1548 """print the entire series file"""
1543 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1549 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1544 return 0
1550 return 0
1545
1551
1546 def top(ui, repo, **opts):
1552 def top(ui, repo, **opts):
1547 """print the name of the current patch"""
1553 """print the name of the current patch"""
1548 q = repo.mq
1554 q = repo.mq
1549 t = q.applied and q.series_end(True) or 0
1555 t = q.applied and q.series_end(True) or 0
1550 if t:
1556 if t:
1551 return q.qseries(repo, start=t-1, length=1, status='A',
1557 return q.qseries(repo, start=t-1, length=1, status='A',
1552 summary=opts.get('summary'))
1558 summary=opts.get('summary'))
1553 else:
1559 else:
1554 ui.write("No patches applied\n")
1560 ui.write("No patches applied\n")
1555 return 1
1561 return 1
1556
1562
1557 def next(ui, repo, **opts):
1563 def next(ui, repo, **opts):
1558 """print the name of the next patch"""
1564 """print the name of the next patch"""
1559 q = repo.mq
1565 q = repo.mq
1560 end = q.series_end()
1566 end = q.series_end()
1561 if end == len(q.series):
1567 if end == len(q.series):
1562 ui.write("All patches applied\n")
1568 ui.write("All patches applied\n")
1563 return 1
1569 return 1
1564 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1570 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1565
1571
1566 def prev(ui, repo, **opts):
1572 def prev(ui, repo, **opts):
1567 """print the name of the previous patch"""
1573 """print the name of the previous patch"""
1568 q = repo.mq
1574 q = repo.mq
1569 l = len(q.applied)
1575 l = len(q.applied)
1570 if l == 1:
1576 if l == 1:
1571 ui.write("Only one patch applied\n")
1577 ui.write("Only one patch applied\n")
1572 return 1
1578 return 1
1573 if not l:
1579 if not l:
1574 ui.write("No patches applied\n")
1580 ui.write("No patches applied\n")
1575 return 1
1581 return 1
1576 return q.qseries(repo, start=l-2, length=1, status='A',
1582 return q.qseries(repo, start=l-2, length=1, status='A',
1577 summary=opts.get('summary'))
1583 summary=opts.get('summary'))
1578
1584
1579 def new(ui, repo, patch, *args, **opts):
1585 def new(ui, repo, patch, *args, **opts):
1580 """create a new patch
1586 """create a new patch
1581
1587
1582 qnew creates a new patch on top of the currently-applied patch
1588 qnew creates a new patch on top of the currently-applied patch
1583 (if any). It will refuse to run if there are any outstanding
1589 (if any). It will refuse to run if there are any outstanding
1584 changes unless -f is specified, in which case the patch will
1590 changes unless -f is specified, in which case the patch will
1585 be initialised with them. You may also use -I, -X, and/or a list of
1591 be initialised with them. You may also use -I, -X, and/or a list of
1586 files after the patch name to add only changes to matching files
1592 files after the patch name to add only changes to matching files
1587 to the new patch, leaving the rest as uncommitted modifications.
1593 to the new patch, leaving the rest as uncommitted modifications.
1588
1594
1589 -e, -m or -l set the patch header as well as the commit message.
1595 -e, -m or -l set the patch header as well as the commit message.
1590 If none is specified, the patch header is empty and the
1596 If none is specified, the patch header is empty and the
1591 commit message is '[mq]: PATCH'"""
1597 commit message is '[mq]: PATCH'"""
1592 q = repo.mq
1598 q = repo.mq
1593 message = cmdutil.logmessage(opts)
1599 message = cmdutil.logmessage(opts)
1594 if opts['edit']:
1600 if opts['edit']:
1595 message = ui.edit(message, ui.username())
1601 message = ui.edit(message, ui.username())
1596 opts['msg'] = message
1602 opts['msg'] = message
1597 q.new(repo, patch, *args, **opts)
1603 q.new(repo, patch, *args, **opts)
1598 q.save_dirty()
1604 q.save_dirty()
1599 return 0
1605 return 0
1600
1606
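Hedged examples of the options mentioned in the docstring; the names are placeholders:

hg qnew -e first-attempt.patch                    # edit header and commit message
hg qnew -f -m "WIP" -I src/main.c partial.patch   # start from matching local changes only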
1601 def refresh(ui, repo, *pats, **opts):
1607 def refresh(ui, repo, *pats, **opts):
1602 """update the current patch
1608 """update the current patch
1603
1609
1604 If any file patterns are provided, the refreshed patch will contain only
1610 If any file patterns are provided, the refreshed patch will contain only
1605 the modifications that match those patterns; the remaining modifications
1611 the modifications that match those patterns; the remaining modifications
1606 will remain in the working directory.
1612 will remain in the working directory.
1607
1613
1608 hg add/remove/copy/rename work as usual, though you might want to use
1614 hg add/remove/copy/rename work as usual, though you might want to use
1609 git-style patches (--git or [diff] git=1) to track copies and renames.
1615 git-style patches (--git or [diff] git=1) to track copies and renames.
1610 """
1616 """
1611 q = repo.mq
1617 q = repo.mq
1612 message = cmdutil.logmessage(opts)
1618 message = cmdutil.logmessage(opts)
1613 if opts['edit']:
1619 if opts['edit']:
1614 if message:
1620 if message:
1615 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1621 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1616 patch = q.applied[-1].name
1622 patch = q.applied[-1].name
1617 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1623 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1618 message = ui.edit('\n'.join(message), user or ui.username())
1624 message = ui.edit('\n'.join(message), user or ui.username())
1619 ret = q.refresh(repo, pats, msg=message, **opts)
1625 ret = q.refresh(repo, pats, msg=message, **opts)
1620 q.save_dirty()
1626 q.save_dirty()
1621 return ret
1627 return ret
1622
1628
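Hedged examples; the message and patterns are placeholders:

hg qrefresh                          # fold working-directory changes into the top patch
hg qrefresh -m "better description"  # also replace the patch header
hg qrefresh --git -I src/            # git-style diff, restricted to files under src/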
1623 def diff(ui, repo, *pats, **opts):
1629 def diff(ui, repo, *pats, **opts):
1624 """diff of the current patch"""
1630 """diff of the current patch"""
1625 repo.mq.diff(repo, pats, opts)
1631 repo.mq.diff(repo, pats, opts)
1626 return 0
1632 return 0
1627
1633
1628 def fold(ui, repo, *files, **opts):
1634 def fold(ui, repo, *files, **opts):
1629 """fold the named patches into the current patch
1635 """fold the named patches into the current patch
1630
1636
1631 Patches must not yet be applied. Each patch will be successively
1637 Patches must not yet be applied. Each patch will be successively
1632 applied to the current patch in the order given. If all the
1638 applied to the current patch in the order given. If all the
1633 patches apply successfully, the current patch will be refreshed
1639 patches apply successfully, the current patch will be refreshed
1634 with the new cumulative patch, and the folded patches will
1640 with the new cumulative patch, and the folded patches will
1635 be deleted. With -k/--keep, the folded patch files will not
1641 be deleted. With -k/--keep, the folded patch files will not
1636 be removed afterwards.
1642 be removed afterwards.
1637
1643
1638 The header for each folded patch will be concatenated with
1644 The header for each folded patch will be concatenated with
1639 the current patch header, separated by a line of '* * *'."""
1645 the current patch header, separated by a line of '* * *'."""
1640
1646
1641 q = repo.mq
1647 q = repo.mq
1642
1648
1643 if not files:
1649 if not files:
1644 raise util.Abort(_('qfold requires at least one patch name'))
1650 raise util.Abort(_('qfold requires at least one patch name'))
1645 if not q.check_toppatch(repo):
1651 if not q.check_toppatch(repo):
1646 raise util.Abort(_('No patches applied'))
1652 raise util.Abort(_('No patches applied'))
1647
1653
1648 message = cmdutil.logmessage(opts)
1654 message = cmdutil.logmessage(opts)
1649 if opts['edit']:
1655 if opts['edit']:
1650 if message:
1656 if message:
1651 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1657 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1652
1658
1653 parent = q.lookup('qtip')
1659 parent = q.lookup('qtip')
1654 patches = []
1660 patches = []
1655 messages = []
1661 messages = []
1656 for f in files:
1662 for f in files:
1657 p = q.lookup(f)
1663 p = q.lookup(f)
1658 if p in patches or p == parent:
1664 if p in patches or p == parent:
1659 ui.warn(_('Skipping already folded patch %s') % p)
1665 ui.warn(_('Skipping already folded patch %s') % p)
1660 if q.isapplied(p):
1666 if q.isapplied(p):
1661 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1667 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1662 patches.append(p)
1668 patches.append(p)
1663
1669
1664 for p in patches:
1670 for p in patches:
1665 if not message:
1671 if not message:
1666 messages.append(q.readheaders(p)[0])
1672 messages.append(q.readheaders(p)[0])
1667 pf = q.join(p)
1673 pf = q.join(p)
1668 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1674 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1669 if not patchsuccess:
1675 if not patchsuccess:
1670 raise util.Abort(_('Error folding patch %s') % p)
1676 raise util.Abort(_('Error folding patch %s') % p)
1671 patch.updatedir(ui, repo, files)
1677 patch.updatedir(ui, repo, files)
1672
1678
1673 if not message:
1679 if not message:
1674 message, comments, user = q.readheaders(parent)[0:3]
1680 message, comments, user = q.readheaders(parent)[0:3]
1675 for msg in messages:
1681 for msg in messages:
1676 message.append('* * *')
1682 message.append('* * *')
1677 message.extend(msg)
1683 message.extend(msg)
1678 message = '\n'.join(message)
1684 message = '\n'.join(message)
1679
1685
1680 if opts['edit']:
1686 if opts['edit']:
1681 message = ui.edit(message, user or ui.username())
1687 message = ui.edit(message, user or ui.username())
1682
1688
1683 q.refresh(repo, msg=message)
1689 q.refresh(repo, msg=message)
1684 q.delete(repo, patches, opts)
1690 q.delete(repo, patches, opts)
1685 q.save_dirty()
1691 q.save_dirty()
1686
1692
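Hedged examples; the patch names are placeholders:

hg qfold part2.patch part3.patch   # fold both into the current top patch
hg qfold -e -k cleanup.patch       # edit the combined header, keep the folded file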
1687 def goto(ui, repo, patch, **opts):
1693 def goto(ui, repo, patch, **opts):
1688 '''push or pop patches until named patch is at top of stack'''
1694 '''push or pop patches until named patch is at top of stack'''
1689 q = repo.mq
1695 q = repo.mq
1690 patch = q.lookup(patch)
1696 patch = q.lookup(patch)
1691 if q.isapplied(patch):
1697 if q.isapplied(patch):
1692 ret = q.pop(repo, patch, force=opts['force'])
1698 ret = q.pop(repo, patch, force=opts['force'])
1693 else:
1699 else:
1694 ret = q.push(repo, patch, force=opts['force'])
1700 ret = q.push(repo, patch, force=opts['force'])
1695 q.save_dirty()
1701 q.save_dirty()
1696 return ret
1702 return ret
1697
1703
1698 def guard(ui, repo, *args, **opts):
1704 def guard(ui, repo, *args, **opts):
1699 '''set or print guards for a patch
1705 '''set or print guards for a patch
1700
1706
1701 Guards control whether a patch can be pushed. A patch with no
1707 Guards control whether a patch can be pushed. A patch with no
1702 guards is always pushed. A patch with a positive guard ("+foo") is
1708 guards is always pushed. A patch with a positive guard ("+foo") is
1703 pushed only if the qselect command has activated it. A patch with
1709 pushed only if the qselect command has activated it. A patch with
1704 a negative guard ("-foo") is never pushed if the qselect command
1710 a negative guard ("-foo") is never pushed if the qselect command
1705 has activated it.
1711 has activated it.
1706
1712
1707 With no arguments, print the currently active guards.
1713 With no arguments, print the currently active guards.
1708 With arguments, set guards for the named patch.
1714 With arguments, set guards for the named patch.
1709
1715
1710 To set a negative guard "-foo" on topmost patch ("--" is needed so
1716 To set a negative guard "-foo" on topmost patch ("--" is needed so
1711 hg will not interpret "-foo" as an option):
1717 hg will not interpret "-foo" as an option):
1712 hg qguard -- -foo
1718 hg qguard -- -foo
1713
1719
1714 To set guards on another patch:
1720 To set guards on another patch:
1715 hg qguard other.patch +2.6.17 -stable
1721 hg qguard other.patch +2.6.17 -stable
1716 '''
1722 '''
1717 def status(idx):
1723 def status(idx):
1718 guards = q.series_guards[idx] or ['unguarded']
1724 guards = q.series_guards[idx] or ['unguarded']
1719 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1725 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1720 q = repo.mq
1726 q = repo.mq
1721 patch = None
1727 patch = None
1722 args = list(args)
1728 args = list(args)
1723 if opts['list']:
1729 if opts['list']:
1724 if args or opts['none']:
1730 if args or opts['none']:
1725 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1731 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1726 for i in xrange(len(q.series)):
1732 for i in xrange(len(q.series)):
1727 status(i)
1733 status(i)
1728 return
1734 return
1729 if not args or args[0][0:1] in '-+':
1735 if not args or args[0][0:1] in '-+':
1730 if not q.applied:
1736 if not q.applied:
1731 raise util.Abort(_('no patches applied'))
1737 raise util.Abort(_('no patches applied'))
1732 patch = q.applied[-1].name
1738 patch = q.applied[-1].name
1733 if patch is None and args[0][0:1] not in '-+':
1739 if patch is None and args[0][0:1] not in '-+':
1734 patch = args.pop(0)
1740 patch = args.pop(0)
1735 if patch is None:
1741 if patch is None:
1736 raise util.Abort(_('no patch to work with'))
1742 raise util.Abort(_('no patch to work with'))
1737 if args or opts['none']:
1743 if args or opts['none']:
1738 idx = q.find_series(patch)
1744 idx = q.find_series(patch)
1739 if idx is None:
1745 if idx is None:
1740 raise util.Abort(_('no patch named %s') % patch)
1746 raise util.Abort(_('no patch named %s') % patch)
1741 q.set_guards(idx, args)
1747 q.set_guards(idx, args)
1742 q.save_dirty()
1748 q.save_dirty()
1743 else:
1749 else:
1744 status(q.series.index(q.lookup(patch)))
1750 status(q.series.index(q.lookup(patch)))
1745
1751
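A hedged example combining qguard with qselect (defined elsewhere in this extension); the guard and patch names are placeholders:

hg qguard feature.patch +experimental   # pushed only when the guard is selected
hg qguard -- -stable                    # topmost patch: skipped while "stable" is selected
hg qselect experimental
hg qpush -a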
1746 def header(ui, repo, patch=None):
1752 def header(ui, repo, patch=None):
1747 """Print the header of the topmost or specified patch"""
1753 """Print the header of the topmost or specified patch"""
1748 q = repo.mq
1754 q = repo.mq
1749
1755
1750 if patch:
1756 if patch:
1751 patch = q.lookup(patch)
1757 patch = q.lookup(patch)
1752 else:
1758 else:
1753 if not q.applied:
1759 if not q.applied:
1754 ui.write('No patches applied\n')
1760 ui.write('No patches applied\n')
1755 return 1
1761 return 1
1756 patch = q.lookup('qtip')
1762 patch = q.lookup('qtip')
1757 message = repo.mq.readheaders(patch)[0]
1763 message = repo.mq.readheaders(patch)[0]
1758
1764
1759 ui.write('\n'.join(message) + '\n')
1765 ui.write('\n'.join(message) + '\n')
1760
1766
1761 def lastsavename(path):
1767 def lastsavename(path):
1762 (directory, base) = os.path.split(path)
1768 (directory, base) = os.path.split(path)
1763 names = os.listdir(directory)
1769 names = os.listdir(directory)
1764 namere = re.compile("%s.([0-9]+)" % base)
1770 namere = re.compile("%s.([0-9]+)" % base)
1765 maxindex = None
1771 maxindex = None
1766 maxname = None
1772 maxname = None
1767 for f in names:
1773 for f in names:
1768 m = namere.match(f)
1774 m = namere.match(f)
1769 if m:
1775 if m:
1770 index = int(m.group(1))
1776 index = int(m.group(1))
1771 if maxindex is None or index > maxindex:
1777 if maxindex is None or index > maxindex:
1772 maxindex = index
1778 maxindex = index
1773 maxname = f
1779 maxname = f
1774 if maxname:
1780 if maxname:
1775 return (os.path.join(directory, maxname), maxindex)
1781 return (os.path.join(directory, maxname), maxindex)
1776 return (None, None)
1782 return (None, None)
1777
1783
1778 def savename(path):
1784 def savename(path):
1779 (last, index) = lastsavename(path)
1785 (last, index) = lastsavename(path)
1780 if last is None:
1786 if last is None:
1781 index = 0
1787 index = 0
1782 newpath = path + ".%d" % (index + 1)
1788 newpath = path + ".%d" % (index + 1)
1783 return newpath
1789 return newpath
1784
1790
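
As a side note, lastsavename() and savename() implement a simple numeric-suffix scheme for saved queues; the sketch below mirrors that logic on an in-memory directory listing (the names are made up for the example).

import re

def next_savename(base, listing):
    # mirror of savename()/lastsavename() above, without touching the filesystem
    namere = re.compile("%s.([0-9]+)" % base)
    indexes = [int(m.group(1)) for m in [namere.match(f) for f in listing] if m]
    if indexes:
        return "%s.%d" % (base, max(indexes) + 1)
    return "%s.1" % base

# next_savename("patches", ["patches.1", "patches.3", "series"]) -> "patches.4"
# next_savename("patches", ["series"])                           -> "patches.1"
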
1785 def push(ui, repo, patch=None, **opts):
1791 def push(ui, repo, patch=None, **opts):
1786 """push the next patch onto the stack"""
1792 """push the next patch onto the stack"""
1787 q = repo.mq
1793 q = repo.mq
1788 mergeq = None
1794 mergeq = None
1789
1795
1790 if opts['all']:
1796 if opts['all']:
1791 if not q.series:
1797 if not q.series:
1792 ui.warn(_('no patches in series\n'))
1798 ui.warn(_('no patches in series\n'))
1793 return 0
1799 return 0
1794 patch = q.series[-1]
1800 patch = q.series[-1]
1795 if opts['merge']:
1801 if opts['merge']:
1796 if opts['name']:
1802 if opts['name']:
1797 newpath = opts['name']
1803 newpath = opts['name']
1798 else:
1804 else:
1799 newpath, i = lastsavename(q.path)
1805 newpath, i = lastsavename(q.path)
1800 if not newpath:
1806 if not newpath:
1801 ui.warn("no saved queues found, please use -n\n")
1807 ui.warn("no saved queues found, please use -n\n")
1802 return 1
1808 return 1
1803 mergeq = queue(ui, repo.join(""), newpath)
1809 mergeq = queue(ui, repo.join(""), newpath)
1804 ui.warn("merging with queue at: %s\n" % mergeq.path)
1810 ui.warn("merging with queue at: %s\n" % mergeq.path)
1805 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1811 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1806 mergeq=mergeq)
1812 mergeq=mergeq)
1807 return ret
1813 return ret
1808
1814
1809 def pop(ui, repo, patch=None, **opts):
1815 def pop(ui, repo, patch=None, **opts):
1810 """pop the current patch off the stack"""
1816 """pop the current patch off the stack"""
1811 localupdate = True
1817 localupdate = True
1812 if opts['name']:
1818 if opts['name']:
1813 q = queue(ui, repo.join(""), repo.join(opts['name']))
1819 q = queue(ui, repo.join(""), repo.join(opts['name']))
1814 ui.warn('using patch queue: %s\n' % q.path)
1820 ui.warn('using patch queue: %s\n' % q.path)
1815 localupdate = False
1821 localupdate = False
1816 else:
1822 else:
1817 q = repo.mq
1823 q = repo.mq
1818 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1824 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1819 all=opts['all'])
1825 all=opts['all'])
1820 q.save_dirty()
1826 q.save_dirty()
1821 return ret
1827 return ret
1822
1828
1823 def rename(ui, repo, patch, name=None, **opts):
1829 def rename(ui, repo, patch, name=None, **opts):
1824 """rename a patch
1830 """rename a patch
1825
1831
1826 With one argument, renames the current patch to PATCH1.
1832 With one argument, renames the current patch to PATCH1.
1827 With two arguments, renames PATCH1 to PATCH2."""
1833 With two arguments, renames PATCH1 to PATCH2."""
1828
1834
1829 q = repo.mq
1835 q = repo.mq
1830
1836
1831 if not name:
1837 if not name:
1832 name = patch
1838 name = patch
1833 patch = None
1839 patch = None
1834
1840
1835 if patch:
1841 if patch:
1836 patch = q.lookup(patch)
1842 patch = q.lookup(patch)
1837 else:
1843 else:
1838 if not q.applied:
1844 if not q.applied:
1839 ui.write(_('No patches applied\n'))
1845 ui.write(_('No patches applied\n'))
1840 return
1846 return
1841 patch = q.lookup('qtip')
1847 patch = q.lookup('qtip')
1842 absdest = q.join(name)
1848 absdest = q.join(name)
1843 if os.path.isdir(absdest):
1849 if os.path.isdir(absdest):
1844 name = normname(os.path.join(name, os.path.basename(patch)))
1850 name = normname(os.path.join(name, os.path.basename(patch)))
1845 absdest = q.join(name)
1851 absdest = q.join(name)
1846 if os.path.exists(absdest):
1852 if os.path.exists(absdest):
1847 raise util.Abort(_('%s already exists') % absdest)
1853 raise util.Abort(_('%s already exists') % absdest)
1848
1854
1849 if name in q.series:
1855 if name in q.series:
1850 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1856 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1851
1857
1852 if ui.verbose:
1858 if ui.verbose:
1853 ui.write('Renaming %s to %s\n' % (patch, name))
1859 ui.write('Renaming %s to %s\n' % (patch, name))
1854 i = q.find_series(patch)
1860 i = q.find_series(patch)
1855 guards = q.guard_re.findall(q.full_series[i])
1861 guards = q.guard_re.findall(q.full_series[i])
1856 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1862 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1857 q.parse_series()
1863 q.parse_series()
1858 q.series_dirty = 1
1864 q.series_dirty = 1
1859
1865
1860 info = q.isapplied(patch)
1866 info = q.isapplied(patch)
1861 if info:
1867 if info:
1862 q.applied[info[0]] = statusentry(info[1], name)
1868 q.applied[info[0]] = statusentry(info[1], name)
1863 q.applied_dirty = 1
1869 q.applied_dirty = 1
1864
1870
1865 util.rename(q.join(patch), absdest)
1871 util.rename(q.join(patch), absdest)
1866 r = q.qrepo()
1872 r = q.qrepo()
1867 if r:
1873 if r:
1868 wlock = r.wlock()
1874 wlock = r.wlock()
1869 if r.dirstate.state(name) == 'r':
1875 if r.dirstate.state(name) == 'r':
1870 r.undelete([name], wlock)
1876 r.undelete([name], wlock)
1871 r.copy(patch, name, wlock)
1877 r.copy(patch, name, wlock)
1872 r.remove([patch], False, wlock)
1878 r.remove([patch], False, wlock)
1873
1879
1874 q.save_dirty()
1880 q.save_dirty()
1875
1881
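
The series rewrite in qrename above keeps whatever guards were attached to the old entry. A simplified stand-in for q.guard_re (the real pattern is stricter) makes the transformation visible:

import re

guard_re = re.compile(r'#([-+]\S+)')          # simplified stand-in, not mq's exact pattern

line = "old.patch #+stable #-hack"
guards = guard_re.findall(line)               # ['+stable', '-hack']
newline = "new.patch" + ''.join([' #' + g for g in guards])
# newline == 'new.patch #+stable #-hack'
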
1876 def restore(ui, repo, rev, **opts):
1882 def restore(ui, repo, rev, **opts):
1877 """restore the queue state saved by a rev"""
1883 """restore the queue state saved by a rev"""
1878 rev = repo.lookup(rev)
1884 rev = repo.lookup(rev)
1879 q = repo.mq
1885 q = repo.mq
1880 q.restore(repo, rev, delete=opts['delete'],
1886 q.restore(repo, rev, delete=opts['delete'],
1881 qupdate=opts['update'])
1887 qupdate=opts['update'])
1882 q.save_dirty()
1888 q.save_dirty()
1883 return 0
1889 return 0
1884
1890
1885 def save(ui, repo, **opts):
1891 def save(ui, repo, **opts):
1886 """save current queue state"""
1892 """save current queue state"""
1887 q = repo.mq
1893 q = repo.mq
1888 message = cmdutil.logmessage(opts)
1894 message = cmdutil.logmessage(opts)
1889 ret = q.save(repo, msg=message)
1895 ret = q.save(repo, msg=message)
1890 if ret:
1896 if ret:
1891 return ret
1897 return ret
1892 q.save_dirty()
1898 q.save_dirty()
1893 if opts['copy']:
1899 if opts['copy']:
1894 path = q.path
1900 path = q.path
1895 if opts['name']:
1901 if opts['name']:
1896 newpath = os.path.join(q.basepath, opts['name'])
1902 newpath = os.path.join(q.basepath, opts['name'])
1897 if os.path.exists(newpath):
1903 if os.path.exists(newpath):
1898 if not os.path.isdir(newpath):
1904 if not os.path.isdir(newpath):
1899 raise util.Abort(_('destination %s exists and is not '
1905 raise util.Abort(_('destination %s exists and is not '
1900 'a directory') % newpath)
1906 'a directory') % newpath)
1901 if not opts['force']:
1907 if not opts['force']:
1902 raise util.Abort(_('destination %s exists, '
1908 raise util.Abort(_('destination %s exists, '
1903 'use -f to force') % newpath)
1909 'use -f to force') % newpath)
1904 else:
1910 else:
1905 newpath = savename(path)
1911 newpath = savename(path)
1906 ui.warn("copy %s to %s\n" % (path, newpath))
1912 ui.warn("copy %s to %s\n" % (path, newpath))
1907 util.copyfiles(path, newpath)
1913 util.copyfiles(path, newpath)
1908 if opts['empty']:
1914 if opts['empty']:
1909 try:
1915 try:
1910 os.unlink(q.join(q.status_path))
1916 os.unlink(q.join(q.status_path))
1911 except:
1917 except:
1912 pass
1918 pass
1913 return 0
1919 return 0
1914
1920
1915 def strip(ui, repo, rev, **opts):
1921 def strip(ui, repo, rev, **opts):
1916 """strip a revision and all later revs on the same branch"""
1922 """strip a revision and all later revs on the same branch"""
1917 rev = repo.lookup(rev)
1923 rev = repo.lookup(rev)
1918 backup = 'all'
1924 backup = 'all'
1919 if opts['backup']:
1925 if opts['backup']:
1920 backup = 'strip'
1926 backup = 'strip'
1921 elif opts['nobackup']:
1927 elif opts['nobackup']:
1922 backup = 'none'
1928 backup = 'none'
1923 update = repo.dirstate.parents()[0] != revlog.nullid
1929 update = repo.dirstate.parents()[0] != revlog.nullid
1924 repo.mq.strip(repo, rev, backup=backup, update=update)
1930 repo.mq.strip(repo, rev, backup=backup, update=update)
1925 return 0
1931 return 0
1926
1932
1927 def select(ui, repo, *args, **opts):
1933 def select(ui, repo, *args, **opts):
1928 '''set or print guarded patches to push
1934 '''set or print guarded patches to push
1929
1935
1930 Use the qguard command to set or print guards on patch, then use
1936 Use the qguard command to set or print guards on patch, then use
1931 qselect to tell mq which guards to use. A patch will be pushed if it
1937 qselect to tell mq which guards to use. A patch will be pushed if it
1932 has no guards or any positive guards match the currently selected guard,
1938 has no guards or any positive guards match the currently selected guard,
1933 but will not be pushed if any negative guards match the current guard.
1939 but will not be pushed if any negative guards match the current guard.
1934 For example:
1940 For example:
1935
1941
1936 qguard foo.patch -stable (negative guard)
1942 qguard foo.patch -stable (negative guard)
1937 qguard bar.patch +stable (positive guard)
1943 qguard bar.patch +stable (positive guard)
1938 qselect stable
1944 qselect stable
1939
1945
1940 This activates the "stable" guard. mq will skip foo.patch (because
1946 This activates the "stable" guard. mq will skip foo.patch (because
1941 it has a negative match) but push bar.patch (because it
1947 it has a negative match) but push bar.patch (because it
1942 has a positive match).
1948 has a positive match).
1943
1949
1944 With no arguments, prints the currently active guards.
1950 With no arguments, prints the currently active guards.
1945 With one argument, sets the active guard.
1951 With one argument, sets the active guard.
1946
1952
1947 Use -n/--none to deactivate guards (no other arguments needed).
1953 Use -n/--none to deactivate guards (no other arguments needed).
1948 When no guards are active, patches with positive guards are skipped
1954 When no guards are active, patches with positive guards are skipped
1949 and patches with negative guards are pushed.
1955 and patches with negative guards are pushed.
1950
1956
1951 qselect can change the guards on applied patches. It does not pop
1957 qselect can change the guards on applied patches. It does not pop
1952 guarded patches by default. Use --pop to pop back to the last applied
1958 guarded patches by default. Use --pop to pop back to the last applied
1953 patch that is not guarded. Use --reapply (which implies --pop) to push
1959 patch that is not guarded. Use --reapply (which implies --pop) to push
1954 back to the current patch afterwards, but skip guarded patches.
1960 back to the current patch afterwards, but skip guarded patches.
1955
1961
1956 Use -s/--series to print a list of all guards in the series file (no
1962 Use -s/--series to print a list of all guards in the series file (no
1957 other arguments needed). Use -v for more information.'''
1963 other arguments needed). Use -v for more information.'''
1958
1964
1959 q = repo.mq
1965 q = repo.mq
1960 guards = q.active()
1966 guards = q.active()
1961 if args or opts['none']:
1967 if args or opts['none']:
1962 old_unapplied = q.unapplied(repo)
1968 old_unapplied = q.unapplied(repo)
1963 old_guarded = [i for i in xrange(len(q.applied)) if
1969 old_guarded = [i for i in xrange(len(q.applied)) if
1964 not q.pushable(i)[0]]
1970 not q.pushable(i)[0]]
1965 q.set_active(args)
1971 q.set_active(args)
1966 q.save_dirty()
1972 q.save_dirty()
1967 if not args:
1973 if not args:
1968 ui.status(_('guards deactivated\n'))
1974 ui.status(_('guards deactivated\n'))
1969 if not opts['pop'] and not opts['reapply']:
1975 if not opts['pop'] and not opts['reapply']:
1970 unapplied = q.unapplied(repo)
1976 unapplied = q.unapplied(repo)
1971 guarded = [i for i in xrange(len(q.applied))
1977 guarded = [i for i in xrange(len(q.applied))
1972 if not q.pushable(i)[0]]
1978 if not q.pushable(i)[0]]
1973 if len(unapplied) != len(old_unapplied):
1979 if len(unapplied) != len(old_unapplied):
1974 ui.status(_('number of unguarded, unapplied patches has '
1980 ui.status(_('number of unguarded, unapplied patches has '
1975 'changed from %d to %d\n') %
1981 'changed from %d to %d\n') %
1976 (len(old_unapplied), len(unapplied)))
1982 (len(old_unapplied), len(unapplied)))
1977 if len(guarded) != len(old_guarded):
1983 if len(guarded) != len(old_guarded):
1978 ui.status(_('number of guarded, applied patches has changed '
1984 ui.status(_('number of guarded, applied patches has changed '
1979 'from %d to %d\n') %
1985 'from %d to %d\n') %
1980 (len(old_guarded), len(guarded)))
1986 (len(old_guarded), len(guarded)))
1981 elif opts['series']:
1987 elif opts['series']:
1982 guards = {}
1988 guards = {}
1983 noguards = 0
1989 noguards = 0
1984 for gs in q.series_guards:
1990 for gs in q.series_guards:
1985 if not gs:
1991 if not gs:
1986 noguards += 1
1992 noguards += 1
1987 for g in gs:
1993 for g in gs:
1988 guards.setdefault(g, 0)
1994 guards.setdefault(g, 0)
1989 guards[g] += 1
1995 guards[g] += 1
1990 if ui.verbose:
1996 if ui.verbose:
1991 guards['NONE'] = noguards
1997 guards['NONE'] = noguards
1992 guards = guards.items()
1998 guards = guards.items()
1993 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1999 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1994 if guards:
2000 if guards:
1995 ui.note(_('guards in series file:\n'))
2001 ui.note(_('guards in series file:\n'))
1996 for guard, count in guards:
2002 for guard, count in guards:
1997 ui.note('%2d ' % count)
2003 ui.note('%2d ' % count)
1998 ui.write(guard, '\n')
2004 ui.write(guard, '\n')
1999 else:
2005 else:
2000 ui.note(_('no guards in series file\n'))
2006 ui.note(_('no guards in series file\n'))
2001 else:
2007 else:
2002 if guards:
2008 if guards:
2003 ui.note(_('active guards:\n'))
2009 ui.note(_('active guards:\n'))
2004 for g in guards:
2010 for g in guards:
2005 ui.write(g, '\n')
2011 ui.write(g, '\n')
2006 else:
2012 else:
2007 ui.write(_('no active guards\n'))
2013 ui.write(_('no active guards\n'))
2008 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2014 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2009 popped = False
2015 popped = False
2010 if opts['pop'] or opts['reapply']:
2016 if opts['pop'] or opts['reapply']:
2011 for i in xrange(len(q.applied)):
2017 for i in xrange(len(q.applied)):
2012 pushable, reason = q.pushable(i)
2018 pushable, reason = q.pushable(i)
2013 if not pushable:
2019 if not pushable:
2014 ui.status(_('popping guarded patches\n'))
2020 ui.status(_('popping guarded patches\n'))
2015 popped = True
2021 popped = True
2016 if i == 0:
2022 if i == 0:
2017 q.pop(repo, all=True)
2023 q.pop(repo, all=True)
2018 else:
2024 else:
2019 q.pop(repo, i-1)
2025 q.pop(repo, i-1)
2020 break
2026 break
2021 if popped:
2027 if popped:
2022 try:
2028 try:
2023 if reapply:
2029 if reapply:
2024 ui.status(_('reapplying unguarded patches\n'))
2030 ui.status(_('reapplying unguarded patches\n'))
2025 q.push(repo, reapply)
2031 q.push(repo, reapply)
2026 finally:
2032 finally:
2027 q.save_dirty()
2033 q.save_dirty()
2028
2034
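
The --pop loop above stops at the first applied patch that the new guard selection forbids and pops back to the patch just below it. A toy restatement over a precomputed list of pushable flags (an assumption made only for the example):

def pop_target(pushable_flags):
    # None: nothing guarded, 'all': pop everything, int: index of last patch to keep
    for i, ok in enumerate(pushable_flags):
        if not ok:
            if i == 0:
                return 'all'
            return i - 1
    return None

# flags for four applied patches: [True, True, False, True]
# pop_target(...) -> 1, i.e. pop until the second patch is topmost,
# matching q.pop(repo, i-1) in the loop above.
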
2029 def reposetup(ui, repo):
2035 def reposetup(ui, repo):
2030 class mqrepo(repo.__class__):
2036 class mqrepo(repo.__class__):
2031 def abort_if_wdir_patched(self, errmsg, force=False):
2037 def abort_if_wdir_patched(self, errmsg, force=False):
2032 if self.mq.applied and not force:
2038 if self.mq.applied and not force:
2033 parent = revlog.hex(self.dirstate.parents()[0])
2039 parent = revlog.hex(self.dirstate.parents()[0])
2034 if parent in [s.rev for s in self.mq.applied]:
2040 if parent in [s.rev for s in self.mq.applied]:
2035 raise util.Abort(errmsg)
2041 raise util.Abort(errmsg)
2036
2042
2037 def commit(self, *args, **opts):
2043 def commit(self, *args, **opts):
2038 if len(args) >= 6:
2044 if len(args) >= 6:
2039 force = args[5]
2045 force = args[5]
2040 else:
2046 else:
2041 force = opts.get('force')
2047 force = opts.get('force')
2042 self.abort_if_wdir_patched(
2048 self.abort_if_wdir_patched(
2043 _('cannot commit over an applied mq patch'),
2049 _('cannot commit over an applied mq patch'),
2044 force)
2050 force)
2045
2051
2046 return super(mqrepo, self).commit(*args, **opts)
2052 return super(mqrepo, self).commit(*args, **opts)
2047
2053
2048 def push(self, remote, force=False, revs=None):
2054 def push(self, remote, force=False, revs=None):
2049 if self.mq.applied and not force and not revs:
2055 if self.mq.applied and not force and not revs:
2050 raise util.Abort(_('source has mq patches applied'))
2056 raise util.Abort(_('source has mq patches applied'))
2051 return super(mqrepo, self).push(remote, force, revs)
2057 return super(mqrepo, self).push(remote, force, revs)
2052
2058
2053 def tags(self):
2059 def tags(self):
2054 if self.tagscache:
2060 if self.tagscache:
2055 return self.tagscache
2061 return self.tagscache
2056
2062
2057 tagscache = super(mqrepo, self).tags()
2063 tagscache = super(mqrepo, self).tags()
2058
2064
2059 q = self.mq
2065 q = self.mq
2060 if not q.applied:
2066 if not q.applied:
2061 return tagscache
2067 return tagscache
2062
2068
2063 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2069 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2064 mqtags.append((mqtags[-1][0], 'qtip'))
2070 mqtags.append((mqtags[-1][0], 'qtip'))
2065 mqtags.append((mqtags[0][0], 'qbase'))
2071 mqtags.append((mqtags[0][0], 'qbase'))
2066 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2072 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2067 for patch in mqtags:
2073 for patch in mqtags:
2068 if patch[1] in tagscache:
2074 if patch[1] in tagscache:
2069 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2075 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2070 else:
2076 else:
2071 tagscache[patch[1]] = patch[0]
2077 tagscache[patch[1]] = patch[0]
2072
2078
2073 return tagscache
2079 return tagscache
2074
2080
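
To make the tag handling above concrete: with three patches applied, the extra tags look like this (node names are placeholders, not real hashes).

# q.applied      -> [(nA, 'a.patch'), (nB, 'b.patch'), (nC, 'c.patch')]
# added tags     -> 'a.patch': nA, 'b.patch': nB, 'c.patch': nC,
#                   'qtip': nC, 'qbase': nA, 'qparent': first parent of nA
# a regular tag with the same name keeps its value and only triggers the
# "Tag ... overrides mq patch of the same name" warning.
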
2075 def _branchtags(self):
2081 def _branchtags(self):
2076 q = self.mq
2082 q = self.mq
2077 if not q.applied:
2083 if not q.applied:
2078 return super(mqrepo, self)._branchtags()
2084 return super(mqrepo, self)._branchtags()
2079
2085
2080 self.branchcache = {} # avoid recursion in changectx
2086 self.branchcache = {} # avoid recursion in changectx
2081 cl = self.changelog
2087 cl = self.changelog
2082 partial, last, lrev = self._readbranchcache()
2088 partial, last, lrev = self._readbranchcache()
2083
2089
2084 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2090 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2085 start = lrev + 1
2091 start = lrev + 1
2086 if start < qbase:
2092 if start < qbase:
2087 # update the cache (excluding the patches) and save it
2093 # update the cache (excluding the patches) and save it
2088 self._updatebranchcache(partial, lrev+1, qbase)
2094 self._updatebranchcache(partial, lrev+1, qbase)
2089 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2095 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2090 start = qbase
2096 start = qbase
2091 # if start = qbase, the cache is as updated as it should be.
2097 # if start = qbase, the cache is as updated as it should be.
2092 # if start > qbase, the cache includes (part of) the patches.
2098 # if start > qbase, the cache includes (part of) the patches.
2093 # we might as well use it, but we won't save it.
2099 # we might as well use it, but we won't save it.
2094
2100
2095 # update the cache up to the tip
2101 # update the cache up to the tip
2096 self._updatebranchcache(partial, start, cl.count())
2102 self._updatebranchcache(partial, start, cl.count())
2097
2103
2098 return partial
2104 return partial
2099
2105
2100 if repo.local():
2106 if repo.local():
2101 repo.__class__ = mqrepo
2107 repo.__class__ = mqrepo
2102 repo.mq = queue(ui, repo.join(""))
2108 repo.mq = queue(ui, repo.join(""))
2103
2109
2104 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2110 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2105
2111
2106 cmdtable = {
2112 cmdtable = {
2107 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2113 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2108 "qclone":
2114 "qclone":
2109 (clone,
2115 (clone,
2110 [('', 'pull', None, _('use pull protocol to copy metadata')),
2116 [('', 'pull', None, _('use pull protocol to copy metadata')),
2111 ('U', 'noupdate', None, _('do not update the new working directories')),
2117 ('U', 'noupdate', None, _('do not update the new working directories')),
2112 ('', 'uncompressed', None,
2118 ('', 'uncompressed', None,
2113 _('use uncompressed transfer (fast over LAN)')),
2119 _('use uncompressed transfer (fast over LAN)')),
2114 ('e', 'ssh', '', _('specify ssh command to use')),
2120 ('e', 'ssh', '', _('specify ssh command to use')),
2115 ('p', 'patches', '', _('location of source patch repo')),
2121 ('p', 'patches', '', _('location of source patch repo')),
2116 ('', 'remotecmd', '',
2122 ('', 'remotecmd', '',
2117 _('specify hg command to run on the remote side'))],
2123 _('specify hg command to run on the remote side'))],
2118 _('hg qclone [OPTION]... SOURCE [DEST]')),
2124 _('hg qclone [OPTION]... SOURCE [DEST]')),
2119 "qcommit|qci":
2125 "qcommit|qci":
2120 (commit,
2126 (commit,
2121 commands.table["^commit|ci"][1],
2127 commands.table["^commit|ci"][1],
2122 _('hg qcommit [OPTION]... [FILE]...')),
2128 _('hg qcommit [OPTION]... [FILE]...')),
2123 "^qdiff":
2129 "^qdiff":
2124 (diff,
2130 (diff,
2125 [('g', 'git', None, _('use git extended diff format')),
2131 [('g', 'git', None, _('use git extended diff format')),
2126 ('I', 'include', [], _('include names matching the given patterns')),
2132 ('I', 'include', [], _('include names matching the given patterns')),
2127 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2133 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2128 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2134 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2129 "qdelete|qremove|qrm":
2135 "qdelete|qremove|qrm":
2130 (delete,
2136 (delete,
2131 [('k', 'keep', None, _('keep patch file')),
2137 [('k', 'keep', None, _('keep patch file')),
2132 ('r', 'rev', [], _('stop managing a revision'))],
2138 ('r', 'rev', [], _('stop managing a revision'))],
2133 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2139 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2134 'qfold':
2140 'qfold':
2135 (fold,
2141 (fold,
2136 [('e', 'edit', None, _('edit patch header')),
2142 [('e', 'edit', None, _('edit patch header')),
2137 ('k', 'keep', None, _('keep folded patch files')),
2143 ('k', 'keep', None, _('keep folded patch files')),
2138 ] + commands.commitopts,
2144 ] + commands.commitopts,
2139 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2145 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2140 'qgoto':
2146 'qgoto':
2141 (goto,
2147 (goto,
2142 [('f', 'force', None, _('overwrite any local changes'))],
2148 [('f', 'force', None, _('overwrite any local changes'))],
2143 _('hg qgoto [OPTION]... PATCH')),
2149 _('hg qgoto [OPTION]... PATCH')),
2144 'qguard':
2150 'qguard':
2145 (guard,
2151 (guard,
2146 [('l', 'list', None, _('list all patches and guards')),
2152 [('l', 'list', None, _('list all patches and guards')),
2147 ('n', 'none', None, _('drop all guards'))],
2153 ('n', 'none', None, _('drop all guards'))],
2148 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2154 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2149 'qheader': (header, [], _('hg qheader [PATCH]')),
2155 'qheader': (header, [], _('hg qheader [PATCH]')),
2150 "^qimport":
2156 "^qimport":
2151 (qimport,
2157 (qimport,
2152 [('e', 'existing', None, 'import file in patch dir'),
2158 [('e', 'existing', None, 'import file in patch dir'),
2153 ('n', 'name', '', 'patch file name'),
2159 ('n', 'name', '', 'patch file name'),
2154 ('f', 'force', None, 'overwrite existing files'),
2160 ('f', 'force', None, 'overwrite existing files'),
2155 ('r', 'rev', [], 'place existing revisions under mq control'),
2161 ('r', 'rev', [], 'place existing revisions under mq control'),
2156 ('g', 'git', None, _('use git extended diff format'))],
2162 ('g', 'git', None, _('use git extended diff format'))],
2157 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2163 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2158 "^qinit":
2164 "^qinit":
2159 (init,
2165 (init,
2160 [('c', 'create-repo', None, 'create queue repository')],
2166 [('c', 'create-repo', None, 'create queue repository')],
2161 _('hg qinit [-c]')),
2167 _('hg qinit [-c]')),
2162 "qnew":
2168 "qnew":
2163 (new,
2169 (new,
2164 [('e', 'edit', None, _('edit commit message')),
2170 [('e', 'edit', None, _('edit commit message')),
2165 ('f', 'force', None, _('import uncommitted changes into patch')),
2171 ('f', 'force', None, _('import uncommitted changes into patch')),
2166 ('I', 'include', [], _('include names matching the given patterns')),
2172 ('I', 'include', [], _('include names matching the given patterns')),
2167 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2173 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2168 ] + commands.commitopts,
2174 ] + commands.commitopts,
2169 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2175 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2170 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2176 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2171 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2177 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2172 "^qpop":
2178 "^qpop":
2173 (pop,
2179 (pop,
2174 [('a', 'all', None, _('pop all patches')),
2180 [('a', 'all', None, _('pop all patches')),
2175 ('n', 'name', '', _('queue name to pop')),
2181 ('n', 'name', '', _('queue name to pop')),
2176 ('f', 'force', None, _('forget any local changes'))],
2182 ('f', 'force', None, _('forget any local changes'))],
2177 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2183 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2178 "^qpush":
2184 "^qpush":
2179 (push,
2185 (push,
2180 [('f', 'force', None, _('apply if the patch has rejects')),
2186 [('f', 'force', None, _('apply if the patch has rejects')),
2181 ('l', 'list', None, _('list patch name in commit text')),
2187 ('l', 'list', None, _('list patch name in commit text')),
2182 ('a', 'all', None, _('apply all patches')),
2188 ('a', 'all', None, _('apply all patches')),
2183 ('m', 'merge', None, _('merge from another queue')),
2189 ('m', 'merge', None, _('merge from another queue')),
2184 ('n', 'name', '', _('merge queue name'))],
2190 ('n', 'name', '', _('merge queue name'))],
2185 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2191 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2186 "^qrefresh":
2192 "^qrefresh":
2187 (refresh,
2193 (refresh,
2188 [('e', 'edit', None, _('edit commit message')),
2194 [('e', 'edit', None, _('edit commit message')),
2189 ('g', 'git', None, _('use git extended diff format')),
2195 ('g', 'git', None, _('use git extended diff format')),
2190 ('s', 'short', None, _('refresh only files already in the patch')),
2196 ('s', 'short', None, _('refresh only files already in the patch')),
2191 ('I', 'include', [], _('include names matching the given patterns')),
2197 ('I', 'include', [], _('include names matching the given patterns')),
2192 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2198 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2193 ] + commands.commitopts,
2199 ] + commands.commitopts,
2194 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2200 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2195 'qrename|qmv':
2201 'qrename|qmv':
2196 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2202 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2197 "qrestore":
2203 "qrestore":
2198 (restore,
2204 (restore,
2199 [('d', 'delete', None, _('delete save entry')),
2205 [('d', 'delete', None, _('delete save entry')),
2200 ('u', 'update', None, _('update queue working dir'))],
2206 ('u', 'update', None, _('update queue working dir'))],
2201 _('hg qrestore [-d] [-u] REV')),
2207 _('hg qrestore [-d] [-u] REV')),
2202 "qsave":
2208 "qsave":
2203 (save,
2209 (save,
2204 [('c', 'copy', None, _('copy patch directory')),
2210 [('c', 'copy', None, _('copy patch directory')),
2205 ('n', 'name', '', _('copy directory name')),
2211 ('n', 'name', '', _('copy directory name')),
2206 ('e', 'empty', None, _('clear queue status file')),
2212 ('e', 'empty', None, _('clear queue status file')),
2207 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2213 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2208 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2214 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2209 "qselect":
2215 "qselect":
2210 (select,
2216 (select,
2211 [('n', 'none', None, _('disable all guards')),
2217 [('n', 'none', None, _('disable all guards')),
2212 ('s', 'series', None, _('list all guards in series file')),
2218 ('s', 'series', None, _('list all guards in series file')),
2213 ('', 'pop', None, _('pop to before first guarded applied patch')),
2219 ('', 'pop', None, _('pop to before first guarded applied patch')),
2214 ('', 'reapply', None, _('pop, then reapply patches'))],
2220 ('', 'reapply', None, _('pop, then reapply patches'))],
2215 _('hg qselect [OPTION]... [GUARD]...')),
2221 _('hg qselect [OPTION]... [GUARD]...')),
2216 "qseries":
2222 "qseries":
2217 (series,
2223 (series,
2218 [('m', 'missing', None, _('print patches not in series')),
2224 [('m', 'missing', None, _('print patches not in series')),
2219 ] + seriesopts,
2225 ] + seriesopts,
2220 _('hg qseries [-ms]')),
2226 _('hg qseries [-ms]')),
2221 "^strip":
2227 "^strip":
2222 (strip,
2228 (strip,
2223 [('f', 'force', None, _('force multi-head removal')),
2229 [('f', 'force', None, _('force multi-head removal')),
2224 ('b', 'backup', None, _('bundle unrelated changesets')),
2230 ('b', 'backup', None, _('bundle unrelated changesets')),
2225 ('n', 'nobackup', None, _('no backups'))],
2231 ('n', 'nobackup', None, _('no backups'))],
2226 _('hg strip [-f] [-b] [-n] REV')),
2232 _('hg strip [-f] [-b] [-n] REV')),
2227 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2233 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2228 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2234 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2229 }
2235 }
@@ -1,3159 +1,3163 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, urllib, shlex, stat
11 import bisect, os, re, sys, urllib, shlex, stat
12 import ui, hg, util, revlog, bundlerepo, extensions
12 import ui, hg, util, revlog, bundlerepo, extensions
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import errno, version, socket
14 import errno, version, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
17 # Commands start here, listed alphabetically
17 # Commands start here, listed alphabetically
18
18
19 def add(ui, repo, *pats, **opts):
19 def add(ui, repo, *pats, **opts):
20 """add the specified files on the next commit
20 """add the specified files on the next commit
21
21
22 Schedule files to be version controlled and added to the repository.
22 Schedule files to be version controlled and added to the repository.
23
23
24 The files will be added to the repository at the next commit. To
24 The files will be added to the repository at the next commit. To
25 undo an add before that, see hg revert.
25 undo an add before that, see hg revert.
26
26
27 If no names are given, add all files in the repository.
27 If no names are given, add all files in the repository.
28 """
28 """
29
29
30 names = []
30 names = []
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
32 if exact:
32 if exact:
33 if ui.verbose:
33 if ui.verbose:
34 ui.status(_('adding %s\n') % rel)
34 ui.status(_('adding %s\n') % rel)
35 names.append(abs)
35 names.append(abs)
36 elif repo.dirstate.state(abs) == '?':
36 elif repo.dirstate.state(abs) == '?':
37 ui.status(_('adding %s\n') % rel)
37 ui.status(_('adding %s\n') % rel)
38 names.append(abs)
38 names.append(abs)
39 if not opts.get('dry_run'):
39 if not opts.get('dry_run'):
40 repo.add(names)
40 repo.add(names)
41
41
42 def addremove(ui, repo, *pats, **opts):
42 def addremove(ui, repo, *pats, **opts):
43 """add all new files, delete all missing files
43 """add all new files, delete all missing files
44
44
45 Add all new files and remove all missing files from the repository.
45 Add all new files and remove all missing files from the repository.
46
46
47 New files are ignored if they match any of the patterns in .hgignore. As
47 New files are ignored if they match any of the patterns in .hgignore. As
48 with add, these changes take effect at the next commit.
48 with add, these changes take effect at the next commit.
49
49
50 Use the -s option to detect renamed files. With a parameter > 0,
50 Use the -s option to detect renamed files. With a parameter > 0,
51 this compares every removed file with every added file and records
51 this compares every removed file with every added file and records
52 those similar enough as renames. This option takes a percentage
52 those similar enough as renames. This option takes a percentage
53 between 0 (disabled) and 100 (files must be identical) as its
53 between 0 (disabled) and 100 (files must be identical) as its
54 parameter. Detecting renamed files this way can be expensive.
54 parameter. Detecting renamed files this way can be expensive.
55 """
55 """
56 sim = float(opts.get('similarity') or 0)
56 sim = float(opts.get('similarity') or 0)
57 if sim < 0 or sim > 100:
57 if sim < 0 or sim > 100:
58 raise util.Abort(_('similarity must be between 0 and 100'))
58 raise util.Abort(_('similarity must be between 0 and 100'))
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
60
60
61 def annotate(ui, repo, *pats, **opts):
61 def annotate(ui, repo, *pats, **opts):
62 """show changeset information per file line
62 """show changeset information per file line
63
63
64 List changes in files, showing the revision id responsible for each line
64 List changes in files, showing the revision id responsible for each line
65
65
66 This command is useful to discover who did a change or when a change took
66 This command is useful to discover who did a change or when a change took
67 place.
67 place.
68
68
69 Without the -a option, annotate will avoid processing files it
69 Without the -a option, annotate will avoid processing files it
70 detects as binary. With -a, annotate will generate an annotation
70 detects as binary. With -a, annotate will generate an annotation
71 anyway, probably with undesirable results.
71 anyway, probably with undesirable results.
72 """
72 """
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
74
74
75 if not pats:
75 if not pats:
76 raise util.Abort(_('at least one file name or pattern required'))
76 raise util.Abort(_('at least one file name or pattern required'))
77
77
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
79 ('number', lambda x: str(x[0].rev())),
79 ('number', lambda x: str(x[0].rev())),
80 ('changeset', lambda x: short(x[0].node())),
80 ('changeset', lambda x: short(x[0].node())),
81 ('date', getdate),
81 ('date', getdate),
82 ('follow', lambda x: x[0].path()),
82 ('follow', lambda x: x[0].path()),
83 ]
83 ]
84
84
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
86 and not opts['follow']):
86 and not opts['follow']):
87 opts['number'] = 1
87 opts['number'] = 1
88
88
89 linenumber = opts.get('line_number') is not None
89 linenumber = opts.get('line_number') is not None
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
92
92
93 funcmap = [func for op, func in opmap if opts.get(op)]
93 funcmap = [func for op, func in opmap if opts.get(op)]
94 if linenumber:
94 if linenumber:
95 lastfunc = funcmap[-1]
95 lastfunc = funcmap[-1]
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
97
97
98 ctx = repo.changectx(opts['rev'])
98 ctx = repo.changectx(opts['rev'])
99
99
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
101 node=ctx.node()):
101 node=ctx.node()):
102 fctx = ctx.filectx(abs)
102 fctx = ctx.filectx(abs)
103 if not opts['text'] and util.binary(fctx.data()):
103 if not opts['text'] and util.binary(fctx.data()):
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
105 continue
105 continue
106
106
107 lines = fctx.annotate(follow=opts.get('follow'),
107 lines = fctx.annotate(follow=opts.get('follow'),
108 linenumber=linenumber)
108 linenumber=linenumber)
109 pieces = []
109 pieces = []
110
110
111 for f in funcmap:
111 for f in funcmap:
112 l = [f(n) for n, dummy in lines]
112 l = [f(n) for n, dummy in lines]
113 if l:
113 if l:
114 m = max(map(len, l))
114 m = max(map(len, l))
115 pieces.append(["%*s" % (m, x) for x in l])
115 pieces.append(["%*s" % (m, x) for x in l])
116
116
117 if pieces:
117 if pieces:
118 for p, l in zip(zip(*pieces), lines):
118 for p, l in zip(zip(*pieces), lines):
119 ui.write("%s: %s" % (" ".join(p), l[1]))
119 ui.write("%s: %s" % (" ".join(p), l[1]))
120
120
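
The column alignment at the end of annotate() is easiest to see on a toy input; everything below is made up for illustration and uses the same "%*s" right-alignment trick.

import sys

lines = [(('alice', 7), 'first line\n'), (('bob', 12), 'second line\n')]
funcmap = [lambda x: x[0], lambda x: str(x[1])]        # e.g. user, then revision

pieces = []
for f in funcmap:
    l = [f(meta) for meta, dummy in lines]
    width = max(map(len, l))
    pieces.append(["%*s" % (width, x) for x in l])     # right-align each column

for p, l in zip(zip(*pieces), lines):
    sys.stdout.write("%s: %s" % (" ".join(p), l[1]))
# output:
# alice  7: first line
#   bob 12: second line
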
121 def archive(ui, repo, dest, **opts):
121 def archive(ui, repo, dest, **opts):
122 '''create unversioned archive of a repository revision
122 '''create unversioned archive of a repository revision
123
123
124 By default, the revision used is the parent of the working
124 By default, the revision used is the parent of the working
125 directory; use "-r" to specify a different revision.
125 directory; use "-r" to specify a different revision.
126
126
127 To specify the type of archive to create, use "-t". Valid
127 To specify the type of archive to create, use "-t". Valid
128 types are:
128 types are:
129
129
130 "files" (default): a directory full of files
130 "files" (default): a directory full of files
131 "tar": tar archive, uncompressed
131 "tar": tar archive, uncompressed
132 "tbz2": tar archive, compressed using bzip2
132 "tbz2": tar archive, compressed using bzip2
133 "tgz": tar archive, compressed using gzip
133 "tgz": tar archive, compressed using gzip
134 "uzip": zip archive, uncompressed
134 "uzip": zip archive, uncompressed
135 "zip": zip archive, compressed using deflate
135 "zip": zip archive, compressed using deflate
136
136
137 The exact name of the destination archive or directory is given
137 The exact name of the destination archive or directory is given
138 using a format string; see "hg help export" for details.
138 using a format string; see "hg help export" for details.
139
139
140 Each member added to an archive file has a directory prefix
140 Each member added to an archive file has a directory prefix
141 prepended. Use "-p" to specify a format string for the prefix.
141 prepended. Use "-p" to specify a format string for the prefix.
142 The default is the basename of the archive, with suffixes removed.
142 The default is the basename of the archive, with suffixes removed.
143 '''
143 '''
144
144
145 ctx = repo.changectx(opts['rev'])
145 ctx = repo.changectx(opts['rev'])
146 if not ctx:
146 if not ctx:
147 raise util.Abort(_('repository has no revisions'))
147 raise util.Abort(_('repository has no revisions'))
148 node = ctx.node()
148 node = ctx.node()
149 dest = cmdutil.make_filename(repo, dest, node)
149 dest = cmdutil.make_filename(repo, dest, node)
150 if os.path.realpath(dest) == repo.root:
150 if os.path.realpath(dest) == repo.root:
151 raise util.Abort(_('repository root cannot be destination'))
151 raise util.Abort(_('repository root cannot be destination'))
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
153 kind = opts.get('type') or 'files'
153 kind = opts.get('type') or 'files'
154 prefix = opts['prefix']
154 prefix = opts['prefix']
155 if dest == '-':
155 if dest == '-':
156 if kind == 'files':
156 if kind == 'files':
157 raise util.Abort(_('cannot archive plain files to stdout'))
157 raise util.Abort(_('cannot archive plain files to stdout'))
158 dest = sys.stdout
158 dest = sys.stdout
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
160 prefix = cmdutil.make_filename(repo, prefix, node)
160 prefix = cmdutil.make_filename(repo, prefix, node)
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
162 matchfn, prefix)
162 matchfn, prefix)
163
163
164 def backout(ui, repo, node=None, rev=None, **opts):
164 def backout(ui, repo, node=None, rev=None, **opts):
165 '''reverse effect of earlier changeset
165 '''reverse effect of earlier changeset
166
166
167 Commit the backed out changes as a new changeset. The new
167 Commit the backed out changes as a new changeset. The new
168 changeset is a child of the backed out changeset.
168 changeset is a child of the backed out changeset.
169
169
170 If you back out a changeset other than the tip, a new head is
170 If you back out a changeset other than the tip, a new head is
171 created. This head is the parent of the working directory. If
171 created. This head is the parent of the working directory. If
172 you back out an old changeset, your working directory will appear
172 you back out an old changeset, your working directory will appear
173 old after the backout. You should merge the backout changeset
173 old after the backout. You should merge the backout changeset
174 with another head.
174 with another head.
175
175
176 The --merge option remembers the parent of the working directory
176 The --merge option remembers the parent of the working directory
177 before starting the backout, then merges the new head with that
177 before starting the backout, then merges the new head with that
178 changeset afterwards. This saves you from doing the merge by
178 changeset afterwards. This saves you from doing the merge by
179 hand. The result of this merge is not committed, as for a normal
179 hand. The result of this merge is not committed, as for a normal
180 merge.'''
180 merge.'''
181 if rev and node:
181 if rev and node:
182 raise util.Abort(_("please specify just one revision"))
182 raise util.Abort(_("please specify just one revision"))
183
183
184 if not rev:
184 if not rev:
185 rev = node
185 rev = node
186
186
187 if not rev:
187 if not rev:
188 raise util.Abort(_("please specify a revision to backout"))
188 raise util.Abort(_("please specify a revision to backout"))
189
189
190 cmdutil.bail_if_changed(repo)
190 cmdutil.bail_if_changed(repo)
191 op1, op2 = repo.dirstate.parents()
191 op1, op2 = repo.dirstate.parents()
192 if op2 != nullid:
192 if op2 != nullid:
193 raise util.Abort(_('outstanding uncommitted merge'))
193 raise util.Abort(_('outstanding uncommitted merge'))
194 node = repo.lookup(rev)
194 node = repo.lookup(rev)
195 p1, p2 = repo.changelog.parents(node)
195 p1, p2 = repo.changelog.parents(node)
196 if p1 == nullid:
196 if p1 == nullid:
197 raise util.Abort(_('cannot back out a change with no parents'))
197 raise util.Abort(_('cannot back out a change with no parents'))
198 if p2 != nullid:
198 if p2 != nullid:
199 if not opts['parent']:
199 if not opts['parent']:
200 raise util.Abort(_('cannot back out a merge changeset without '
200 raise util.Abort(_('cannot back out a merge changeset without '
201 '--parent'))
201 '--parent'))
202 p = repo.lookup(opts['parent'])
202 p = repo.lookup(opts['parent'])
203 if p not in (p1, p2):
203 if p not in (p1, p2):
204 raise util.Abort(_('%s is not a parent of %s') %
204 raise util.Abort(_('%s is not a parent of %s') %
205 (short(p), short(node)))
205 (short(p), short(node)))
206 parent = p
206 parent = p
207 else:
207 else:
208 if opts['parent']:
208 if opts['parent']:
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
210 parent = p1
210 parent = p1
211 hg.clean(repo, node, show_stats=False)
211 hg.clean(repo, node, show_stats=False)
212 revert_opts = opts.copy()
212 revert_opts = opts.copy()
213 revert_opts['date'] = None
213 revert_opts['date'] = None
214 revert_opts['all'] = True
214 revert_opts['all'] = True
215 revert_opts['rev'] = hex(parent)
215 revert_opts['rev'] = hex(parent)
216 revert(ui, repo, **revert_opts)
216 revert(ui, repo, **revert_opts)
217 commit_opts = opts.copy()
217 commit_opts = opts.copy()
218 commit_opts['addremove'] = False
218 commit_opts['addremove'] = False
219 if not commit_opts['message'] and not commit_opts['logfile']:
219 if not commit_opts['message'] and not commit_opts['logfile']:
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
221 commit_opts['force_editor'] = True
221 commit_opts['force_editor'] = True
222 commit(ui, repo, **commit_opts)
222 commit(ui, repo, **commit_opts)
223 def nice(node):
223 def nice(node):
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
225 ui.status(_('changeset %s backs out changeset %s\n') %
225 ui.status(_('changeset %s backs out changeset %s\n') %
226 (nice(repo.changelog.tip()), nice(node)))
226 (nice(repo.changelog.tip()), nice(node)))
227 if op1 != node:
227 if op1 != node:
228 if opts['merge']:
228 if opts['merge']:
229 ui.status(_('merging with changeset %s\n') % nice(op1))
229 ui.status(_('merging with changeset %s\n') % nice(op1))
230 hg.merge(repo, hex(op1))
230 hg.merge(repo, hex(op1))
231 else:
231 else:
232 ui.status(_('the backout changeset is a new head - '
232 ui.status(_('the backout changeset is a new head - '
233 'do not forget to merge\n'))
233 'do not forget to merge\n'))
234 ui.status(_('(use "backout --merge" '
234 ui.status(_('(use "backout --merge" '
235 'if you want to auto-merge)\n'))
235 'if you want to auto-merge)\n'))
236
236
237 def branch(ui, repo, label=None, **opts):
237 def branch(ui, repo, label=None, **opts):
238 """set or show the current branch name
238 """set or show the current branch name
239
239
240 With no argument, show the current branch name. With one argument,
240 With no argument, show the current branch name. With one argument,
241 set the working directory branch name (the branch does not exist in
241 set the working directory branch name (the branch does not exist in
242 the repository until the next commit).
242 the repository until the next commit).
243
243
244 Unless --force is specified, branch will not let you set a
244 Unless --force is specified, branch will not let you set a
245 branch name that shadows an existing branch.
245 branch name that shadows an existing branch.
246 """
246 """
247
247
248 if label:
248 if label:
249 if not opts.get('force') and label in repo.branchtags():
249 if not opts.get('force') and label in repo.branchtags():
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
251 raise util.Abort(_('a branch of the same name already exists'
251 raise util.Abort(_('a branch of the same name already exists'
252 ' (use --force to override)'))
252 ' (use --force to override)'))
253 repo.dirstate.setbranch(util.fromlocal(label))
253 repo.dirstate.setbranch(util.fromlocal(label))
254 ui.status(_('marked working directory as branch %s\n') % label)
254 ui.status(_('marked working directory as branch %s\n') % label)
255 else:
255 else:
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
257
257
258 def branches(ui, repo, active=False):
258 def branches(ui, repo, active=False):
259 """list repository named branches
259 """list repository named branches
260
260
261 List the repository's named branches, indicating which ones are
261 List the repository's named branches, indicating which ones are
262 inactive. If active is specified, only show active branches.
262 inactive. If active is specified, only show active branches.
263
263
264 A branch is considered active if it contains unmerged heads.
264 A branch is considered active if it contains unmerged heads.
265 """
265 """
266 b = repo.branchtags()
266 b = repo.branchtags()
267 heads = dict.fromkeys(repo.heads(), 1)
267 heads = dict.fromkeys(repo.heads(), 1)
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
269 l.sort()
269 l.sort()
270 l.reverse()
270 l.reverse()
271 for ishead, r, n, t in l:
271 for ishead, r, n, t in l:
272 if active and not ishead:
272 if active and not ishead:
273 # If we're only displaying active branches, abort the loop on
273 # If we're only displaying active branches, abort the loop on
274 # encountering the first inactive head
274 # encountering the first inactive head
275 break
275 break
276 else:
276 else:
277 hexfunc = ui.debugflag and hex or short
277 hexfunc = ui.debugflag and hex or short
278 if ui.quiet:
278 if ui.quiet:
279 ui.write("%s\n" % t)
279 ui.write("%s\n" % t)
280 else:
280 else:
281 spaces = " " * (30 - util.locallen(t))
281 spaces = " " * (30 - util.locallen(t))
282 # The code only gets here if inactive branches are being
282 # The code only gets here if inactive branches are being
283 # displayed or the branch is active.
283 # displayed or the branch is active.
284 isinactive = ((not ishead) and " (inactive)") or ''
284 isinactive = ((not ishead) and " (inactive)") or ''
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
286
286
287 def bundle(ui, repo, fname, dest=None, **opts):
287 def bundle(ui, repo, fname, dest=None, **opts):
288 """create a changegroup file
288 """create a changegroup file
289
289
290 Generate a compressed changegroup file collecting changesets not
290 Generate a compressed changegroup file collecting changesets not
291 found in the other repository.
291 found in the other repository.
292
292
293 If no destination repository is specified the destination is assumed
293 If no destination repository is specified the destination is assumed
294 to have all the nodes specified by one or more --base parameters.
294 to have all the nodes specified by one or more --base parameters.
295
295
296 The bundle file can then be transferred using conventional means and
296 The bundle file can then be transferred using conventional means and
297 applied to another repository with the unbundle or pull command.
297 applied to another repository with the unbundle or pull command.
298 This is useful when direct push and pull are not available or when
298 This is useful when direct push and pull are not available or when
299 exporting an entire repository is undesirable.
299 exporting an entire repository is undesirable.
300
300
301 Applying bundles preserves all changeset contents including
301 Applying bundles preserves all changeset contents including
302 permissions, copy/rename information, and revision history.
302 permissions, copy/rename information, and revision history.
303 """
303 """
304 revs = opts.get('rev') or None
304 revs = opts.get('rev') or None
305 if revs:
305 if revs:
306 revs = [repo.lookup(rev) for rev in revs]
306 revs = [repo.lookup(rev) for rev in revs]
307 base = opts.get('base')
307 base = opts.get('base')
308 if base:
308 if base:
309 if dest:
309 if dest:
310 raise util.Abort(_("--base is incompatible with specifiying "
310 raise util.Abort(_("--base is incompatible with specifiying "
311 "a destination"))
311 "a destination"))
312 base = [repo.lookup(rev) for rev in base]
312 base = [repo.lookup(rev) for rev in base]
313 # create the right base
313 # create the right base
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
315 o = []
315 o = []
316 has = {nullid: None}
316 has = {nullid: None}
317 for n in base:
317 for n in base:
318 has.update(repo.changelog.reachable(n))
318 has.update(repo.changelog.reachable(n))
319 if revs:
319 if revs:
320 visit = list(revs)
320 visit = list(revs)
321 else:
321 else:
322 visit = repo.changelog.heads()
322 visit = repo.changelog.heads()
323 seen = {}
323 seen = {}
324 while visit:
324 while visit:
325 n = visit.pop(0)
325 n = visit.pop(0)
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
327 if len(parents) == 0:
327 if len(parents) == 0:
328 o.insert(0, n)
328 o.insert(0, n)
329 else:
329 else:
330 for p in parents:
330 for p in parents:
331 if p not in seen:
331 if p not in seen:
332 seen[p] = 1
332 seen[p] = 1
333 visit.append(p)
333 visit.append(p)
334 else:
334 else:
335 cmdutil.setremoteconfig(ui, opts)
335 cmdutil.setremoteconfig(ui, opts)
336 dest, revs = cmdutil.parseurl(
336 dest, revs = cmdutil.parseurl(
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
338 other = hg.repository(ui, dest)
338 other = hg.repository(ui, dest)
339 o = repo.findoutgoing(other, force=opts['force'])
339 o = repo.findoutgoing(other, force=opts['force'])
340
340
341 if revs:
341 if revs:
342 cg = repo.changegroupsubset(o, revs, 'bundle')
342 cg = repo.changegroupsubset(o, revs, 'bundle')
343 else:
343 else:
344 cg = repo.changegroup(o, 'bundle')
344 cg = repo.changegroup(o, 'bundle')
345 changegroup.writebundle(cg, fname, "HG10BZ")
345 changegroup.writebundle(cg, fname, "HG10BZ")
346
346
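
The --base branch of bundle() walks from the requested heads down to the first ancestors the other side already has; a node whose parents are all known becomes a root passed to changegroupsubset. The toy version below runs the same walk over a plain parents mapping (graph and names are invented for the example).

def outgoing_roots(parents, heads, known):
    # parents: node -> list of parent nodes; known: nodes assumed present remotely
    o, seen, visit = [], {}, list(heads)
    while visit:
        n = visit.pop(0)
        missing = [p for p in parents.get(n, []) if p not in known]
        if not missing:
            o.insert(0, n)              # all parents known: n is an outgoing root
        else:
            for p in missing:
                if p not in seen:
                    seen[p] = 1
                    visit.append(p)
    return o

# linear history base <- a <- b <- c, remote side has only 'base':
# outgoing_roots({'a': ['base'], 'b': ['a'], 'c': ['b']}, ['c'], {'base': 1}) -> ['a']
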
347 def cat(ui, repo, file1, *pats, **opts):
347 def cat(ui, repo, file1, *pats, **opts):
348 """output the current or given revision of files
348 """output the current or given revision of files
349
349
350 Print the specified files as they were at the given revision.
350 Print the specified files as they were at the given revision.
351 If no revision is given, the parent of the working directory is used,
351 If no revision is given, the parent of the working directory is used,
352 or tip if no revision is checked out.
352 or tip if no revision is checked out.
353
353
354 Output may be to a file, in which case the name of the file is
354 Output may be to a file, in which case the name of the file is
355 given using a format string. The formatting rules are the same as
355 given using a format string. The formatting rules are the same as
356 for the export command, with the following additions:
356 for the export command, with the following additions:
357
357
358 %s basename of file being printed
358 %s basename of file being printed
359 %d dirname of file being printed, or '.' if in repo root
359 %d dirname of file being printed, or '.' if in repo root
360 %p root-relative path name of file being printed
360 %p root-relative path name of file being printed
361 """
361 """
362 ctx = repo.changectx(opts['rev'])
362 ctx = repo.changectx(opts['rev'])
363 err = 1
363 err = 1
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
365 ctx.node()):
365 ctx.node()):
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
367 fp.write(ctx.filectx(abs).data())
367 fp.write(ctx.filectx(abs).data())
368 err = 0
368 err = 0
369 return err
369 return err
370
370
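# Illustrative sketch of the format-string output described above; REV and the
# file name are hypothetical, and -o/--output is assumed to correspond to the
# 'output' option used by this command:
#
#   $ hg cat -o "%d/%s.orig" -r REV src/module.py
#
# would write the contents at REV to "src/module.py.orig" (%d is the directory
# name and %s the basename of the file being printed).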
371 def clone(ui, source, dest=None, **opts):
371 def clone(ui, source, dest=None, **opts):
372 """make a copy of an existing repository
372 """make a copy of an existing repository
373
373
374 Create a copy of an existing repository in a new directory.
374 Create a copy of an existing repository in a new directory.
375
375
376 If no destination directory name is specified, it defaults to the
376 If no destination directory name is specified, it defaults to the
377 basename of the source.
377 basename of the source.
378
378
379 The location of the source is added to the new repository's
379 The location of the source is added to the new repository's
380 .hg/hgrc file, as the default to be used for future pulls.
380 .hg/hgrc file, as the default to be used for future pulls.
381
381
382 For efficiency, hardlinks are used for cloning whenever the source
382 For efficiency, hardlinks are used for cloning whenever the source
383 and destination are on the same filesystem (note this applies only
383 and destination are on the same filesystem (note this applies only
384 to the repository data, not to the checked out files). Some
384 to the repository data, not to the checked out files). Some
385 filesystems, such as AFS, implement hardlinking incorrectly, but
385 filesystems, such as AFS, implement hardlinking incorrectly, but
386 do not report errors. In these cases, use the --pull option to
386 do not report errors. In these cases, use the --pull option to
387 avoid hardlinking.
387 avoid hardlinking.
388
388
389 You can safely clone repositories and checked out files using full
389 You can safely clone repositories and checked out files using full
390 hardlinks with
390 hardlinks with
391
391
392 $ cp -al REPO REPOCLONE
392 $ cp -al REPO REPOCLONE
393
393
394 which is the fastest way to clone. However, the operation is not
394 which is the fastest way to clone. However, the operation is not
395 atomic (making sure REPO is not modified during the operation is
395 atomic (making sure REPO is not modified during the operation is
396 up to you) and you have to make sure your editor breaks hardlinks
396 up to you) and you have to make sure your editor breaks hardlinks
397 (Emacs and most Linux Kernel tools do so).
397 (Emacs and most Linux Kernel tools do so).
398
398
399 If you use the -r option to clone up to a specific revision, no
399 If you use the -r option to clone up to a specific revision, no
400 subsequent revisions will be present in the cloned repository.
400 subsequent revisions will be present in the cloned repository.
401 This option implies --pull, even on local repositories.
401 This option implies --pull, even on local repositories.
402
402
403 See pull for valid source format details.
403 See pull for valid source format details.
404
404
405 It is possible to specify an ssh:// URL as the destination, but no
405 It is possible to specify an ssh:// URL as the destination, but no
406 .hg/hgrc and working directory will be created on the remote side.
406 .hg/hgrc and working directory will be created on the remote side.
407 Look at the help text for the pull command for important details
407 Look at the help text for the pull command for important details
408 about ssh:// URLs.
408 about ssh:// URLs.
409 """
409 """
410 cmdutil.setremoteconfig(ui, opts)
410 cmdutil.setremoteconfig(ui, opts)
411 hg.clone(ui, source, dest,
411 hg.clone(ui, source, dest,
412 pull=opts['pull'],
412 pull=opts['pull'],
413 stream=opts['uncompressed'],
413 stream=opts['uncompressed'],
414 rev=opts['rev'],
414 rev=opts['rev'],
415 update=not opts['noupdate'])
415 update=not opts['noupdate'])
416
416
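# Illustrative usage sketches for the clone behaviour described above; the
# URLs and the revision name REV are hypothetical placeholders:
#
#   $ hg clone http://example.com/repo                  (clone into ./repo)
#   $ hg clone -r REV http://example.com/repo old-copy  (history up to REV, implies --pull)
#   $ hg clone --pull /path/to/local-repo copy          (pull-based clone, no hardlinks)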
417 def commit(ui, repo, *pats, **opts):
417 def commit(ui, repo, *pats, **opts):
418 """commit the specified files or all outstanding changes
418 """commit the specified files or all outstanding changes
419
419
420 Commit changes to the given files into the repository.
420 Commit changes to the given files into the repository.
421
421
422 If a list of files is omitted, all changes reported by "hg status"
422 If a list of files is omitted, all changes reported by "hg status"
423 will be committed.
423 will be committed.
424
424
425 If no commit message is specified, the editor configured in your hgrc
425 If no commit message is specified, the editor configured in your hgrc
426 or in the EDITOR environment variable is started to enter a message.
426 or in the EDITOR environment variable is started to enter a message.
427 """
427 """
428 message = cmdutil.logmessage(opts)
428 message = cmdutil.logmessage(opts)
429
429
430 if opts['addremove']:
430 if opts['addremove']:
431 cmdutil.addremove(repo, pats, opts)
431 cmdutil.addremove(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
433 if pats:
433 if pats:
434 status = repo.status(files=fns, match=match)
434 status = repo.status(files=fns, match=match)
435 modified, added, removed, deleted, unknown = status[:5]
435 modified, added, removed, deleted, unknown = status[:5]
436 files = modified + added + removed
436 files = modified + added + removed
437 slist = None
437 slist = None
438 for f in fns:
438 for f in fns:
439 if f == '.':
439 if f == '.':
440 continue
440 continue
441 if f not in files:
441 if f not in files:
442 rf = repo.wjoin(f)
442 rf = repo.wjoin(f)
443 try:
443 try:
444 mode = os.lstat(rf)[stat.ST_MODE]
444 mode = os.lstat(rf)[stat.ST_MODE]
445 except OSError:
445 except OSError:
446 raise util.Abort(_("file %s not found!") % rf)
446 raise util.Abort(_("file %s not found!") % rf)
447 if stat.S_ISDIR(mode):
447 if stat.S_ISDIR(mode):
448 name = f + '/'
448 name = f + '/'
449 if slist is None:
449 if slist is None:
450 slist = list(files)
450 slist = list(files)
451 slist.sort()
451 slist.sort()
452 i = bisect.bisect(slist, name)
452 i = bisect.bisect(slist, name)
453 if i >= len(slist) or not slist[i].startswith(name):
453 if i >= len(slist) or not slist[i].startswith(name):
454 raise util.Abort(_("no match under directory %s!")
454 raise util.Abort(_("no match under directory %s!")
455 % rf)
455 % rf)
456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
457 raise util.Abort(_("can't commit %s: "
457 raise util.Abort(_("can't commit %s: "
458 "unsupported file type!") % rf)
458 "unsupported file type!") % rf)
459 elif repo.dirstate.state(f) == '?':
459 elif repo.dirstate.state(f) == '?':
460 raise util.Abort(_("file %s not tracked!") % rf)
460 raise util.Abort(_("file %s not tracked!") % rf)
461 else:
461 else:
462 files = []
462 files = []
463 try:
463 try:
464 repo.commit(files, message, opts['user'], opts['date'], match,
464 repo.commit(files, message, opts['user'], opts['date'], match,
465 force_editor=opts.get('force_editor'))
465 force_editor=opts.get('force_editor'))
466 except ValueError, inst:
466 except ValueError, inst:
467 raise util.Abort(str(inst))
467 raise util.Abort(str(inst))
468
468
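# Illustrative usage sketch for the commit behaviour described above; the file
# name is hypothetical and -m is assumed to be the usual message option
# handled by cmdutil.logmessage:
#
#   $ hg commit -m "fix parser bug" src/parser.py   (commit a single file)
#   $ hg commit --addremove                         (add/remove unknown/missing files, then commit)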
469 def docopy(ui, repo, pats, opts, wlock):
469 def docopy(ui, repo, pats, opts, wlock):
470 # called with the repo lock held
470 # called with the repo lock held
471 #
471 #
472 # hgsep => pathname that uses "/" to separate directories
472 # hgsep => pathname that uses "/" to separate directories
473 # ossep => pathname that uses os.sep to separate directories
473 # ossep => pathname that uses os.sep to separate directories
474 cwd = repo.getcwd()
474 cwd = repo.getcwd()
475 errors = 0
475 errors = 0
476 copied = []
476 copied = []
477 targets = {}
477 targets = {}
478
478
479 # abs: hgsep
479 # abs: hgsep
480 # rel: ossep
480 # rel: ossep
481 # return: hgsep
481 # return: hgsep
482 def okaytocopy(abs, rel, exact):
482 def okaytocopy(abs, rel, exact):
483 reasons = {'?': _('is not managed'),
483 reasons = {'?': _('is not managed'),
484 'r': _('has been marked for remove')}
484 'r': _('has been marked for remove')}
485 state = repo.dirstate.state(abs)
485 state = repo.dirstate.state(abs)
486 reason = reasons.get(state)
486 reason = reasons.get(state)
487 if reason:
487 if reason:
488 if exact:
488 if exact:
489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 else:
490 else:
491 if state == 'a':
491 if state == 'a':
492 origsrc = repo.dirstate.copied(abs)
492 origsrc = repo.dirstate.copied(abs)
493 if origsrc is not None:
493 if origsrc is not None:
494 return origsrc
494 return origsrc
495 return abs
495 return abs
496
496
497 # origsrc: hgsep
497 # origsrc: hgsep
498 # abssrc: hgsep
498 # abssrc: hgsep
499 # relsrc: ossep
499 # relsrc: ossep
500 # otarget: ossep
500 # otarget: ossep
501 def copy(origsrc, abssrc, relsrc, otarget, exact):
501 def copy(origsrc, abssrc, relsrc, otarget, exact):
502 abstarget = util.canonpath(repo.root, cwd, otarget)
502 abstarget = util.canonpath(repo.root, cwd, otarget)
503 reltarget = repo.pathto(abstarget, cwd)
503 reltarget = repo.pathto(abstarget, cwd)
504 prevsrc = targets.get(abstarget)
504 prevsrc = targets.get(abstarget)
505 src = repo.wjoin(abssrc)
505 src = repo.wjoin(abssrc)
506 target = repo.wjoin(abstarget)
506 target = repo.wjoin(abstarget)
507 if prevsrc is not None:
507 if prevsrc is not None:
508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
509 (reltarget, repo.pathto(abssrc, cwd),
509 (reltarget, repo.pathto(abssrc, cwd),
510 repo.pathto(prevsrc, cwd)))
510 repo.pathto(prevsrc, cwd)))
511 return
511 return
512 if (not opts['after'] and os.path.exists(target) or
512 if (not opts['after'] and os.path.exists(target) or
513 opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
513 opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
514 if not opts['force']:
514 if not opts['force']:
515 ui.warn(_('%s: not overwriting - file exists\n') %
515 ui.warn(_('%s: not overwriting - file exists\n') %
516 reltarget)
516 reltarget)
517 return
517 return
518 if not opts['after'] and not opts.get('dry_run'):
518 if not opts['after'] and not opts.get('dry_run'):
519 os.unlink(target)
519 os.unlink(target)
520 if opts['after']:
520 if opts['after']:
521 if not os.path.exists(target):
521 if not os.path.exists(target):
522 return
522 return
523 else:
523 else:
524 targetdir = os.path.dirname(target) or '.'
524 targetdir = os.path.dirname(target) or '.'
525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
526 os.makedirs(targetdir)
526 os.makedirs(targetdir)
527 try:
527 try:
528 restore = repo.dirstate.state(abstarget) == 'r'
528 restore = repo.dirstate.state(abstarget) == 'r'
529 if restore and not opts.get('dry_run'):
529 if restore and not opts.get('dry_run'):
530 repo.undelete([abstarget], wlock)
530 repo.undelete([abstarget], wlock)
531 try:
531 try:
532 if not opts.get('dry_run'):
532 if not opts.get('dry_run'):
533 util.copyfile(src, target)
533 util.copyfile(src, target)
534 restore = False
534 restore = False
535 finally:
535 finally:
536 if restore:
536 if restore:
537 repo.remove([abstarget], wlock=wlock)
537 repo.remove([abstarget], wlock=wlock)
538 except IOError, inst:
538 except IOError, inst:
539 if inst.errno == errno.ENOENT:
539 if inst.errno == errno.ENOENT:
540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
541 else:
541 else:
542 ui.warn(_('%s: cannot copy - %s\n') %
542 ui.warn(_('%s: cannot copy - %s\n') %
543 (relsrc, inst.strerror))
543 (relsrc, inst.strerror))
544 errors += 1
544 errors += 1
545 return
545 return
546 if ui.verbose or not exact:
546 if ui.verbose or not exact:
547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
548 targets[abstarget] = abssrc
548 targets[abstarget] = abssrc
549 if abstarget != origsrc:
549 if abstarget != origsrc:
550 if repo.dirstate.state(origsrc) == 'a':
550 if repo.dirstate.state(origsrc) == 'a':
551 if not ui.quiet:
551 if not ui.quiet:
552 ui.warn(_("%s has not been committed yet, so no copy "
552 ui.warn(_("%s has not been committed yet, so no copy "
553 "data will be stored for %s.\n")
553 "data will be stored for %s.\n")
554 % (repo.pathto(origsrc, cwd), reltarget))
554 % (repo.pathto(origsrc, cwd), reltarget))
555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
556 repo.add([abstarget], wlock)
556 repo.add([abstarget], wlock)
557 elif not opts.get('dry_run'):
557 elif not opts.get('dry_run'):
558 repo.copy(origsrc, abstarget, wlock)
558 repo.copy(origsrc, abstarget, wlock)
559 copied.append((abssrc, relsrc, exact))
559 copied.append((abssrc, relsrc, exact))
560
560
561 # pat: ossep
561 # pat: ossep
562 # dest: ossep
562 # dest: ossep
563 # srcs: list of (hgsep, hgsep, ossep, bool)
563 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # return: function that takes hgsep and returns ossep
564 # return: function that takes hgsep and returns ossep
565 def targetpathfn(pat, dest, srcs):
565 def targetpathfn(pat, dest, srcs):
566 if os.path.isdir(pat):
566 if os.path.isdir(pat):
567 abspfx = util.canonpath(repo.root, cwd, pat)
567 abspfx = util.canonpath(repo.root, cwd, pat)
568 abspfx = util.localpath(abspfx)
568 abspfx = util.localpath(abspfx)
569 if destdirexists:
569 if destdirexists:
570 striplen = len(os.path.split(abspfx)[0])
570 striplen = len(os.path.split(abspfx)[0])
571 else:
571 else:
572 striplen = len(abspfx)
572 striplen = len(abspfx)
573 if striplen:
573 if striplen:
574 striplen += len(os.sep)
574 striplen += len(os.sep)
575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
576 elif destdirexists:
576 elif destdirexists:
577 res = lambda p: os.path.join(dest,
577 res = lambda p: os.path.join(dest,
578 os.path.basename(util.localpath(p)))
578 os.path.basename(util.localpath(p)))
579 else:
579 else:
580 res = lambda p: dest
580 res = lambda p: dest
581 return res
581 return res
582
582
583 # pat: ossep
583 # pat: ossep
584 # dest: ossep
584 # dest: ossep
585 # srcs: list of (hgsep, hgsep, ossep, bool)
585 # srcs: list of (hgsep, hgsep, ossep, bool)
586 # return: function that takes hgsep and returns ossep
586 # return: function that takes hgsep and returns ossep
587 def targetpathafterfn(pat, dest, srcs):
587 def targetpathafterfn(pat, dest, srcs):
588 if util.patkind(pat, None)[0]:
588 if util.patkind(pat, None)[0]:
589 # a mercurial pattern
589 # a mercurial pattern
590 res = lambda p: os.path.join(dest,
590 res = lambda p: os.path.join(dest,
591 os.path.basename(util.localpath(p)))
591 os.path.basename(util.localpath(p)))
592 else:
592 else:
593 abspfx = util.canonpath(repo.root, cwd, pat)
593 abspfx = util.canonpath(repo.root, cwd, pat)
594 if len(abspfx) < len(srcs[0][0]):
594 if len(abspfx) < len(srcs[0][0]):
595 # A directory. Either the target path contains the last
595 # A directory. Either the target path contains the last
596 # component of the source path or it does not.
596 # component of the source path or it does not.
597 def evalpath(striplen):
597 def evalpath(striplen):
598 score = 0
598 score = 0
599 for s in srcs:
599 for s in srcs:
600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
601 if os.path.exists(t):
601 if os.path.exists(t):
602 score += 1
602 score += 1
603 return score
603 return score
604
604
605 abspfx = util.localpath(abspfx)
605 abspfx = util.localpath(abspfx)
606 striplen = len(abspfx)
606 striplen = len(abspfx)
607 if striplen:
607 if striplen:
608 striplen += len(os.sep)
608 striplen += len(os.sep)
609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
610 score = evalpath(striplen)
610 score = evalpath(striplen)
611 striplen1 = len(os.path.split(abspfx)[0])
611 striplen1 = len(os.path.split(abspfx)[0])
612 if striplen1:
612 if striplen1:
613 striplen1 += len(os.sep)
613 striplen1 += len(os.sep)
614 if evalpath(striplen1) > score:
614 if evalpath(striplen1) > score:
615 striplen = striplen1
615 striplen = striplen1
616 res = lambda p: os.path.join(dest,
616 res = lambda p: os.path.join(dest,
617 util.localpath(p)[striplen:])
617 util.localpath(p)[striplen:])
618 else:
618 else:
619 # a file
619 # a file
620 if destdirexists:
620 if destdirexists:
621 res = lambda p: os.path.join(dest,
621 res = lambda p: os.path.join(dest,
622 os.path.basename(util.localpath(p)))
622 os.path.basename(util.localpath(p)))
623 else:
623 else:
624 res = lambda p: dest
624 res = lambda p: dest
625 return res
625 return res
626
626
627
627
628 pats = util.expand_glob(pats)
628 pats = util.expand_glob(pats)
629 if not pats:
629 if not pats:
630 raise util.Abort(_('no source or destination specified'))
630 raise util.Abort(_('no source or destination specified'))
631 if len(pats) == 1:
631 if len(pats) == 1:
632 raise util.Abort(_('no destination specified'))
632 raise util.Abort(_('no destination specified'))
633 dest = pats.pop()
633 dest = pats.pop()
634 destdirexists = os.path.isdir(dest)
634 destdirexists = os.path.isdir(dest)
635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
636 raise util.Abort(_('with multiple sources, destination must be an '
636 raise util.Abort(_('with multiple sources, destination must be an '
637 'existing directory'))
637 'existing directory'))
638 if opts['after']:
638 if opts['after']:
639 tfn = targetpathafterfn
639 tfn = targetpathafterfn
640 else:
640 else:
641 tfn = targetpathfn
641 tfn = targetpathfn
642 copylist = []
642 copylist = []
643 for pat in pats:
643 for pat in pats:
644 srcs = []
644 srcs = []
645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
646 globbed=True):
646 globbed=True):
647 origsrc = okaytocopy(abssrc, relsrc, exact)
647 origsrc = okaytocopy(abssrc, relsrc, exact)
648 if origsrc:
648 if origsrc:
649 srcs.append((origsrc, abssrc, relsrc, exact))
649 srcs.append((origsrc, abssrc, relsrc, exact))
650 if not srcs:
650 if not srcs:
651 continue
651 continue
652 copylist.append((tfn(pat, dest, srcs), srcs))
652 copylist.append((tfn(pat, dest, srcs), srcs))
653 if not copylist:
653 if not copylist:
654 raise util.Abort(_('no files to copy'))
654 raise util.Abort(_('no files to copy'))
655
655
656 for targetpath, srcs in copylist:
656 for targetpath, srcs in copylist:
657 for origsrc, abssrc, relsrc, exact in srcs:
657 for origsrc, abssrc, relsrc, exact in srcs:
658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
659
659
660 if errors:
660 if errors:
661 ui.warn(_('(consider using --after)\n'))
661 ui.warn(_('(consider using --after)\n'))
662 return errors, copied
662 return errors, copied
663
663
664 def copy(ui, repo, *pats, **opts):
664 def copy(ui, repo, *pats, **opts):
665 """mark files as copied for the next commit
665 """mark files as copied for the next commit
666
666
667 Mark dest as having copies of source files. If dest is a
667 Mark dest as having copies of source files. If dest is a
668 directory, copies are put in that directory. If dest is a file,
668 directory, copies are put in that directory. If dest is a file,
669 there can only be one source.
669 there can only be one source.
670
670
671 By default, this command copies the contents of files as they
671 By default, this command copies the contents of files as they
672 stand in the working directory. If invoked with --after, the
672 stand in the working directory. If invoked with --after, the
673 operation is recorded, but no copying is performed.
673 operation is recorded, but no copying is performed.
674
674
675 This command takes effect in the next commit. To undo a copy
675 This command takes effect in the next commit. To undo a copy
676 before that, see hg revert.
676 before that, see hg revert.
677 """
677 """
678 wlock = repo.wlock(0)
678 wlock = repo.wlock(0)
679 errs, copied = docopy(ui, repo, pats, opts, wlock)
679 errs, copied = docopy(ui, repo, pats, opts, wlock)
680 return errs
680 return errs
681
681
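# Illustrative usage sketch for the copy semantics described above; file names
# are hypothetical:
#
#   $ hg copy util.py util2.py            (copy the file and record the copy)
#   $ cp util.py util2.py
#   $ hg copy --after util.py util2.py    (record a copy that was already made)
#
# Either form takes effect at the next commit; "hg revert" undoes it before then.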
682 def debugancestor(ui, index, rev1, rev2):
682 def debugancestor(ui, index, rev1, rev2):
683 """find the ancestor revision of two revisions in a given index"""
683 """find the ancestor revision of two revisions in a given index"""
684 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
684 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
685 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
685 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
686 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
686 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
687
687
688 def debugcomplete(ui, cmd='', **opts):
688 def debugcomplete(ui, cmd='', **opts):
689 """returns the completion list associated with the given command"""
689 """returns the completion list associated with the given command"""
690
690
691 if opts['options']:
691 if opts['options']:
692 options = []
692 options = []
693 otables = [globalopts]
693 otables = [globalopts]
694 if cmd:
694 if cmd:
695 aliases, entry = cmdutil.findcmd(ui, cmd)
695 aliases, entry = cmdutil.findcmd(ui, cmd)
696 otables.append(entry[1])
696 otables.append(entry[1])
697 for t in otables:
697 for t in otables:
698 for o in t:
698 for o in t:
699 if o[0]:
699 if o[0]:
700 options.append('-%s' % o[0])
700 options.append('-%s' % o[0])
701 options.append('--%s' % o[1])
701 options.append('--%s' % o[1])
702 ui.write("%s\n" % "\n".join(options))
702 ui.write("%s\n" % "\n".join(options))
703 return
703 return
704
704
705 clist = cmdutil.findpossible(ui, cmd).keys()
705 clist = cmdutil.findpossible(ui, cmd).keys()
706 clist.sort()
706 clist.sort()
707 ui.write("%s\n" % "\n".join(clist))
707 ui.write("%s\n" % "\n".join(clist))
708
708
709 def debugrebuildstate(ui, repo, rev=""):
709 def debugrebuildstate(ui, repo, rev=""):
710 """rebuild the dirstate as it would look like for the given revision"""
710 """rebuild the dirstate as it would look like for the given revision"""
711 if rev == "":
711 if rev == "":
712 rev = repo.changelog.tip()
712 rev = repo.changelog.tip()
713 ctx = repo.changectx(rev)
713 ctx = repo.changectx(rev)
714 files = ctx.manifest()
714 files = ctx.manifest()
715 wlock = repo.wlock()
715 wlock = repo.wlock()
716 repo.dirstate.rebuild(rev, files)
716 repo.dirstate.rebuild(rev, files)
717
717
718 def debugcheckstate(ui, repo):
718 def debugcheckstate(ui, repo):
719 """validate the correctness of the current dirstate"""
719 """validate the correctness of the current dirstate"""
720 parent1, parent2 = repo.dirstate.parents()
720 parent1, parent2 = repo.dirstate.parents()
721 dc = repo.dirstate
721 dc = repo.dirstate
722 m1 = repo.changectx(parent1).manifest()
722 m1 = repo.changectx(parent1).manifest()
723 m2 = repo.changectx(parent2).manifest()
723 m2 = repo.changectx(parent2).manifest()
724 errors = 0
724 errors = 0
725 for f in dc:
725 for f in dc:
726 state = repo.dirstate.state(f)
726 state = repo.dirstate.state(f)
727 if state in "nr" and f not in m1:
727 if state in "nr" and f not in m1:
728 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
728 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
729 errors += 1
729 errors += 1
730 if state in "a" and f in m1:
730 if state in "a" and f in m1:
731 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
731 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
732 errors += 1
732 errors += 1
733 if state in "m" and f not in m1 and f not in m2:
733 if state in "m" and f not in m1 and f not in m2:
734 ui.warn(_("%s in state %s, but not in either manifest\n") %
734 ui.warn(_("%s in state %s, but not in either manifest\n") %
735 (f, state))
735 (f, state))
736 errors += 1
736 errors += 1
737 for f in m1:
737 for f in m1:
738 state = repo.dirstate.state(f)
738 state = repo.dirstate.state(f)
739 if state not in "nrm":
739 if state not in "nrm":
740 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
740 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
741 errors += 1
741 errors += 1
742 if errors:
742 if errors:
743 error = _(".hg/dirstate inconsistent with current parent's manifest")
743 error = _(".hg/dirstate inconsistent with current parent's manifest")
744 raise util.Abort(error)
744 raise util.Abort(error)
745
745
746 def showconfig(ui, repo, *values, **opts):
746 def showconfig(ui, repo, *values, **opts):
747 """show combined config settings from all hgrc files
747 """show combined config settings from all hgrc files
748
748
749 With no args, print names and values of all config items.
749 With no args, print names and values of all config items.
750
750
751 With one arg of the form section.name, print just the value of
751 With one arg of the form section.name, print just the value of
752 that config item.
752 that config item.
753
753
754 With multiple args, print names and values of all config items
754 With multiple args, print names and values of all config items
755 with matching section names."""
755 with matching section names."""
756
756
757 untrusted = bool(opts.get('untrusted'))
757 untrusted = bool(opts.get('untrusted'))
758 if values:
758 if values:
759 if len([v for v in values if '.' in v]) > 1:
759 if len([v for v in values if '.' in v]) > 1:
760 raise util.Abort(_('only one config item permitted'))
760 raise util.Abort(_('only one config item permitted'))
761 for section, name, value in ui.walkconfig(untrusted=untrusted):
761 for section, name, value in ui.walkconfig(untrusted=untrusted):
762 sectname = section + '.' + name
762 sectname = section + '.' + name
763 if values:
763 if values:
764 for v in values:
764 for v in values:
765 if v == section:
765 if v == section:
766 ui.write('%s=%s\n' % (sectname, value))
766 ui.write('%s=%s\n' % (sectname, value))
767 elif v == sectname:
767 elif v == sectname:
768 ui.write(value, '\n')
768 ui.write(value, '\n')
769 else:
769 else:
770 ui.write('%s=%s\n' % (sectname, value))
770 ui.write('%s=%s\n' % (sectname, value))
771
771
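# Illustrative usage sketch for the three argument forms described in the
# docstring above:
#
#   $ hg showconfig                 (all configuration items)
#   $ hg showconfig ui.username     (the value of a single item)
#   $ hg showconfig paths ui        (all items in the named sections)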
772 def debugsetparents(ui, repo, rev1, rev2=None):
772 def debugsetparents(ui, repo, rev1, rev2=None):
773 """manually set the parents of the current working directory
773 """manually set the parents of the current working directory
774
774
775 This is useful for writing repository conversion tools, but should
775 This is useful for writing repository conversion tools, but should
776 be used with care.
776 be used with care.
777 """
777 """
778
778
779 if not rev2:
779 if not rev2:
780 rev2 = hex(nullid)
780 rev2 = hex(nullid)
781
781
782 wlock = repo.wlock()
782 wlock = repo.wlock()
783 try:
783 try:
784 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
784 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
785 finally:
785 finally:
786 wlock.release()
786 wlock.release()
787
787
788 def debugstate(ui, repo):
788 def debugstate(ui, repo):
789 """show the contents of the current dirstate"""
789 """show the contents of the current dirstate"""
790 dc = repo.dirstate
790 dc = repo.dirstate
791 for file_ in dc:
791 for file_ in dc:
792 if dc[file_][3] == -1:
792 if dc[file_][3] == -1:
793 # Pad or slice to locale representation
793 # Pad or slice to locale representation
794 locale_len = len(time.strftime("%x %X", time.localtime(0)))
794 locale_len = len(time.strftime("%x %X", time.localtime(0)))
795 timestr = 'unset'
795 timestr = 'unset'
796 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
796 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
797 else:
797 else:
798 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
798 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
799 ui.write("%c %3o %10d %s %s\n"
799 ui.write("%c %3o %10d %s %s\n"
800 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
800 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
801 timestr, file_))
801 timestr, file_))
802 for f in repo.dirstate.copies():
802 for f in repo.dirstate.copies():
803 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
803 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
804
804
805 def debugdata(ui, file_, rev):
805 def debugdata(ui, file_, rev):
806 """dump the contents of a data file revision"""
806 """dump the contents of a data file revision"""
807 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
807 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
808 try:
808 try:
809 ui.write(r.revision(r.lookup(rev)))
809 ui.write(r.revision(r.lookup(rev)))
810 except KeyError:
810 except KeyError:
811 raise util.Abort(_('invalid revision identifier %s') % rev)
811 raise util.Abort(_('invalid revision identifier %s') % rev)
812
812
813 def debugdate(ui, date, range=None, **opts):
813 def debugdate(ui, date, range=None, **opts):
814 """parse and display a date"""
814 """parse and display a date"""
815 if opts["extended"]:
815 if opts["extended"]:
816 d = util.parsedate(date, util.extendeddateformats)
816 d = util.parsedate(date, util.extendeddateformats)
817 else:
817 else:
818 d = util.parsedate(date)
818 d = util.parsedate(date)
819 ui.write("internal: %s %s\n" % d)
819 ui.write("internal: %s %s\n" % d)
820 ui.write("standard: %s\n" % util.datestr(d))
820 ui.write("standard: %s\n" % util.datestr(d))
821 if range:
821 if range:
822 m = util.matchdate(range)
822 m = util.matchdate(range)
823 ui.write("match: %s\n" % m(d[0]))
823 ui.write("match: %s\n" % m(d[0]))
824
824
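# Illustrative sketch; the date string below is a hypothetical example:
#
#   $ hg debugdate "2007-05-12 13:37"
#
# prints the internal (unixtime, offset) pair and the standard date string; an
# optional second argument is matched against the date as a range expression.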
825 def debugindex(ui, file_):
825 def debugindex(ui, file_):
826 """dump the contents of an index file"""
826 """dump the contents of an index file"""
827 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
827 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
828 ui.write(" rev offset length base linkrev" +
828 ui.write(" rev offset length base linkrev" +
829 " nodeid p1 p2\n")
829 " nodeid p1 p2\n")
830 for i in xrange(r.count()):
830 for i in xrange(r.count()):
831 node = r.node(i)
831 node = r.node(i)
832 pp = r.parents(node)
832 pp = r.parents(node)
833 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
833 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
834 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
834 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
835 short(node), short(pp[0]), short(pp[1])))
835 short(node), short(pp[0]), short(pp[1])))
836
836
837 def debugindexdot(ui, file_):
837 def debugindexdot(ui, file_):
838 """dump an index DAG as a .dot file"""
838 """dump an index DAG as a .dot file"""
839 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
839 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
840 ui.write("digraph G {\n")
840 ui.write("digraph G {\n")
841 for i in xrange(r.count()):
841 for i in xrange(r.count()):
842 node = r.node(i)
842 node = r.node(i)
843 pp = r.parents(node)
843 pp = r.parents(node)
844 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
844 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
845 if pp[1] != nullid:
845 if pp[1] != nullid:
846 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
846 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
847 ui.write("}\n")
847 ui.write("}\n")
848
848
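# Illustrative sketch: the emitted "digraph G { ... }" text can be rendered
# with graphviz; the index path below is hypothetical and varies with the
# repository format:
#
#   $ hg debugindexdot .hg/store/00changelog.i | dot -Tpng -o dag.png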
849 def debuginstall(ui):
849 def debuginstall(ui):
850 '''test Mercurial installation'''
850 '''test Mercurial installation'''
851
851
852 def writetemp(contents):
852 def writetemp(contents):
853 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
853 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
854 f = os.fdopen(fd, "wb")
854 f = os.fdopen(fd, "wb")
855 f.write(contents)
855 f.write(contents)
856 f.close()
856 f.close()
857 return name
857 return name
858
858
859 problems = 0
859 problems = 0
860
860
861 # encoding
861 # encoding
862 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
862 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
863 try:
863 try:
864 util.fromlocal("test")
864 util.fromlocal("test")
865 except util.Abort, inst:
865 except util.Abort, inst:
866 ui.write(" %s\n" % inst)
866 ui.write(" %s\n" % inst)
867 ui.write(_(" (check that your locale is properly set)\n"))
867 ui.write(_(" (check that your locale is properly set)\n"))
868 problems += 1
868 problems += 1
869
869
870 # compiled modules
870 # compiled modules
871 ui.status(_("Checking extensions...\n"))
871 ui.status(_("Checking extensions...\n"))
872 try:
872 try:
873 import bdiff, mpatch, base85
873 import bdiff, mpatch, base85
874 except Exception, inst:
874 except Exception, inst:
875 ui.write(" %s\n" % inst)
875 ui.write(" %s\n" % inst)
876 ui.write(_(" One or more extensions could not be found"))
876 ui.write(_(" One or more extensions could not be found"))
877 ui.write(_(" (check that you compiled the extensions)\n"))
877 ui.write(_(" (check that you compiled the extensions)\n"))
878 problems += 1
878 problems += 1
879
879
880 # templates
880 # templates
881 ui.status(_("Checking templates...\n"))
881 ui.status(_("Checking templates...\n"))
882 try:
882 try:
883 import templater
883 import templater
884 t = templater.templater(templater.templatepath("map-cmdline.default"))
884 t = templater.templater(templater.templatepath("map-cmdline.default"))
885 except Exception, inst:
885 except Exception, inst:
886 ui.write(" %s\n" % inst)
886 ui.write(" %s\n" % inst)
887 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
887 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
888 problems += 1
888 problems += 1
889
889
890 # patch
890 # patch
891 ui.status(_("Checking patch...\n"))
891 ui.status(_("Checking patch...\n"))
892 patcher = ui.config('ui', 'patch')
892 patcher = ui.config('ui', 'patch')
893 patcher = ((patcher and util.find_exe(patcher)) or
893 patcher = ((patcher and util.find_exe(patcher)) or
894 util.find_exe('gpatch') or
894 util.find_exe('gpatch') or
895 util.find_exe('patch'))
895 util.find_exe('patch'))
896 if not patcher:
896 if not patcher:
897 ui.write(_(" Can't find patch or gpatch in PATH\n"))
897 ui.write(_(" Can't find patch or gpatch in PATH\n"))
898 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
898 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
899 problems += 1
899 problems += 1
900 else:
900 else:
901 # actually attempt a patch here
901 # actually attempt a patch here
902 a = "1\n2\n3\n4\n"
902 a = "1\n2\n3\n4\n"
903 b = "1\n2\n3\ninsert\n4\n"
903 b = "1\n2\n3\ninsert\n4\n"
904 fa = writetemp(a)
904 fa = writetemp(a)
905 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
905 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
906 fd = writetemp(d)
906 fd = writetemp(d)
907
907
908 files = {}
908 files = {}
909 try:
909 try:
910 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
910 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
911 except util.Abort, e:
911 except util.Abort, e:
912 ui.write(_(" patch call failed:\n"))
912 ui.write(_(" patch call failed:\n"))
913 ui.write(" " + str(e) + "\n")
913 ui.write(" " + str(e) + "\n")
914 problems += 1
914 problems += 1
915 else:
915 else:
916 if list(files) != [os.path.basename(fa)]:
916 if list(files) != [os.path.basename(fa)]:
917 ui.write(_(" unexpected patch output!"))
917 ui.write(_(" unexpected patch output!"))
918 ui.write(_(" (you may have an incompatible version of patch)\n"))
918 ui.write(_(" (you may have an incompatible version of patch)\n"))
919 problems += 1
919 problems += 1
920 a = file(fa).read()
920 a = file(fa).read()
921 if a != b:
921 if a != b:
922 ui.write(_(" patch test failed!"))
922 ui.write(_(" patch test failed!"))
923 ui.write(_(" (you may have an incompatible version of patch)\n"))
923 ui.write(_(" (you may have an incompatible version of patch)\n"))
924 problems += 1
924 problems += 1
925
925
926 os.unlink(fa)
926 os.unlink(fa)
927 os.unlink(fd)
927 os.unlink(fd)
928
928
929 # merge helper
929 # merge helper
930 ui.status(_("Checking merge helper...\n"))
930 ui.status(_("Checking merge helper...\n"))
931 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
931 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
932 or "hgmerge")
932 or "hgmerge")
933 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
933 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
934 if not cmdpath:
934 if not cmdpath:
935 if cmd == 'hgmerge':
935 if cmd == 'hgmerge':
936 ui.write(_(" No merge helper set and can't find default"
936 ui.write(_(" No merge helper set and can't find default"
937 " hgmerge script in PATH\n"))
937 " hgmerge script in PATH\n"))
938 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
938 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
939 else:
939 else:
940 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
940 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
941 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
941 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
942 problems += 1
942 problems += 1
943 else:
943 else:
944 # actually attempt a patch here
944 # actually attempt a patch here
945 fa = writetemp("1\n2\n3\n4\n")
945 fa = writetemp("1\n2\n3\n4\n")
946 fl = writetemp("1\n2\n3\ninsert\n4\n")
946 fl = writetemp("1\n2\n3\ninsert\n4\n")
947 fr = writetemp("begin\n1\n2\n3\n4\n")
947 fr = writetemp("begin\n1\n2\n3\n4\n")
948 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
948 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
949 if r:
949 if r:
950 ui.write(_(" Got unexpected merge error %d!\n") % r)
950 ui.write(_(" Got unexpected merge error %d!\n") % r)
951 problems += 1
951 problems += 1
952 m = file(fl).read()
952 m = file(fl).read()
953 if m != "begin\n1\n2\n3\ninsert\n4\n":
953 if m != "begin\n1\n2\n3\ninsert\n4\n":
954 ui.write(_(" Got unexpected merge results!\n"))
954 ui.write(_(" Got unexpected merge results!\n"))
955 ui.write(_(" (your merge helper may have the"
955 ui.write(_(" (your merge helper may have the"
956 " wrong argument order)\n"))
956 " wrong argument order)\n"))
957 ui.write(_(" Result: %r\n") % m)
957 ui.write(_(" Result: %r\n") % m)
958 problems += 1
958 problems += 1
959 os.unlink(fa)
959 os.unlink(fa)
960 os.unlink(fl)
960 os.unlink(fl)
961 os.unlink(fr)
961 os.unlink(fr)
962
962
963 # editor
963 # editor
964 ui.status(_("Checking commit editor...\n"))
964 ui.status(_("Checking commit editor...\n"))
965 editor = (os.environ.get("HGEDITOR") or
965 editor = (os.environ.get("HGEDITOR") or
966 ui.config("ui", "editor") or
966 ui.config("ui", "editor") or
967 os.environ.get("EDITOR", "vi"))
967 os.environ.get("EDITOR", "vi"))
968 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
968 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
969 if not cmdpath:
969 if not cmdpath:
970 if editor == 'vi':
970 if editor == 'vi':
971 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
971 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
972 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
972 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
973 else:
973 else:
974 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
974 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
975 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
975 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
976 problems += 1
976 problems += 1
977
977
978 # check username
978 # check username
979 ui.status(_("Checking username...\n"))
979 ui.status(_("Checking username...\n"))
980 user = os.environ.get("HGUSER")
980 user = os.environ.get("HGUSER")
981 if user is None:
981 if user is None:
982 user = ui.config("ui", "username")
982 user = ui.config("ui", "username")
983 if user is None:
983 if user is None:
984 user = os.environ.get("EMAIL")
984 user = os.environ.get("EMAIL")
985 if not user:
985 if not user:
986 ui.warn(" ")
986 ui.warn(" ")
987 ui.username()
987 ui.username()
988 ui.write(_(" (specify a username in your .hgrc file)\n"))
988 ui.write(_(" (specify a username in your .hgrc file)\n"))
989
989
990 if not problems:
990 if not problems:
991 ui.status(_("No problems detected\n"))
991 ui.status(_("No problems detected\n"))
992 else:
992 else:
993 ui.write(_("%s problems detected,"
993 ui.write(_("%s problems detected,"
994 " please check your install!\n") % problems)
994 " please check your install!\n") % problems)
995
995
996 return problems
996 return problems
997
997
998 def debugrename(ui, repo, file1, *pats, **opts):
998 def debugrename(ui, repo, file1, *pats, **opts):
999 """dump rename information"""
999 """dump rename information"""
1000
1000
1001 ctx = repo.changectx(opts.get('rev', 'tip'))
1001 ctx = repo.changectx(opts.get('rev', 'tip'))
1002 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1002 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1003 ctx.node()):
1003 ctx.node()):
1004 m = ctx.filectx(abs).renamed()
1004 m = ctx.filectx(abs).renamed()
1005 if m:
1005 if m:
1006 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1006 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1007 else:
1007 else:
1008 ui.write(_("%s not renamed\n") % rel)
1008 ui.write(_("%s not renamed\n") % rel)
1009
1009
1010 def debugwalk(ui, repo, *pats, **opts):
1010 def debugwalk(ui, repo, *pats, **opts):
1011 """show how files match on given patterns"""
1011 """show how files match on given patterns"""
1012 items = list(cmdutil.walk(repo, pats, opts))
1012 items = list(cmdutil.walk(repo, pats, opts))
1013 if not items:
1013 if not items:
1014 return
1014 return
1015 fmt = '%%s %%-%ds %%-%ds %%s' % (
1015 fmt = '%%s %%-%ds %%-%ds %%s' % (
1016 max([len(abs) for (src, abs, rel, exact) in items]),
1016 max([len(abs) for (src, abs, rel, exact) in items]),
1017 max([len(rel) for (src, abs, rel, exact) in items]))
1017 max([len(rel) for (src, abs, rel, exact) in items]))
1018 for src, abs, rel, exact in items:
1018 for src, abs, rel, exact in items:
1019 line = fmt % (src, abs, rel, exact and 'exact' or '')
1019 line = fmt % (src, abs, rel, exact and 'exact' or '')
1020 ui.write("%s\n" % line.rstrip())
1020 ui.write("%s\n" % line.rstrip())
1021
1021
1022 def diff(ui, repo, *pats, **opts):
1022 def diff(ui, repo, *pats, **opts):
1023 """diff repository (or selected files)
1023 """diff repository (or selected files)
1024
1024
1025 Show differences between revisions for the specified files.
1025 Show differences between revisions for the specified files.
1026
1026
1027 Differences between files are shown using the unified diff format.
1027 Differences between files are shown using the unified diff format.
1028
1028
1029 NOTE: diff may generate unexpected results for merges, as it will
1029 NOTE: diff may generate unexpected results for merges, as it will
1030 default to comparing against the working directory's first parent
1030 default to comparing against the working directory's first parent
1031 changeset if no revisions are specified.
1031 changeset if no revisions are specified.
1032
1032
1033 When two revision arguments are given, changes are shown
1033 When two revision arguments are given, changes are shown
1034 between those revisions. If only one revision is specified, then
1034 between those revisions. If only one revision is specified, then
1035 that revision is compared to the working directory, and, when no
1035 that revision is compared to the working directory, and, when no
1036 revisions are specified, the working directory files are compared
1036 revisions are specified, the working directory files are compared
1037 to its parent.
1037 to its parent.
1038
1038
1039 Without the -a option, diff will avoid generating diffs of files
1039 Without the -a option, diff will avoid generating diffs of files
1040 it detects as binary. With -a, diff will generate a diff anyway,
1040 it detects as binary. With -a, diff will generate a diff anyway,
1041 probably with undesirable results.
1041 probably with undesirable results.
1042 """
1042 """
1043 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1043 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1044
1044
1045 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1045 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1046
1046
1047 patch.diff(repo, node1, node2, fns, match=matchfn,
1047 patch.diff(repo, node1, node2, fns, match=matchfn,
1048 opts=patch.diffopts(ui, opts))
1048 opts=patch.diffopts(ui, opts))
1049
1049
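# Illustrative usage sketches for the revision handling described above; REV1
# and REV2 are hypothetical revision identifiers:
#
#   $ hg diff                       (working directory against its first parent)
#   $ hg diff -r REV1 somefile.c    (REV1 against the working directory)
#   $ hg diff -r REV1 -r REV2       (changes between REV1 and REV2)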
1050 def export(ui, repo, *changesets, **opts):
1050 def export(ui, repo, *changesets, **opts):
1051 """dump the header and diffs for one or more changesets
1051 """dump the header and diffs for one or more changesets
1052
1052
1053 Print the changeset header and diffs for one or more revisions.
1053 Print the changeset header and diffs for one or more revisions.
1054
1054
1055 The information shown in the changeset header is: author,
1055 The information shown in the changeset header is: author,
1056 changeset hash, parent(s) and commit comment.
1056 changeset hash, parent(s) and commit comment.
1057
1057
1058 NOTE: export may generate unexpected diff output for merge changesets,
1058 NOTE: export may generate unexpected diff output for merge changesets,
1059 as it will compare the merge changeset against its first parent only.
1059 as it will compare the merge changeset against its first parent only.
1060
1060
1061 Output may be to a file, in which case the name of the file is
1061 Output may be to a file, in which case the name of the file is
1062 given using a format string. The formatting rules are as follows:
1062 given using a format string. The formatting rules are as follows:
1063
1063
1064 %% literal "%" character
1064 %% literal "%" character
1065 %H changeset hash (40 bytes of hexadecimal)
1065 %H changeset hash (40 bytes of hexadecimal)
1066 %N number of patches being generated
1066 %N number of patches being generated
1067 %R changeset revision number
1067 %R changeset revision number
1068 %b basename of the exporting repository
1068 %b basename of the exporting repository
1069 %h short-form changeset hash (12 bytes of hexadecimal)
1069 %h short-form changeset hash (12 bytes of hexadecimal)
1070 %n zero-padded sequence number, starting at 1
1070 %n zero-padded sequence number, starting at 1
1071 %r zero-padded changeset revision number
1071 %r zero-padded changeset revision number
1072
1072
1073 Without the -a option, export will avoid generating diffs of files
1073 Without the -a option, export will avoid generating diffs of files
1074 it detects as binary. With -a, export will generate a diff anyway,
1074 it detects as binary. With -a, export will generate a diff anyway,
1075 probably with undesirable results.
1075 probably with undesirable results.
1076
1076
1077 With the --switch-parent option, the diff will be against the second
1077 With the --switch-parent option, the diff will be against the second
1078 parent. It can be useful to review a merge.
1078 parent. It can be useful to review a merge.
1079 """
1079 """
1080 if not changesets:
1080 if not changesets:
1081 raise util.Abort(_("export requires at least one changeset"))
1081 raise util.Abort(_("export requires at least one changeset"))
1082 revs = cmdutil.revrange(repo, changesets)
1082 revs = cmdutil.revrange(repo, changesets)
1083 if len(revs) > 1:
1083 if len(revs) > 1:
1084 ui.note(_('exporting patches:\n'))
1084 ui.note(_('exporting patches:\n'))
1085 else:
1085 else:
1086 ui.note(_('exporting patch:\n'))
1086 ui.note(_('exporting patch:\n'))
1087 patch.export(repo, revs, template=opts['output'],
1087 patch.export(repo, revs, template=opts['output'],
1088 switch_parent=opts['switch_parent'],
1088 switch_parent=opts['switch_parent'],
1089 opts=patch.diffopts(ui, opts))
1089 opts=patch.diffopts(ui, opts))
1090
1090
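# Illustrative sketch of the output format string described above; REV is a
# hypothetical revision identifier:
#
#   $ hg export -o "%R-%h.patch" REV
#
# writes the patch to e.g. "1234-abcdef123456.patch" (revision number plus
# short hash); --switch-parent diffs a merge against its second parent instead.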
1091 def grep(ui, repo, pattern, *pats, **opts):
1091 def grep(ui, repo, pattern, *pats, **opts):
1092 """search for a pattern in specified files and revisions
1092 """search for a pattern in specified files and revisions
1093
1093
1094 Search revisions of files for a regular expression.
1094 Search revisions of files for a regular expression.
1095
1095
1096 This command behaves differently than Unix grep. It only accepts
1096 This command behaves differently than Unix grep. It only accepts
1097 Python/Perl regexps. It searches repository history, not the
1097 Python/Perl regexps. It searches repository history, not the
1098 working directory. It always prints the revision number in which
1098 working directory. It always prints the revision number in which
1099 a match appears.
1099 a match appears.
1100
1100
1101 By default, grep only prints output for the first revision of a
1101 By default, grep only prints output for the first revision of a
1102 file in which it finds a match. To get it to print every revision
1102 file in which it finds a match. To get it to print every revision
1103 that contains a change in match status ("-" for a match that
1103 that contains a change in match status ("-" for a match that
1104 becomes a non-match, or "+" for a non-match that becomes a match),
1104 becomes a non-match, or "+" for a non-match that becomes a match),
1105 use the --all flag.
1105 use the --all flag.
1106 """
1106 """
1107 reflags = 0
1107 reflags = 0
1108 if opts['ignore_case']:
1108 if opts['ignore_case']:
1109 reflags |= re.I
1109 reflags |= re.I
1110 try:
1110 try:
1111 regexp = re.compile(pattern, reflags)
1111 regexp = re.compile(pattern, reflags)
1112 except Exception, inst:
1112 except Exception, inst:
1113 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1113 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1114 return None
1114 return None
1115 sep, eol = ':', '\n'
1115 sep, eol = ':', '\n'
1116 if opts['print0']:
1116 if opts['print0']:
1117 sep = eol = '\0'
1117 sep = eol = '\0'
1118
1118
1119 fcache = {}
1119 fcache = {}
1120 def getfile(fn):
1120 def getfile(fn):
1121 if fn not in fcache:
1121 if fn not in fcache:
1122 fcache[fn] = repo.file(fn)
1122 fcache[fn] = repo.file(fn)
1123 return fcache[fn]
1123 return fcache[fn]
1124
1124
1125 def matchlines(body):
1125 def matchlines(body):
1126 begin = 0
1126 begin = 0
1127 linenum = 0
1127 linenum = 0
1128 while True:
1128 while True:
1129 match = regexp.search(body, begin)
1129 match = regexp.search(body, begin)
1130 if not match:
1130 if not match:
1131 break
1131 break
1132 mstart, mend = match.span()
1132 mstart, mend = match.span()
1133 linenum += body.count('\n', begin, mstart) + 1
1133 linenum += body.count('\n', begin, mstart) + 1
1134 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1134 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1135 lend = body.find('\n', mend)
1135 lend = body.find('\n', mend)
1136 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1136 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1137 begin = lend + 1
1137 begin = lend + 1
1138
1138
1139 class linestate(object):
1139 class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    prev = {}
    def display(fn, rev, states, prevstates):
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0

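# Illustrative sketch (not part of Mercurial): how the nested difflinestates
# helper above maps difflib.SequenceMatcher opcodes onto '+'/'-' pairs when
# "grep --all" shows lines appearing and disappearing between revisions.
# Plain strings stand in for linestate objects here, which is a fair
# approximation because linestate.__eq__ compares only the line text.
def _example_difflinestates():
    import difflib
    old = ["spam = 1", "eggs = 2", "ham = 3"]
    new = ["spam = 1", "eggs = 42", "bacon = 4", "ham = 3"]
    out = []
    sm = difflib.SequenceMatcher(None, old, new)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag == 'insert':
            out.extend([('+', new[i]) for i in xrange(blo, bhi)])
        elif tag == 'delete':
            out.extend([('-', old[i]) for i in xrange(alo, ahi)])
        elif tag == 'replace':
            out.extend([('-', old[i]) for i in xrange(alo, ahi)])
            out.extend([('+', new[i]) for i in xrange(blo, bhi)])
    # out == [('-', 'eggs = 2'), ('+', 'eggs = 42'), ('+', 'bacon = 4')]
    return out
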
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revision names are given, this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking for repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)

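# Illustrative sketch (not part of Mercurial): the docstring above defines a
# head as a changeset with no children.  Given a toy parent map, the heads
# are simply the nodes that never appear as anyone's parent.
def _example_dag_heads():
    # child -> list of parents; node 0 is the root
    parents = {1: [0], 2: [1], 3: [1], 4: [3]}
    nodes = set(parents.keys()) | set([0])
    has_child = set()
    for ps in parents.values():
        has_child.update(ps)
    heads = sorted(nodes - has_child)
    # heads == [2, 4]: both sides of the fork at node 1 are heads
    return heads
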
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = cmdutil.findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = None
        if not ct:
            ui.status(_('no commands defined\n'))
            return

        ui.status(_('list of commands:\n\n'))
        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands:\n\n'))
        else:
            ui.status(_('list of commands:\n\n'))

        helplist()

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

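# Illustrative sketch (not part of Mercurial): helplist and the option table
# above both rely on the "%-*s" formatting trick, where the field width is
# passed as an extra argument so every row lines up with the longest name.
def _example_column_align():
    items = {'add': 'add the specified files', 'st': 'show changed files',
             'annotate': 'show changeset information per file line'}
    width = max(map(len, items.keys()))
    lines = []
    for name in sorted(items.keys()):
        lines.append(' %-*s  %s' % (width, name, items[name]))
    # Every description starts in the same column, e.g.:
    #  add       add the specified files
    #  annotate  show changeset information per file line
    #  st        show changed files
    return '\n'.join(lines)
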
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags)
    output = []

    if source:
        source, revs = cmdutil.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))

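# Illustrative sketch (not part of Mercurial): how identify composes its
# output above -- parent hashes joined with '+', then a trailing '+' when
# the working directory has uncommitted changes.  The hash strings here are
# made-up placeholders.
def _example_identify_string(parent_hashes, dirty):
    # parent_hashes: list of short hex ids (two entries during a merge)
    return "%s%s" % ('+'.join(parent_hashes), dirty and "+" or "")

# _example_identify_string(["deadbeef"], True)               -> 'deadbeef+'
# _example_identify_string(["deadbeef", "c0ffee12"], False)  -> 'deadbeef+c0ffee12'
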
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    wlock = repo.wlock()
    lock = repo.lock()

    for p in patches:
        pf = os.path.join(d, p)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % p)
            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf, 'rb'))

        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            cmdline_message = cmdutil.logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            wp = repo.workingctx().parents()
            if opts.get('exact'):
                if not nodeid or not p1:
                    raise util.Abort(_('not a mercurial patch'))
                p1 = repo.lookup(p1)
                p2 = repo.lookup(p2 or hex(nullid))

                if p1 != wp[0].node():
                    hg.clean(repo, p1, wlock=wlock)
                repo.dirstate.setparents(p1, p2)
            elif p2:
                try:
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2)
                    if p1 == wp[0].node():
                        repo.dirstate.setparents(p1, p2)
                except hg.RepoError:
                    pass
            if opts.get('exact') or opts.get('import_branch'):
                repo.dirstate.setbranch(branch or 'default')

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                   files=files)
            finally:
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
            if opts.get('exact'):
                if hex(n) != nodeid:
                    repo.rollback(wlock=wlock, lock=lock)
                    raise util.Abort(_('patch is damaged or loses information'))
        finally:
            os.unlink(tmpname)

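# Illustrative sketch (not part of Mercurial): import_ above reads each patch
# either from stdin (patch name "-") or from a file, and always removes the
# temporary file that patch.extract() wrote, even when applying fails.  The
# same open-or-stdin plus try/finally shape looks like this in isolation;
# apply_func and the temp file are stand-ins, not Mercurial APIs.
def _example_apply_patches(names, apply_func):
    import os, sys, tempfile
    for name in names:
        if name == '-':
            src = sys.stdin
        else:
            src = open(name, 'rb')
        fd, tmpname = tempfile.mkstemp(prefix='example-patch-')
        os.close(fd)
        try:
            # any exception apply_func raises still reaches the caller,
            # but only after the cleanup below has run
            apply_func(src, tmpname)
        finally:
            os.unlink(tmpname)
            if src is not sys.stdin:
                src.close()
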
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For a remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    if revs:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)

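# Illustrative sketch (not part of Mercurial): this module predates Python's
# conditional expression, so it leans on the "cond and a or b" idiom seen
# above (hexfunc, bundletype, the trailing "+" in identify, and so on).  It
# picks a when cond is true and b otherwise -- but silently picks b whenever
# a itself is falsy, which is why the chosen values are always non-empty
# strings such as "HG10BZ" / "HG10UN".
def _example_and_or_ternary(local):
    bundletype = local and "HG10BZ" or "HG10UN"
    return bundletype

# _example_and_or_ternary(True)  -> 'HG10BZ'
# _example_and_or_ternary(False) -> 'HG10UN'
# Pitfall: (cond and "" or "fallback") always yields "fallback".
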
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)

def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    ret = 1
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=util.always,
                                             default='relglob'):
        if src == 'b':
            continue
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)
        ret = 0

    return ret

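# Illustrative sketch (not part of Mercurial): with --print0, locate above
# terminates names with '\0' instead of '\n', which is what "xargs -0"
# expects and what keeps filenames containing spaces from being split apart.
def _example_print0(filenames, print0=True):
    import sys
    end = print0 and '\0' or '\n'
    for name in filenames:
        sys.stdout.write(name + end)

# Shell usage (hypothetical file names):
#   hg locate -0 | xargs -0 wc -l
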
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}
    ncache = {}
    dcache = []
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1

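# Illustrative sketch (not part of Mercurial): the -k/--keyword handling in
# log above keeps a changeset only if every keyword appears, case-
# insensitively, in its user name (changes[1]), description (changes[4]),
# or file list (changes[3]).  The same check in isolation:
def _example_keyword_filter(keywords, user, description, files):
    for k in [kw.lower() for kw in keywords]:
        if not (k in user.lower() or
                k in description.lower() or
                k in " ".join(files).lower()):
            return False
    return True

# _example_keyword_filter(["merge"], "alice", "merge with upstream",
#                         ["a.txt"]) -> True
# _example_keyword_filter(["bob"], "alice", "merge with upstream",
#                         ["a.txt"]) -> False
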
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or tip if no revision is checked out.

    The manifest is the list of files being version controlled.

    With the -v flag, print file permissions. With the --debug flag,
    print file revision hashes.
    """

    m = repo.changectx(rev).manifest()
    files = m.keys()
    files.sort()

    for f in files:
        if ui.debugflag:
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            ui.write("%3s " % (m.execf(f) and "755" or "644"))
        ui.write("%s\n" % f)

def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    then that other head is merged with by default. Otherwise, an
    explicit revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    if not node:
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == heads[0] and heads[-1] or heads[0]
    return hg.merge(repo, node, force=force)

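# Illustrative sketch (not part of Mercurial): when merge above is called
# without a revision, it insists on exactly two heads and then picks "the
# other one" -- whichever head is not the working directory's parent.
def _example_pick_other_head(heads, parent):
    if len(heads) != 2:
        raise ValueError('need exactly two heads to pick a default')
    if parent not in heads:
        raise ValueError('working dir parent must itself be a head')
    return parent == heads[0] and heads[-1] or heads[0]

# _example_pick_other_head(['h1', 'h2'], 'h1') -> 'h2'
# _example_pick_other_head(['h1', 'h2'], 'h2') -> 'h1'
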
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)

def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, the revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if file_:
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        ctx = repo.filectx(files[0], changeid=rev)
    elif rev:
        ctx = repo.changectx(rev)
    else:
        ctx = repo.workingctx()
    p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))

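# Illustrative sketch (not part of Mercurial): paths above either resolves a
# single symbolic name or lists every "name = URL" pair from the [paths]
# section.  A plain dict stands in for ui.configitems("paths") here.
def _example_paths_lookup(paths, search=None):
    # paths: e.g. {'default': 'https://example.com/repo',
    #              'backup':  'ssh://backup.example.com//srv/repo'}
    if search:
        if search in paths:
            return paths[search]
        return None                      # the command warns "not found!"
    return ["%s = %s" % (name, path) for name, path in sorted(paths.items())]
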
2031 def postincoming(ui, repo, modheads, optupdate, wasempty):
2031 def postincoming(ui, repo, modheads, optupdate, wasempty):
2032 if modheads == 0:
2032 if modheads == 0:
2033 return
2033 return
2034 if optupdate:
2034 if optupdate:
2035 if wasempty:
2035 if wasempty:
2036 return hg.update(repo, repo.lookup('default'))
2036 return hg.update(repo, repo.lookup('default'))
2037 elif modheads == 1:
2037 elif modheads == 1:
2038 return hg.update(repo, repo.changelog.tip()) # update
2038 return hg.update(repo, repo.changelog.tip()) # update
2039 else:
2039 else:
2040 ui.status(_("not updating, since new heads added\n"))
2040 ui.status(_("not updating, since new heads added\n"))
2041 if modheads > 1:
2041 if modheads > 1:
2042 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2042 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2043 else:
2043 else:
2044 ui.status(_("(run 'hg update' to get a working copy)\n"))
2044 ui.status(_("(run 'hg update' to get a working copy)\n"))
2045
2045
2046 def pull(ui, repo, source="default", **opts):
2046 def pull(ui, repo, source="default", **opts):
2047 """pull changes from the specified source
2047 """pull changes from the specified source
2048
2048
2049 Pull changes from a remote repository to a local one.
2049 Pull changes from a remote repository to a local one.
2050
2050
2051 This finds all changes from the repository at the specified path
2051 This finds all changes from the repository at the specified path
2052 or URL and adds them to the local repository. By default, this
2052 or URL and adds them to the local repository. By default, this
2053 does not update the copy of the project in the working directory.
2053 does not update the copy of the project in the working directory.
2054
2054
2055 Valid URLs are of the form:
2055 Valid URLs are of the form:
2056
2056
2057 local/filesystem/path (or file://local/filesystem/path)
2057 local/filesystem/path (or file://local/filesystem/path)
2058 http://[user@]host[:port]/[path]
2058 http://[user@]host[:port]/[path]
2059 https://[user@]host[:port]/[path]
2059 https://[user@]host[:port]/[path]
2060 ssh://[user@]host[:port]/[path]
2060 ssh://[user@]host[:port]/[path]
2061 static-http://host[:port]/[path]
2061 static-http://host[:port]/[path]
2062
2062
2063 Paths in the local filesystem can either point to Mercurial
2063 Paths in the local filesystem can either point to Mercurial
2064 repositories or to bundle files (as created by 'hg bundle' or
2064 repositories or to bundle files (as created by 'hg bundle' or
2065 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2065 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2066 allows access to a Mercurial repository where you simply use a web
2066 allows access to a Mercurial repository where you simply use a web
2067 server to publish the .hg directory as static content.
2067 server to publish the .hg directory as static content.
2068
2068
2069 An optional identifier after # indicates a particular branch, tag,
2069 An optional identifier after # indicates a particular branch, tag,
2070 or changeset to pull.
2070 or changeset to pull.
2071
2071
2072 Some notes about using SSH with Mercurial:
2072 Some notes about using SSH with Mercurial:
2073 - SSH requires an accessible shell account on the destination machine
2073 - SSH requires an accessible shell account on the destination machine
2074 and a copy of hg in the remote path or specified with as remotecmd.
2074 and a copy of hg in the remote path or specified with as remotecmd.
2075 - path is relative to the remote user's home directory by default.
2075 - path is relative to the remote user's home directory by default.
2076 Use an extra slash at the start of a path to specify an absolute path:
2076 Use an extra slash at the start of a path to specify an absolute path:
2077 ssh://example.com//tmp/repository
2077 ssh://example.com//tmp/repository
2078 - Mercurial doesn't use its own compression via SSH; the right thing
2078 - Mercurial doesn't use its own compression via SSH; the right thing
2079 to do is to configure it in your ~/.ssh/config, e.g.:
2079 to do is to configure it in your ~/.ssh/config, e.g.:
2080 Host *.mylocalnetwork.example.com
2080 Host *.mylocalnetwork.example.com
2081 Compression no
2081 Compression no
2082 Host *
2082 Host *
2083 Compression yes
2083 Compression yes
2084 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2084 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2085 with the --ssh command line option.
2085 with the --ssh command line option.
2086 """
2086 """
2087 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2087 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2088 cmdutil.setremoteconfig(ui, opts)
2088 cmdutil.setremoteconfig(ui, opts)
2089
2089
2090 other = hg.repository(ui, source)
2090 other = hg.repository(ui, source)
2091 ui.status(_('pulling from %s\n') % (source))
2091 ui.status(_('pulling from %s\n') % (source))
2092 if revs:
2092 if revs:
2093 if 'lookup' in other.capabilities:
2093 if 'lookup' in other.capabilities:
2094 revs = [other.lookup(rev) for rev in revs]
2094 revs = [other.lookup(rev) for rev in revs]
2095 else:
2095 else:
2096 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2096 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2097 raise util.Abort(error)
2097 raise util.Abort(error)
2098
2098
2099 wasempty = repo.changelog.count() == 0
2099 wasempty = repo.changelog.count() == 0
2100 modheads = repo.pull(other, heads=revs, force=opts['force'])
2100 modheads = repo.pull(other, heads=revs, force=opts['force'])
2101 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2101 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2102
2102
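# --- Editor's sketch (not part of this changeset): a minimal programmatic
# pull, assuming only names the module already uses above (hg.repository,
# repo.pull, postincoming); the source path is hypothetical.
def _example_pull(ui, repo, source='/path/to/other/repo'):
    other = hg.repository(ui, source)          # open the source repository
    wasempty = repo.changelog.count() == 0     # same emptiness check as pull()
    modheads = repo.pull(other, heads=None, force=False)
    # update the working directory afterwards, like 'hg pull -u'
    return postincoming(ui, repo, modheads, True, wasempty)
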
2103 def push(ui, repo, dest=None, **opts):
2103 def push(ui, repo, dest=None, **opts):
2104 """push changes to the specified destination
2104 """push changes to the specified destination
2105
2105
2106 Push changes from the local repository to the given destination.
2106 Push changes from the local repository to the given destination.
2107
2107
2108 This is the symmetrical operation for pull. It helps to move
2108 This is the symmetrical operation for pull. It helps to move
2109 changes from the current repository to a different one. If the
2109 changes from the current repository to a different one. If the
2110 destination is local this is identical to a pull in that directory
2110 destination is local this is identical to a pull in that directory
2111 from the current one.
2111 from the current one.
2112
2112
2113 By default, push will refuse to run if it detects the result would
2113 By default, push will refuse to run if it detects the result would
2114 increase the number of remote heads. This generally indicates the
2114 increase the number of remote heads. This generally indicates the
2115 client has forgotten to sync and merge before pushing.
2115 client has forgotten to sync and merge before pushing.
2116
2116
2117 Valid URLs are of the form:
2117 Valid URLs are of the form:
2118
2118
2119 local/filesystem/path (or file://local/filesystem/path)
2119 local/filesystem/path (or file://local/filesystem/path)
2120 ssh://[user@]host[:port]/[path]
2120 ssh://[user@]host[:port]/[path]
2121 http://[user@]host[:port]/[path]
2121 http://[user@]host[:port]/[path]
2122 https://[user@]host[:port]/[path]
2122 https://[user@]host[:port]/[path]
2123
2123
2124 An optional identifier after # indicates a particular branch, tag,
2124 An optional identifier after # indicates a particular branch, tag,
2125 or changeset to push.
2125 or changeset to push.
2126
2126
2127 Look at the help text for the pull command for important details
2127 Look at the help text for the pull command for important details
2128 about ssh:// URLs.
2128 about ssh:// URLs.
2129
2129
2130 Pushing to http:// and https:// URLs is only possible if this
2130 Pushing to http:// and https:// URLs is only possible if this
2131 feature is explicitly enabled on the remote Mercurial server.
2131 feature is explicitly enabled on the remote Mercurial server.
2132 """
2132 """
2133 dest, revs = cmdutil.parseurl(
2133 dest, revs = cmdutil.parseurl(
2134 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2134 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2135 cmdutil.setremoteconfig(ui, opts)
2135 cmdutil.setremoteconfig(ui, opts)
2136
2136
2137 other = hg.repository(ui, dest)
2137 other = hg.repository(ui, dest)
2138 ui.status('pushing to %s\n' % (dest))
2138 ui.status('pushing to %s\n' % (dest))
2139 if revs:
2139 if revs:
2140 revs = [repo.lookup(rev) for rev in revs]
2140 revs = [repo.lookup(rev) for rev in revs]
2141 r = repo.push(other, opts['force'], revs=revs)
2141 r = repo.push(other, opts['force'], revs=revs)
2142 return r == 0
2142 return r == 0
2143
2143
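# --- Editor's sketch (not part of this changeset): the push side of the same
# API, assuming the hg.repository, repo.lookup and repo.push calls used by
# push() above; the destination path and revision are hypothetical.
def _example_push(ui, repo, dest='/path/to/other/repo', rev='tip'):
    other = hg.repository(ui, dest)            # open the destination
    node = repo.lookup(rev)                    # resolve revision to a node
    r = repo.push(other, False, revs=[node])   # force=False, push this head
    return r == 0
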
2144 def rawcommit(ui, repo, *pats, **opts):
2144 def rawcommit(ui, repo, *pats, **opts):
2145 """raw commit interface (DEPRECATED)
2145 """raw commit interface (DEPRECATED)
2146
2146
2147 (DEPRECATED)
2147 (DEPRECATED)
2148 Low-level commit, for use in helper scripts.
2148 Low-level commit, for use in helper scripts.
2149
2149
2150 This command is not intended to be used by normal users, as it is
2150 This command is not intended to be used by normal users, as it is
2151 primarily useful for importing from other SCMs.
2151 primarily useful for importing from other SCMs.
2152
2152
2153 This command is now deprecated and will be removed in a future
2153 This command is now deprecated and will be removed in a future
2154 release; please use debugsetparents and commit instead.
2154 release; please use debugsetparents and commit instead.
2155 """
2155 """
2156
2156
2157 ui.warn(_("(the rawcommit command is deprecated)\n"))
2157 ui.warn(_("(the rawcommit command is deprecated)\n"))
2158
2158
2159 message = cmdutil.logmessage(opts)
2159 message = cmdutil.logmessage(opts)
2160
2160
2161 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2161 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2162 if opts['files']:
2162 if opts['files']:
2163 files += open(opts['files']).read().splitlines()
2163 files += open(opts['files']).read().splitlines()
2164
2164
2165 parents = [repo.lookup(p) for p in opts['parent']]
2165 parents = [repo.lookup(p) for p in opts['parent']]
2166
2166
2167 try:
2167 try:
2168 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2168 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2169 except ValueError, inst:
2169 except ValueError, inst:
2170 raise util.Abort(str(inst))
2170 raise util.Abort(str(inst))
2171
2171
2172 def recover(ui, repo):
2172 def recover(ui, repo):
2173 """roll back an interrupted transaction
2173 """roll back an interrupted transaction
2174
2174
2175 Recover from an interrupted commit or pull.
2175 Recover from an interrupted commit or pull.
2176
2176
2177 This command tries to fix the repository status after an interrupted
2177 This command tries to fix the repository status after an interrupted
2178 operation. It should only be necessary when Mercurial suggests it.
2178 operation. It should only be necessary when Mercurial suggests it.
2179 """
2179 """
2180 if repo.recover():
2180 if repo.recover():
2181 return hg.verify(repo)
2181 return hg.verify(repo)
2182 return 1
2182 return 1
2183
2183
2184 def remove(ui, repo, *pats, **opts):
2184 def remove(ui, repo, *pats, **opts):
2185 """remove the specified files on the next commit
2185 """remove the specified files on the next commit
2186
2186
2187 Schedule the indicated files for removal from the repository.
2187 Schedule the indicated files for removal from the repository.
2188
2188
2189 This only removes files from the current branch, not from the
2189 This only removes files from the current branch, not from the
2190 entire project history. If the files still exist in the working
2190 entire project history. If the files still exist in the working
2191 directory, they will be deleted from it. If invoked with --after,
2191 directory, they will be deleted from it. If invoked with --after,
2192 files are marked as removed, but not actually unlinked unless --force
2192 files are marked as removed, but not actually unlinked unless --force
2193 is also given. Without exact file names, --after will only mark
2193 is also given. Without exact file names, --after will only mark
2194 files as removed if they are no longer in the working directory.
2194 files as removed if they are no longer in the working directory.
2195
2195
2196 This command schedules the files to be removed at the next commit.
2196 This command schedules the files to be removed at the next commit.
2197 To undo a remove before that, see hg revert.
2197 To undo a remove before that, see hg revert.
2198
2198
2199 Modified files and added files are not removed by default. To
2199 Modified files and added files are not removed by default. To
2200 remove them, use the -f/--force option.
2200 remove them, use the -f/--force option.
2201 """
2201 """
2202 names = []
2202 names = []
2203 if not opts['after'] and not pats:
2203 if not opts['after'] and not pats:
2204 raise util.Abort(_('no files specified'))
2204 raise util.Abort(_('no files specified'))
2205 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2205 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2206 exact = dict.fromkeys(files)
2206 exact = dict.fromkeys(files)
2207 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2207 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2208 modified, added, removed, deleted, unknown = mardu
2208 modified, added, removed, deleted, unknown = mardu
2209 remove, forget = [], []
2209 remove, forget = [], []
2210 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2210 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2211 reason = None
2211 reason = None
2212 if abs in modified and not opts['force']:
2212 if abs in modified and not opts['force']:
2213 reason = _('is modified (use -f to force removal)')
2213 reason = _('is modified (use -f to force removal)')
2214 elif abs in added:
2214 elif abs in added:
2215 if opts['force']:
2215 if opts['force']:
2216 forget.append(abs)
2216 forget.append(abs)
2217 continue
2217 continue
2218 reason = _('has been marked for add (use -f to force removal)')
2218 reason = _('has been marked for add (use -f to force removal)')
2219 elif repo.dirstate.state(abs) == '?':
2219 elif repo.dirstate.state(abs) == '?':
2220 reason = _('is not managed')
2220 reason = _('is not managed')
2221 elif opts['after'] and not exact and abs not in deleted:
2221 elif opts['after'] and not exact and abs not in deleted:
2222 continue
2222 continue
2223 elif abs in removed:
2223 elif abs in removed:
2224 continue
2224 continue
2225 if reason:
2225 if reason:
2226 if exact:
2226 if exact:
2227 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2227 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2228 else:
2228 else:
2229 if ui.verbose or not exact:
2229 if ui.verbose or not exact:
2230 ui.status(_('removing %s\n') % rel)
2230 ui.status(_('removing %s\n') % rel)
2231 remove.append(abs)
2231 remove.append(abs)
2232 repo.forget(forget)
2232 repo.forget(forget)
2233 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2233 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2234
2234
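# --- Editor's sketch (not part of this changeset): the forget/remove split
# above in miniature -- files pending addition are merely forgotten, all
# others are scheduled for removal; 'added' is assumed to come from
# repo.status() as in remove() above.
def _example_remove(repo, files, added):
    forget = [f for f in files if f in added]      # undo the pending add
    remove = [f for f in files if f not in added]  # schedule for removal
    repo.forget(forget)
    repo.remove(remove, unlink=True)               # also delete from disk
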
2235 def rename(ui, repo, *pats, **opts):
2235 def rename(ui, repo, *pats, **opts):
2236 """rename files; equivalent of copy + remove
2236 """rename files; equivalent of copy + remove
2237
2237
2238 Mark dest as copies of sources; mark sources for deletion. If
2238 Mark dest as copies of sources; mark sources for deletion. If
2239 dest is a directory, copies are put in that directory. If dest is
2239 dest is a directory, copies are put in that directory. If dest is
2240 a file, there can only be one source.
2240 a file, there can only be one source.
2241
2241
2242 By default, this command copies the contents of files as they
2242 By default, this command copies the contents of files as they
2243 stand in the working directory. If invoked with --after, the
2243 stand in the working directory. If invoked with --after, the
2244 operation is recorded, but no copying is performed.
2244 operation is recorded, but no copying is performed.
2245
2245
2246 This command takes effect in the next commit. To undo a rename
2246 This command takes effect in the next commit. To undo a rename
2247 before that, see hg revert.
2247 before that, see hg revert.
2248 """
2248 """
2249 wlock = repo.wlock(0)
2249 wlock = repo.wlock(0)
2250 errs, copied = docopy(ui, repo, pats, opts, wlock)
2250 errs, copied = docopy(ui, repo, pats, opts, wlock)
2251 names = []
2251 names = []
2252 for abs, rel, exact in copied:
2252 for abs, rel, exact in copied:
2253 if ui.verbose or not exact:
2253 if ui.verbose or not exact:
2254 ui.status(_('removing %s\n') % rel)
2254 ui.status(_('removing %s\n') % rel)
2255 names.append(abs)
2255 names.append(abs)
2256 if not opts.get('dry_run'):
2256 if not opts.get('dry_run'):
2257 repo.remove(names, True, wlock=wlock)
2257 repo.remove(names, True, wlock=wlock)
2258 return errs
2258 return errs
2259
2259
2260 def revert(ui, repo, *pats, **opts):
2260 def revert(ui, repo, *pats, **opts):
2261 """revert files or dirs to their states as of some revision
2261 """revert files or dirs to their states as of some revision
2262
2262
2263 With no revision specified, revert the named files or directories
2263 With no revision specified, revert the named files or directories
2264 to the contents they had in the parent of the working directory.
2264 to the contents they had in the parent of the working directory.
2265 This restores the contents of the affected files to an unmodified
2265 This restores the contents of the affected files to an unmodified
2266 state and unschedules adds, removes, copies, and renames. If the
2266 state and unschedules adds, removes, copies, and renames. If the
2267 working directory has two parents, you must explicitly specify the
2267 working directory has two parents, you must explicitly specify the
2268 revision to revert to.
2268 revision to revert to.
2269
2269
2270 Modified files are saved with a .orig suffix before reverting.
2270 Modified files are saved with a .orig suffix before reverting.
2271 To disable these backups, use --no-backup.
2271 To disable these backups, use --no-backup.
2272
2272
2273 Using the -r option, revert the given files or directories to their
2273 Using the -r option, revert the given files or directories to their
2274 contents as of a specific revision. This can be helpful to "roll
2274 contents as of a specific revision. This can be helpful to "roll
2275 back" some or all of a change that should not have been committed.
2275 back" some or all of a change that should not have been committed.
2276
2276
2277 Revert modifies the working directory. It does not commit any
2277 Revert modifies the working directory. It does not commit any
2278 changes, or change the parent of the working directory. If you
2278 changes, or change the parent of the working directory. If you
2279 revert to a revision other than the parent of the working
2279 revert to a revision other than the parent of the working
2280 directory, the reverted files will thus appear modified
2280 directory, the reverted files will thus appear modified
2281 afterwards.
2281 afterwards.
2282
2282
2283 If a file has been deleted, it is restored. If the executable
2283 If a file has been deleted, it is restored. If the executable
2284 mode of a file was changed, it is reset.
2284 mode of a file was changed, it is reset.
2285
2285
2286 If names are given, all files matching the names are reverted.
2286 If names are given, all files matching the names are reverted.
2287
2287
2288 If no arguments are given, no files are reverted.
2288 If no arguments are given, no files are reverted.
2289 """
2289 """
2290
2290
2291 if opts["date"]:
2291 if opts["date"]:
2292 if opts["rev"]:
2292 if opts["rev"]:
2293 raise util.Abort(_("you can't specify a revision and a date"))
2293 raise util.Abort(_("you can't specify a revision and a date"))
2294 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2294 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2295
2295
2296 if not pats and not opts['all']:
2296 if not pats and not opts['all']:
2297 raise util.Abort(_('no files or directories specified; '
2297 raise util.Abort(_('no files or directories specified; '
2298 'use --all to revert the whole repo'))
2298 'use --all to revert the whole repo'))
2299
2299
2300 parent, p2 = repo.dirstate.parents()
2300 parent, p2 = repo.dirstate.parents()
2301 if not opts['rev'] and p2 != nullid:
2301 if not opts['rev'] and p2 != nullid:
2302 raise util.Abort(_('uncommitted merge - please provide a '
2302 raise util.Abort(_('uncommitted merge - please provide a '
2303 'specific revision'))
2303 'specific revision'))
2304 ctx = repo.changectx(opts['rev'])
2304 ctx = repo.changectx(opts['rev'])
2305 node = ctx.node()
2305 node = ctx.node()
2306 mf = ctx.manifest()
2306 mf = ctx.manifest()
2307 if node == parent:
2307 if node == parent:
2308 pmf = mf
2308 pmf = mf
2309 else:
2309 else:
2310 pmf = None
2310 pmf = None
2311
2311
2312 wlock = repo.wlock()
2312 wlock = repo.wlock()
2313
2313
2314 # need all matching names in dirstate and manifest of target rev,
2314 # need all matching names in dirstate and manifest of target rev,
2315 # so have to walk both. do not print errors if files exist in one
2315 # so have to walk both. do not print errors if files exist in one
2316 # but not the other.
2316 # but not the other.
2317
2317
2318 names = {}
2318 names = {}
2319 target_only = {}
2319 target_only = {}
2320
2320
2321 # walk dirstate.
2321 # walk dirstate.
2322
2322
2323 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2323 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2324 badmatch=mf.has_key):
2324 badmatch=mf.has_key):
2325 names[abs] = (rel, exact)
2325 names[abs] = (rel, exact)
2326 if src == 'b':
2326 if src == 'b':
2327 target_only[abs] = True
2327 target_only[abs] = True
2328
2328
2329 # walk target manifest.
2329 # walk target manifest.
2330
2330
2331 def badmatch(path):
2331 def badmatch(path):
2332 if path in names:
2332 if path in names:
2333 return True
2333 return True
2334 path_ = path + '/'
2334 path_ = path + '/'
2335 for f in names:
2335 for f in names:
2336 if f.startswith(path_):
2336 if f.startswith(path_):
2337 return True
2337 return True
2338 return False
2338 return False
2339
2339
2340 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2340 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2341 badmatch=badmatch):
2341 badmatch=badmatch):
2342 if abs in names or src == 'b':
2342 if abs in names or src == 'b':
2343 continue
2343 continue
2344 names[abs] = (rel, exact)
2344 names[abs] = (rel, exact)
2345 target_only[abs] = True
2345 target_only[abs] = True
2346
2346
2347 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2347 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2348 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2348 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2349
2349
2350 revert = ([], _('reverting %s\n'))
2350 revert = ([], _('reverting %s\n'))
2351 add = ([], _('adding %s\n'))
2351 add = ([], _('adding %s\n'))
2352 remove = ([], _('removing %s\n'))
2352 remove = ([], _('removing %s\n'))
2353 forget = ([], _('forgetting %s\n'))
2353 forget = ([], _('forgetting %s\n'))
2354 undelete = ([], _('undeleting %s\n'))
2354 undelete = ([], _('undeleting %s\n'))
2355 update = {}
2355 update = {}
2356
2356
2357 disptable = (
2357 disptable = (
2358 # dispatch table:
2358 # dispatch table:
2359 # file state
2359 # file state
2360 # action if in target manifest
2360 # action if in target manifest
2361 # action if not in target manifest
2361 # action if not in target manifest
2362 # make backup if in target manifest
2362 # make backup if in target manifest
2363 # make backup if not in target manifest
2363 # make backup if not in target manifest
2364 (modified, revert, remove, True, True),
2364 (modified, revert, remove, True, True),
2365 (added, revert, forget, True, False),
2365 (added, revert, forget, True, False),
2366 (removed, undelete, None, False, False),
2366 (removed, undelete, None, False, False),
2367 (deleted, revert, remove, False, False),
2367 (deleted, revert, remove, False, False),
2368 (unknown, add, None, True, False),
2368 (unknown, add, None, True, False),
2369 (target_only, add, None, False, False),
2369 (target_only, add, None, False, False),
2370 )
2370 )
2371
2371
2372 entries = names.items()
2372 entries = names.items()
2373 entries.sort()
2373 entries.sort()
2374
2374
2375 for abs, (rel, exact) in entries:
2375 for abs, (rel, exact) in entries:
2376 mfentry = mf.get(abs)
2376 mfentry = mf.get(abs)
2377 target = repo.wjoin(abs)
2377 target = repo.wjoin(abs)
2378 def handle(xlist, dobackup):
2378 def handle(xlist, dobackup):
2379 xlist[0].append(abs)
2379 xlist[0].append(abs)
2380 update[abs] = 1
2380 update[abs] = 1
2381 if dobackup and not opts['no_backup'] and util.lexists(target):
2381 if dobackup and not opts['no_backup'] and util.lexists(target):
2382 bakname = "%s.orig" % rel
2382 bakname = "%s.orig" % rel
2383 ui.note(_('saving current version of %s as %s\n') %
2383 ui.note(_('saving current version of %s as %s\n') %
2384 (rel, bakname))
2384 (rel, bakname))
2385 if not opts.get('dry_run'):
2385 if not opts.get('dry_run'):
2386 util.copyfile(target, bakname)
2386 util.copyfile(target, bakname)
2387 if ui.verbose or not exact:
2387 if ui.verbose or not exact:
2388 ui.status(xlist[1] % rel)
2388 ui.status(xlist[1] % rel)
2389 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2389 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2390 if abs not in table: continue
2390 if abs not in table: continue
2391 # file has changed in dirstate
2391 # file has changed in dirstate
2392 if mfentry:
2392 if mfentry:
2393 handle(hitlist, backuphit)
2393 handle(hitlist, backuphit)
2394 elif misslist is not None:
2394 elif misslist is not None:
2395 handle(misslist, backupmiss)
2395 handle(misslist, backupmiss)
2396 else:
2396 else:
2397 if exact: ui.warn(_('file not managed: %s\n') % rel)
2397 if exact: ui.warn(_('file not managed: %s\n') % rel)
2398 break
2398 break
2399 else:
2399 else:
2400 # file has not changed in dirstate
2400 # file has not changed in dirstate
2401 if node == parent:
2401 if node == parent:
2402 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2402 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2403 continue
2403 continue
2404 if pmf is None:
2404 if pmf is None:
2405 # only need parent manifest in this unlikely case,
2405 # only need parent manifest in this unlikely case,
2406 # so do not read by default
2406 # so do not read by default
2407 pmf = repo.changectx(parent).manifest()
2407 pmf = repo.changectx(parent).manifest()
2408 if abs in pmf:
2408 if abs in pmf:
2409 if mfentry:
2409 if mfentry:
2410 # if version of file is same in parent and target
2410 # if version of file is same in parent and target
2411 # manifests, do nothing
2411 # manifests, do nothing
2412 if pmf[abs] != mfentry:
2412 if pmf[abs] != mfentry:
2413 handle(revert, False)
2413 handle(revert, False)
2414 else:
2414 else:
2415 handle(remove, False)
2415 handle(remove, False)
2416
2416
2417 if not opts.get('dry_run'):
2417 if not opts.get('dry_run'):
2418 repo.dirstate.forget(forget[0])
2418 for f in forget[0]:
2419 repo.dirstate.forget(f)
2419 r = hg.revert(repo, node, update.has_key, wlock)
2420 r = hg.revert(repo, node, update.has_key, wlock)
2420 repo.dirstate.update(add[0], 'a')
2421 for f in add[0]:
2421 repo.dirstate.update(undelete[0], 'n')
2422 repo.dirstate.add(f)
2422 repo.dirstate.update(remove[0], 'r')
2423 for f in undelete[0]:
2424 repo.dirstate.normal(f)
2425 for f in remove[0]:
2426 repo.dirstate.remove(f)
2423 return r
2427 return r
2424
2428
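# --- Editor's note (not part of this changeset): the hunk above is the point
# of this change -- the old batch calls, dirstate.update(files, state) and
# dirstate.forget(files), are replaced by the per-file methods
# dirstate.add/normal/remove/forget.  A hypothetical helper showing how the
# old state letters map onto the new methods:
def _apply_dirstate_state(dirstate, files, state):
    method = {'a': dirstate.add,           # schedule for addition
              'n': dirstate.normal,        # mark as tracked and clean
              'r': dirstate.remove}[state] # schedule for removal
    for f in files:
        method(f)
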
2425 def rollback(ui, repo):
2429 def rollback(ui, repo):
2426 """roll back the last transaction in this repository
2430 """roll back the last transaction in this repository
2427
2431
2428 Roll back the last transaction in this repository, restoring the
2432 Roll back the last transaction in this repository, restoring the
2429 project to its state prior to the transaction.
2433 project to its state prior to the transaction.
2430
2434
2431 Transactions are used to encapsulate the effects of all commands
2435 Transactions are used to encapsulate the effects of all commands
2432 that create new changesets or propagate existing changesets into a
2436 that create new changesets or propagate existing changesets into a
2433 repository. For example, the following commands are transactional,
2437 repository. For example, the following commands are transactional,
2434 and their effects can be rolled back:
2438 and their effects can be rolled back:
2435
2439
2436 commit
2440 commit
2437 import
2441 import
2438 pull
2442 pull
2439 push (with this repository as destination)
2443 push (with this repository as destination)
2440 unbundle
2444 unbundle
2441
2445
2442 This command should be used with care. There is only one level of
2446 This command should be used with care. There is only one level of
2443 rollback, and there is no way to undo a rollback. It will also
2447 rollback, and there is no way to undo a rollback. It will also
2444 restore the dirstate at the time of the last transaction, which
2448 restore the dirstate at the time of the last transaction, which
2445 may lose subsequent dirstate changes.
2449 may lose subsequent dirstate changes.
2446
2450
2447 This command is not intended for use on public repositories. Once
2451 This command is not intended for use on public repositories. Once
2448 changes are visible for pull by other users, rolling a transaction
2452 changes are visible for pull by other users, rolling a transaction
2449 back locally is ineffective (someone else may already have pulled
2453 back locally is ineffective (someone else may already have pulled
2450 the changes). Furthermore, a race is possible with readers of the
2454 the changes). Furthermore, a race is possible with readers of the
2451 repository; for example an in-progress pull from the repository
2455 repository; for example an in-progress pull from the repository
2452 may fail if a rollback is performed.
2456 may fail if a rollback is performed.
2453 """
2457 """
2454 repo.rollback()
2458 repo.rollback()
2455
2459
2456 def root(ui, repo):
2460 def root(ui, repo):
2457 """print the root (top) of the current working dir
2461 """print the root (top) of the current working dir
2458
2462
2459 Print the root directory of the current repository.
2463 Print the root directory of the current repository.
2460 """
2464 """
2461 ui.write(repo.root + "\n")
2465 ui.write(repo.root + "\n")
2462
2466
2463 def serve(ui, repo, **opts):
2467 def serve(ui, repo, **opts):
2464 """export the repository via HTTP
2468 """export the repository via HTTP
2465
2469
2466 Start a local HTTP repository browser and pull server.
2470 Start a local HTTP repository browser and pull server.
2467
2471
2468 By default, the server logs accesses to stdout and errors to
2472 By default, the server logs accesses to stdout and errors to
2469 stderr. Use the "-A" and "-E" options to log to files.
2473 stderr. Use the "-A" and "-E" options to log to files.
2470 """
2474 """
2471
2475
2472 if opts["stdio"]:
2476 if opts["stdio"]:
2473 if repo is None:
2477 if repo is None:
2474 raise hg.RepoError(_("There is no Mercurial repository here"
2478 raise hg.RepoError(_("There is no Mercurial repository here"
2475 " (.hg not found)"))
2479 " (.hg not found)"))
2476 s = sshserver.sshserver(ui, repo)
2480 s = sshserver.sshserver(ui, repo)
2477 s.serve_forever()
2481 s.serve_forever()
2478
2482
2479 parentui = ui.parentui or ui
2483 parentui = ui.parentui or ui
2480 optlist = ("name templates style address port ipv6"
2484 optlist = ("name templates style address port ipv6"
2481 " accesslog errorlog webdir_conf certificate")
2485 " accesslog errorlog webdir_conf certificate")
2482 for o in optlist.split():
2486 for o in optlist.split():
2483 if opts[o]:
2487 if opts[o]:
2484 parentui.setconfig("web", o, str(opts[o]))
2488 parentui.setconfig("web", o, str(opts[o]))
2485 if repo.ui != parentui:
2489 if repo.ui != parentui:
2486 repo.ui.setconfig("web", o, str(opts[o]))
2490 repo.ui.setconfig("web", o, str(opts[o]))
2487
2491
2488 if repo is None and not ui.config("web", "webdir_conf"):
2492 if repo is None and not ui.config("web", "webdir_conf"):
2489 raise hg.RepoError(_("There is no Mercurial repository here"
2493 raise hg.RepoError(_("There is no Mercurial repository here"
2490 " (.hg not found)"))
2494 " (.hg not found)"))
2491
2495
2492 class service:
2496 class service:
2493 def init(self):
2497 def init(self):
2494 util.set_signal_handler()
2498 util.set_signal_handler()
2495 try:
2499 try:
2496 self.httpd = hgweb.server.create_server(parentui, repo)
2500 self.httpd = hgweb.server.create_server(parentui, repo)
2497 except socket.error, inst:
2501 except socket.error, inst:
2498 raise util.Abort(_('cannot start server: ') + inst.args[1])
2502 raise util.Abort(_('cannot start server: ') + inst.args[1])
2499
2503
2500 if not ui.verbose: return
2504 if not ui.verbose: return
2501
2505
2502 if self.httpd.port != 80:
2506 if self.httpd.port != 80:
2503 ui.status(_('listening at http://%s:%d/\n') %
2507 ui.status(_('listening at http://%s:%d/\n') %
2504 (self.httpd.addr, self.httpd.port))
2508 (self.httpd.addr, self.httpd.port))
2505 else:
2509 else:
2506 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2510 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2507
2511
2508 def run(self):
2512 def run(self):
2509 self.httpd.serve_forever()
2513 self.httpd.serve_forever()
2510
2514
2511 service = service()
2515 service = service()
2512
2516
2513 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2517 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2514
2518
2515 def status(ui, repo, *pats, **opts):
2519 def status(ui, repo, *pats, **opts):
2516 """show changed files in the working directory
2520 """show changed files in the working directory
2517
2521
2518 Show status of files in the repository. If names are given, only
2522 Show status of files in the repository. If names are given, only
2519 files that match are shown. Files that are clean or ignored, are
2523 files that match are shown. Files that are clean or ignored, are
2520 not listed unless -c (clean), -i (ignored) or -A is given.
2524 not listed unless -c (clean), -i (ignored) or -A is given.
2521
2525
2522 NOTE: status may appear to disagree with diff if permissions have
2526 NOTE: status may appear to disagree with diff if permissions have
2523 changed or a merge has occurred. The standard diff format does not
2527 changed or a merge has occurred. The standard diff format does not
2524 report permission changes and diff only reports changes relative
2528 report permission changes and diff only reports changes relative
2525 to one merge parent.
2529 to one merge parent.
2526
2530
2527 If one revision is given, it is used as the base revision.
2531 If one revision is given, it is used as the base revision.
2528 If two revisions are given, the difference between them is shown.
2532 If two revisions are given, the difference between them is shown.
2529
2533
2530 The codes used to show the status of files are:
2534 The codes used to show the status of files are:
2531 M = modified
2535 M = modified
2532 A = added
2536 A = added
2533 R = removed
2537 R = removed
2534 C = clean
2538 C = clean
2535 ! = deleted, but still tracked
2539 ! = deleted, but still tracked
2536 ? = not tracked
2540 ? = not tracked
2537 I = ignored (not shown by default)
2541 I = ignored (not shown by default)
2538 = the previous added file was copied from here
2542 = the previous added file was copied from here
2539 """
2543 """
2540
2544
2541 all = opts['all']
2545 all = opts['all']
2542 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2546 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2543
2547
2544 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2548 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2545 cwd = (pats and repo.getcwd()) or ''
2549 cwd = (pats and repo.getcwd()) or ''
2546 modified, added, removed, deleted, unknown, ignored, clean = [
2550 modified, added, removed, deleted, unknown, ignored, clean = [
2547 n for n in repo.status(node1=node1, node2=node2, files=files,
2551 n for n in repo.status(node1=node1, node2=node2, files=files,
2548 match=matchfn,
2552 match=matchfn,
2549 list_ignored=all or opts['ignored'],
2553 list_ignored=all or opts['ignored'],
2550 list_clean=all or opts['clean'])]
2554 list_clean=all or opts['clean'])]
2551
2555
2552 changetypes = (('modified', 'M', modified),
2556 changetypes = (('modified', 'M', modified),
2553 ('added', 'A', added),
2557 ('added', 'A', added),
2554 ('removed', 'R', removed),
2558 ('removed', 'R', removed),
2555 ('deleted', '!', deleted),
2559 ('deleted', '!', deleted),
2556 ('unknown', '?', unknown),
2560 ('unknown', '?', unknown),
2557 ('ignored', 'I', ignored))
2561 ('ignored', 'I', ignored))
2558
2562
2559 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2563 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2560
2564
2561 end = opts['print0'] and '\0' or '\n'
2565 end = opts['print0'] and '\0' or '\n'
2562
2566
2563 for opt, char, changes in ([ct for ct in explicit_changetypes
2567 for opt, char, changes in ([ct for ct in explicit_changetypes
2564 if all or opts[ct[0]]]
2568 if all or opts[ct[0]]]
2565 or changetypes):
2569 or changetypes):
2566 if opts['no_status']:
2570 if opts['no_status']:
2567 format = "%%s%s" % end
2571 format = "%%s%s" % end
2568 else:
2572 else:
2569 format = "%s %%s%s" % (char, end)
2573 format = "%s %%s%s" % (char, end)
2570
2574
2571 for f in changes:
2575 for f in changes:
2572 ui.write(format % repo.pathto(f, cwd))
2576 ui.write(format % repo.pathto(f, cwd))
2573 if ((all or opts.get('copies')) and not opts.get('no_status')):
2577 if ((all or opts.get('copies')) and not opts.get('no_status')):
2574 copied = repo.dirstate.copied(f)
2578 copied = repo.dirstate.copied(f)
2575 if copied:
2579 if copied:
2576 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2580 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2577
2581
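# --- Editor's sketch (not part of this changeset): the seven lists returned
# by repo.status() line up with the codes documented above; a minimal dump
# using only keyword arguments that status() itself passes.
def _example_status_codes(ui, repo):
    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(list_ignored=True, list_clean=True)
    for char, files in zip('MAR!?IC', (modified, added, removed, deleted,
                                       unknown, ignored, clean)):
        for f in files:
            ui.write("%s %s\n" % (char, f))
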
2578 def tag(ui, repo, name, rev_=None, **opts):
2582 def tag(ui, repo, name, rev_=None, **opts):
2579 """add a tag for the current or given revision
2583 """add a tag for the current or given revision
2580
2584
2581 Name a particular revision using <name>.
2585 Name a particular revision using <name>.
2582
2586
2583 Tags are used to name particular revisions of the repository and are
2587 Tags are used to name particular revisions of the repository and are
2588 very useful to compare different revisions, to go back to significant
2588 very useful to compare different revisions, to go back to significant
2585 earlier versions or to mark branch points as releases, etc.
2589 earlier versions or to mark branch points as releases, etc.
2586
2590
2587 If no revision is given, the parent of the working directory is used,
2591 If no revision is given, the parent of the working directory is used,
2588 or tip if no revision is checked out.
2592 or tip if no revision is checked out.
2589
2593
2590 To facilitate version control, distribution, and merging of tags,
2594 To facilitate version control, distribution, and merging of tags,
2591 they are stored as a file named ".hgtags" which is managed
2595 they are stored as a file named ".hgtags" which is managed
2592 similarly to other project files and can be hand-edited if
2596 similarly to other project files and can be hand-edited if
2593 necessary. The file '.hg/localtags' is used for local tags (not
2597 necessary. The file '.hg/localtags' is used for local tags (not
2594 shared among repositories).
2598 shared among repositories).
2595 """
2599 """
2596 if name in ['tip', '.', 'null']:
2600 if name in ['tip', '.', 'null']:
2597 raise util.Abort(_("the name '%s' is reserved") % name)
2601 raise util.Abort(_("the name '%s' is reserved") % name)
2598 if rev_ is not None:
2602 if rev_ is not None:
2599 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2603 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2600 "please use 'hg tag [-r REV] NAME' instead\n"))
2604 "please use 'hg tag [-r REV] NAME' instead\n"))
2601 if opts['rev']:
2605 if opts['rev']:
2602 raise util.Abort(_("use only one form to specify the revision"))
2606 raise util.Abort(_("use only one form to specify the revision"))
2603 if opts['rev'] and opts['remove']:
2607 if opts['rev'] and opts['remove']:
2604 raise util.Abort(_("--rev and --remove are incompatible"))
2608 raise util.Abort(_("--rev and --remove are incompatible"))
2605 if opts['rev']:
2609 if opts['rev']:
2606 rev_ = opts['rev']
2610 rev_ = opts['rev']
2607 message = opts['message']
2611 message = opts['message']
2608 if opts['remove']:
2612 if opts['remove']:
2609 if not name in repo.tags():
2613 if not name in repo.tags():
2610 raise util.Abort(_('tag %s does not exist') % name)
2614 raise util.Abort(_('tag %s does not exist') % name)
2611 rev_ = nullid
2615 rev_ = nullid
2612 if not message:
2616 if not message:
2613 message = _('Removed tag %s') % name
2617 message = _('Removed tag %s') % name
2614 elif name in repo.tags() and not opts['force']:
2618 elif name in repo.tags() and not opts['force']:
2615 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2619 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2616 % name)
2620 % name)
2617 if not rev_ and repo.dirstate.parents()[1] != nullid:
2621 if not rev_ and repo.dirstate.parents()[1] != nullid:
2618 raise util.Abort(_('uncommitted merge - please provide a '
2622 raise util.Abort(_('uncommitted merge - please provide a '
2619 'specific revision'))
2623 'specific revision'))
2620 r = repo.changectx(rev_).node()
2624 r = repo.changectx(rev_).node()
2621
2625
2622 if not message:
2626 if not message:
2623 message = _('Added tag %s for changeset %s') % (name, short(r))
2627 message = _('Added tag %s for changeset %s') % (name, short(r))
2624
2628
2625 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2629 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2626
2630
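# --- Editor's sketch (not part of this changeset): tagging the working
# directory parent programmatically, assuming the repo.changectx, repo.tag
# and short() calls used by tag() above; the tag name is hypothetical.
def _example_tag(repo, name='example-tag'):
    r = repo.changectx(None).node()    # None resolves as in tag() above
    message = 'Added tag %s for changeset %s' % (name, short(r))
    # local=False records the tag in .hgtags; user/date fall back to defaults
    repo.tag(name, r, message, False, None, None)
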
2627 def tags(ui, repo):
2631 def tags(ui, repo):
2628 """list repository tags
2632 """list repository tags
2629
2633
2630 List the repository tags.
2634 List the repository tags.
2631
2635
2632 This lists both regular and local tags.
2636 This lists both regular and local tags.
2633 """
2637 """
2634
2638
2635 l = repo.tagslist()
2639 l = repo.tagslist()
2636 l.reverse()
2640 l.reverse()
2637 hexfunc = ui.debugflag and hex or short
2641 hexfunc = ui.debugflag and hex or short
2638 for t, n in l:
2642 for t, n in l:
2639 try:
2643 try:
2640 hn = hexfunc(n)
2644 hn = hexfunc(n)
2641 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2645 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2642 except revlog.LookupError:
2646 except revlog.LookupError:
2643 r = " ?:%s" % hn
2647 r = " ?:%s" % hn
2644 if ui.quiet:
2648 if ui.quiet:
2645 ui.write("%s\n" % t)
2649 ui.write("%s\n" % t)
2646 else:
2650 else:
2647 spaces = " " * (30 - util.locallen(t))
2651 spaces = " " * (30 - util.locallen(t))
2648 ui.write("%s%s %s\n" % (t, spaces, r))
2652 ui.write("%s%s %s\n" % (t, spaces, r))
2649
2653
2650 def tip(ui, repo, **opts):
2654 def tip(ui, repo, **opts):
2651 """show the tip revision
2655 """show the tip revision
2652
2656
2653 Show the tip revision.
2657 Show the tip revision.
2654 """
2658 """
2655 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2659 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2656
2660
2657 def unbundle(ui, repo, fname1, *fnames, **opts):
2661 def unbundle(ui, repo, fname1, *fnames, **opts):
2658 """apply one or more changegroup files
2662 """apply one or more changegroup files
2659
2663
2660 Apply one or more compressed changegroup files generated by the
2664 Apply one or more compressed changegroup files generated by the
2661 bundle command.
2665 bundle command.
2662 """
2666 """
2663 fnames = (fname1,) + fnames
2667 fnames = (fname1,) + fnames
2664 result = None
2668 result = None
2665 wasempty = repo.changelog.count() == 0
2669 wasempty = repo.changelog.count() == 0
2666 for fname in fnames:
2670 for fname in fnames:
2667 if os.path.exists(fname):
2671 if os.path.exists(fname):
2668 f = open(fname, "rb")
2672 f = open(fname, "rb")
2669 else:
2673 else:
2670 f = urllib.urlopen(fname)
2674 f = urllib.urlopen(fname)
2671 gen = changegroup.readbundle(f, fname)
2675 gen = changegroup.readbundle(f, fname)
2672 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2676 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2673
2677
2674 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2678 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2675
2679
2676 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2680 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2677 """update working directory
2681 """update working directory
2678
2682
2679 Update the working directory to the specified revision, or the
2683 Update the working directory to the specified revision, or the
2680 tip of the current branch if none is specified.
2684 tip of the current branch if none is specified.
2681
2685
2682 If there are no outstanding changes in the working directory and
2686 If there are no outstanding changes in the working directory and
2683 there is a linear relationship between the current version and the
2687 there is a linear relationship between the current version and the
2684 requested version, the result is the requested version.
2688 requested version, the result is the requested version.
2685
2689
2686 To merge the working directory with another revision, use the
2690 To merge the working directory with another revision, use the
2687 merge command.
2691 merge command.
2688
2692
2689 By default, update will refuse to run if doing so would require
2693 By default, update will refuse to run if doing so would require
2690 discarding local changes.
2694 discarding local changes.
2691 """
2695 """
2692 if rev and node:
2696 if rev and node:
2693 raise util.Abort(_("please specify just one revision"))
2697 raise util.Abort(_("please specify just one revision"))
2694
2698
2695 if not rev:
2699 if not rev:
2696 rev = node
2700 rev = node
2697
2701
2698 if date:
2702 if date:
2699 if rev:
2703 if rev:
2700 raise util.Abort(_("you can't specify a revision and a date"))
2704 raise util.Abort(_("you can't specify a revision and a date"))
2701 rev = cmdutil.finddate(ui, repo, date)
2705 rev = cmdutil.finddate(ui, repo, date)
2702
2706
2703 if clean:
2707 if clean:
2704 return hg.clean(repo, rev)
2708 return hg.clean(repo, rev)
2705 else:
2709 else:
2706 return hg.update(repo, rev)
2710 return hg.update(repo, rev)
2707
2711
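# --- Editor's sketch (not part of this changeset): the clean/update split
# above as one helper, assuming the hg.clean, hg.update and cmdutil.finddate
# calls used by update(); the date string is hypothetical.
def _example_update(ui, repo, rev=None, date=None, discard=False):
    if date:
        rev = cmdutil.finddate(ui, repo, date)  # e.g. date='2007-05-01'
    if discard:
        return hg.clean(repo, rev)              # overwrite local changes
    return hg.update(repo, rev)                 # refuse to discard changes
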
2708 def verify(ui, repo):
2712 def verify(ui, repo):
2709 """verify the integrity of the repository
2713 """verify the integrity of the repository
2710
2714
2711 Verify the integrity of the current repository.
2715 Verify the integrity of the current repository.
2712
2716
2713 This will perform an extensive check of the repository's
2717 This will perform an extensive check of the repository's
2714 integrity, validating the hashes and checksums of each entry in
2718 integrity, validating the hashes and checksums of each entry in
2715 the changelog, manifest, and tracked files, as well as the
2719 the changelog, manifest, and tracked files, as well as the
2716 integrity of their crosslinks and indices.
2720 integrity of their crosslinks and indices.
2717 """
2721 """
2718 return hg.verify(repo)
2722 return hg.verify(repo)
2719
2723
2720 def version_(ui):
2724 def version_(ui):
2721 """output version and copyright information"""
2725 """output version and copyright information"""
2722 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2726 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2723 % version.get_version())
2727 % version.get_version())
2724 ui.status(_(
2728 ui.status(_(
2725 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2729 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2726 "This is free software; see the source for copying conditions. "
2730 "This is free software; see the source for copying conditions. "
2727 "There is NO\nwarranty; "
2731 "There is NO\nwarranty; "
2728 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2732 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2729 ))
2733 ))
2730
2734
2731 # Command options and aliases are listed here, alphabetically
2735 # Command options and aliases are listed here, alphabetically
2732
2736
2733 globalopts = [
2737 globalopts = [
2734 ('R', 'repository', '',
2738 ('R', 'repository', '',
2735 _('repository root directory or symbolic path name')),
2739 _('repository root directory or symbolic path name')),
2736 ('', 'cwd', '', _('change working directory')),
2740 ('', 'cwd', '', _('change working directory')),
2737 ('y', 'noninteractive', None,
2741 ('y', 'noninteractive', None,
2738 _('do not prompt, assume \'yes\' for any required answers')),
2742 _('do not prompt, assume \'yes\' for any required answers')),
2739 ('q', 'quiet', None, _('suppress output')),
2743 ('q', 'quiet', None, _('suppress output')),
2740 ('v', 'verbose', None, _('enable additional output')),
2744 ('v', 'verbose', None, _('enable additional output')),
2741 ('', 'config', [], _('set/override config option')),
2745 ('', 'config', [], _('set/override config option')),
2742 ('', 'debug', None, _('enable debugging output')),
2746 ('', 'debug', None, _('enable debugging output')),
2743 ('', 'debugger', None, _('start debugger')),
2747 ('', 'debugger', None, _('start debugger')),
2744 ('', 'encoding', util._encoding, _('set the charset encoding')),
2748 ('', 'encoding', util._encoding, _('set the charset encoding')),
2745 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2749 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2746 ('', 'lsprof', None, _('print improved command execution profile')),
2750 ('', 'lsprof', None, _('print improved command execution profile')),
2747 ('', 'traceback', None, _('print traceback on exception')),
2751 ('', 'traceback', None, _('print traceback on exception')),
2748 ('', 'time', None, _('time how long the command takes')),
2752 ('', 'time', None, _('time how long the command takes')),
2749 ('', 'profile', None, _('print command execution profile')),
2753 ('', 'profile', None, _('print command execution profile')),
2750 ('', 'version', None, _('output version information and exit')),
2754 ('', 'version', None, _('output version information and exit')),
2751 ('h', 'help', None, _('display help and exit')),
2755 ('h', 'help', None, _('display help and exit')),
2752 ]
2756 ]
2753
2757
2754 dryrunopts = [('n', 'dry-run', None,
2758 dryrunopts = [('n', 'dry-run', None,
2755 _('do not perform actions, just print output'))]
2759 _('do not perform actions, just print output'))]
2756
2760
2757 remoteopts = [
2761 remoteopts = [
2758 ('e', 'ssh', '', _('specify ssh command to use')),
2762 ('e', 'ssh', '', _('specify ssh command to use')),
2759 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2763 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2760 ]
2764 ]
2761
2765
2762 walkopts = [
2766 walkopts = [
2763 ('I', 'include', [], _('include names matching the given patterns')),
2767 ('I', 'include', [], _('include names matching the given patterns')),
2764 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2768 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2765 ]
2769 ]
2766
2770
2767 commitopts = [
2771 commitopts = [
2768 ('m', 'message', '', _('use <text> as commit message')),
2772 ('m', 'message', '', _('use <text> as commit message')),
2769 ('l', 'logfile', '', _('read commit message from <file>')),
2773 ('l', 'logfile', '', _('read commit message from <file>')),
2770 ]
2774 ]
2771
2775
2772 table = {
2776 table = {
2773 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2777 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2774 "addremove":
2778 "addremove":
2775 (addremove,
2779 (addremove,
2776 [('s', 'similarity', '',
2780 [('s', 'similarity', '',
2777 _('guess renamed files by similarity (0<=s<=100)')),
2781 _('guess renamed files by similarity (0<=s<=100)')),
2778 ] + walkopts + dryrunopts,
2782 ] + walkopts + dryrunopts,
2779 _('hg addremove [OPTION]... [FILE]...')),
2783 _('hg addremove [OPTION]... [FILE]...')),
2780 "^annotate":
2784 "^annotate":
2781 (annotate,
2785 (annotate,
2782 [('r', 'rev', '', _('annotate the specified revision')),
2786 [('r', 'rev', '', _('annotate the specified revision')),
2783 ('f', 'follow', None, _('follow file copies and renames')),
2787 ('f', 'follow', None, _('follow file copies and renames')),
2784 ('a', 'text', None, _('treat all files as text')),
2788 ('a', 'text', None, _('treat all files as text')),
2785 ('u', 'user', None, _('list the author')),
2789 ('u', 'user', None, _('list the author')),
2786 ('d', 'date', None, _('list the date')),
2790 ('d', 'date', None, _('list the date')),
2787 ('n', 'number', None, _('list the revision number (default)')),
2791 ('n', 'number', None, _('list the revision number (default)')),
2788 ('c', 'changeset', None, _('list the changeset')),
2792 ('c', 'changeset', None, _('list the changeset')),
2789 ('l', 'line-number', None,
2793 ('l', 'line-number', None,
2790 _('show line number at the first appearance'))
2794 _('show line number at the first appearance'))
2791 ] + walkopts,
2795 ] + walkopts,
2792 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2796 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2793 "archive":
2797 "archive":
2794 (archive,
2798 (archive,
2795 [('', 'no-decode', None, _('do not pass files through decoders')),
2799 [('', 'no-decode', None, _('do not pass files through decoders')),
2796 ('p', 'prefix', '', _('directory prefix for files in archive')),
2800 ('p', 'prefix', '', _('directory prefix for files in archive')),
2797 ('r', 'rev', '', _('revision to distribute')),
2801 ('r', 'rev', '', _('revision to distribute')),
2798 ('t', 'type', '', _('type of distribution to create')),
2802 ('t', 'type', '', _('type of distribution to create')),
2799 ] + walkopts,
2803 ] + walkopts,
2800 _('hg archive [OPTION]... DEST')),
2804 _('hg archive [OPTION]... DEST')),
2801 "backout":
2805 "backout":
2802 (backout,
2806 (backout,
2803 [('', 'merge', None,
2807 [('', 'merge', None,
2804 _('merge with old dirstate parent after backout')),
2808 _('merge with old dirstate parent after backout')),
2805 ('d', 'date', '', _('record datecode as commit date')),
2809 ('d', 'date', '', _('record datecode as commit date')),
2806 ('', 'parent', '', _('parent to choose when backing out merge')),
2810 ('', 'parent', '', _('parent to choose when backing out merge')),
2807 ('u', 'user', '', _('record user as committer')),
2811 ('u', 'user', '', _('record user as committer')),
2808 ('r', 'rev', '', _('revision to backout')),
2812 ('r', 'rev', '', _('revision to backout')),
2809 ] + walkopts + commitopts,
2813 ] + walkopts + commitopts,
2810 _('hg backout [OPTION]... [-r] REV')),
2814 _('hg backout [OPTION]... [-r] REV')),
2811 "branch":
2815 "branch":
2812 (branch,
2816 (branch,
2813 [('f', 'force', None,
2817 [('f', 'force', None,
2814 _('set branch name even if it shadows an existing branch'))],
2818 _('set branch name even if it shadows an existing branch'))],
2815 _('hg branch [NAME]')),
2819 _('hg branch [NAME]')),
2816 "branches":
2820 "branches":
2817 (branches,
2821 (branches,
2818 [('a', 'active', False,
2822 [('a', 'active', False,
2819 _('show only branches that have unmerged heads'))],
2823 _('show only branches that have unmerged heads'))],
2820 _('hg branches [-a]')),
2824 _('hg branches [-a]')),
2821 "bundle":
2825 "bundle":
2822 (bundle,
2826 (bundle,
2823 [('f', 'force', None,
2827 [('f', 'force', None,
2824 _('run even when remote repository is unrelated')),
2828 _('run even when remote repository is unrelated')),
2825 ('r', 'rev', [],
2829 ('r', 'rev', [],
2826 _('a changeset you would like to bundle')),
2830 _('a changeset you would like to bundle')),
2827 ('', 'base', [],
2831 ('', 'base', [],
2828 _('a base changeset to specify instead of a destination')),
2832 _('a base changeset to specify instead of a destination')),
2829 ] + remoteopts,
2833 ] + remoteopts,
2830 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2834 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2831 "cat":
2835 "cat":
2832 (cat,
2836 (cat,
2833 [('o', 'output', '', _('print output to file with formatted name')),
2837 [('o', 'output', '', _('print output to file with formatted name')),
2834 ('r', 'rev', '', _('print the given revision')),
2838 ('r', 'rev', '', _('print the given revision')),
2835 ] + walkopts,
2839 ] + walkopts,
2836 _('hg cat [OPTION]... FILE...')),
2840 _('hg cat [OPTION]... FILE...')),
2837 "^clone":
2841 "^clone":
2838 (clone,
2842 (clone,
2839 [('U', 'noupdate', None, _('do not update the new working directory')),
2843 [('U', 'noupdate', None, _('do not update the new working directory')),
2840 ('r', 'rev', [],
2844 ('r', 'rev', [],
2841 _('a changeset you would like to have after cloning')),
2845 _('a changeset you would like to have after cloning')),
2842 ('', 'pull', None, _('use pull protocol to copy metadata')),
2846 ('', 'pull', None, _('use pull protocol to copy metadata')),
2843 ('', 'uncompressed', None,
2847 ('', 'uncompressed', None,
2844 _('use uncompressed transfer (fast over LAN)')),
2848 _('use uncompressed transfer (fast over LAN)')),
2845 ] + remoteopts,
2849 ] + remoteopts,
2846 _('hg clone [OPTION]... SOURCE [DEST]')),
2850 _('hg clone [OPTION]... SOURCE [DEST]')),
2847 "^commit|ci":
2851 "^commit|ci":
2848 (commit,
2852 (commit,
2849 [('A', 'addremove', None,
2853 [('A', 'addremove', None,
2850 _('mark new/missing files as added/removed before committing')),
2854 _('mark new/missing files as added/removed before committing')),
2851 ('d', 'date', '', _('record datecode as commit date')),
2855 ('d', 'date', '', _('record datecode as commit date')),
2852 ('u', 'user', '', _('record user as committer')),
2856 ('u', 'user', '', _('record user as committer')),
2853 ] + walkopts + commitopts,
2857 ] + walkopts + commitopts,
2854 _('hg commit [OPTION]... [FILE]...')),
2858 _('hg commit [OPTION]... [FILE]...')),
2855 "copy|cp":
2859 "copy|cp":
2856 (copy,
2860 (copy,
2857 [('A', 'after', None, _('record a copy that has already occurred')),
2861 [('A', 'after', None, _('record a copy that has already occurred')),
2858 ('f', 'force', None,
2862 ('f', 'force', None,
2859 _('forcibly copy over an existing managed file')),
2863 _('forcibly copy over an existing managed file')),
2860 ] + walkopts + dryrunopts,
2864 ] + walkopts + dryrunopts,
2861 _('hg copy [OPTION]... [SOURCE]... DEST')),
2865 _('hg copy [OPTION]... [SOURCE]... DEST')),
2862 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2866 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2863 "debugcomplete":
2867 "debugcomplete":
2864 (debugcomplete,
2868 (debugcomplete,
2865 [('o', 'options', None, _('show the command options'))],
2869 [('o', 'options', None, _('show the command options'))],
2866 _('debugcomplete [-o] CMD')),
2870 _('debugcomplete [-o] CMD')),
2867 "debuginstall": (debuginstall, [], _('debuginstall')),
2871 "debuginstall": (debuginstall, [], _('debuginstall')),
2868 "debugrebuildstate":
2872 "debugrebuildstate":
2869 (debugrebuildstate,
2873 (debugrebuildstate,
2870 [('r', 'rev', '', _('revision to rebuild to'))],
2874 [('r', 'rev', '', _('revision to rebuild to'))],
2871 _('debugrebuildstate [-r REV] [REV]')),
2875 _('debugrebuildstate [-r REV] [REV]')),
2872 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2876 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2873 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2877 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2874 "debugstate": (debugstate, [], _('debugstate')),
2878 "debugstate": (debugstate, [], _('debugstate')),
2875 "debugdate":
2879 "debugdate":
2876 (debugdate,
2880 (debugdate,
2877 [('e', 'extended', None, _('try extended date formats'))],
2881 [('e', 'extended', None, _('try extended date formats'))],
2878 _('debugdate [-e] DATE [RANGE]')),
2882 _('debugdate [-e] DATE [RANGE]')),
2879 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2883 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2880 "debugindex": (debugindex, [], _('debugindex FILE')),
2884 "debugindex": (debugindex, [], _('debugindex FILE')),
2881 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2885 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2882 "debugrename":
2886 "debugrename":
2883 (debugrename,
2887 (debugrename,
2884 [('r', 'rev', '', _('revision to debug'))],
2888 [('r', 'rev', '', _('revision to debug'))],
2885 _('debugrename [-r REV] FILE')),
2889 _('debugrename [-r REV] FILE')),
2886 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2890 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2887 "^diff":
2891 "^diff":
2888 (diff,
2892 (diff,
2889 [('r', 'rev', [], _('revision')),
2893 [('r', 'rev', [], _('revision')),
2890 ('a', 'text', None, _('treat all files as text')),
2894 ('a', 'text', None, _('treat all files as text')),
2891 ('p', 'show-function', None,
2895 ('p', 'show-function', None,
2892 _('show which function each change is in')),
2896 _('show which function each change is in')),
2893 ('g', 'git', None, _('use git extended diff format')),
2897 ('g', 'git', None, _('use git extended diff format')),
2894 ('', 'nodates', None, _("don't include dates in diff headers")),
2898 ('', 'nodates', None, _("don't include dates in diff headers")),
2895 ('w', 'ignore-all-space', None,
2899 ('w', 'ignore-all-space', None,
2896 _('ignore white space when comparing lines')),
2900 _('ignore white space when comparing lines')),
2897 ('b', 'ignore-space-change', None,
2901 ('b', 'ignore-space-change', None,
2898 _('ignore changes in the amount of white space')),
2902 _('ignore changes in the amount of white space')),
2899 ('B', 'ignore-blank-lines', None,
2903 ('B', 'ignore-blank-lines', None,
2900 _('ignore changes whose lines are all blank')),
2904 _('ignore changes whose lines are all blank')),
2901 ] + walkopts,
2905 ] + walkopts,
2902 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2906 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2903 "^export":
2907 "^export":
2904 (export,
2908 (export,
2905 [('o', 'output', '', _('print output to file with formatted name')),
2909 [('o', 'output', '', _('print output to file with formatted name')),
2906 ('a', 'text', None, _('treat all files as text')),
2910 ('a', 'text', None, _('treat all files as text')),
2907 ('g', 'git', None, _('use git extended diff format')),
2911 ('g', 'git', None, _('use git extended diff format')),
2908 ('', 'nodates', None, _("don't include dates in diff headers")),
2912 ('', 'nodates', None, _("don't include dates in diff headers")),
2909 ('', 'switch-parent', None, _('diff against the second parent'))],
2913 ('', 'switch-parent', None, _('diff against the second parent'))],
2910 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2914 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2911 "grep":
2915 "grep":
2912 (grep,
2916 (grep,
2913 [('0', 'print0', None, _('end fields with NUL')),
2917 [('0', 'print0', None, _('end fields with NUL')),
2914 ('', 'all', None, _('print all revisions that match')),
2918 ('', 'all', None, _('print all revisions that match')),
2915 ('f', 'follow', None,
2919 ('f', 'follow', None,
2916 _('follow changeset history, or file history across copies and renames')),
2920 _('follow changeset history, or file history across copies and renames')),
2917 ('i', 'ignore-case', None, _('ignore case when matching')),
2921 ('i', 'ignore-case', None, _('ignore case when matching')),
2918 ('l', 'files-with-matches', None,
2922 ('l', 'files-with-matches', None,
2919 _('print only filenames and revs that match')),
2923 _('print only filenames and revs that match')),
2920 ('n', 'line-number', None, _('print matching line numbers')),
2924 ('n', 'line-number', None, _('print matching line numbers')),
2921 ('r', 'rev', [], _('search in given revision range')),
2925 ('r', 'rev', [], _('search in given revision range')),
2922 ('u', 'user', None, _('print user who committed change')),
2926 ('u', 'user', None, _('print user who committed change')),
2923 ] + walkopts,
2927 ] + walkopts,
2924 _('hg grep [OPTION]... PATTERN [FILE]...')),
2928 _('hg grep [OPTION]... PATTERN [FILE]...')),
2925 "heads":
2929 "heads":
2926 (heads,
2930 (heads,
2927 [('', 'style', '', _('display using template map file')),
2931 [('', 'style', '', _('display using template map file')),
2928 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2932 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2929 ('', 'template', '', _('display with template'))],
2933 ('', 'template', '', _('display with template'))],
2930 _('hg heads [-r REV] [REV]...')),
2934 _('hg heads [-r REV] [REV]...')),
2931 "help": (help_, [], _('hg help [COMMAND]')),
2935 "help": (help_, [], _('hg help [COMMAND]')),
2932 "identify|id":
2936 "identify|id":
2933 (identify,
2937 (identify,
2934 [('r', 'rev', '', _('identify the specified rev')),
2938 [('r', 'rev', '', _('identify the specified rev')),
2935 ('n', 'num', None, _('show local revision number')),
2939 ('n', 'num', None, _('show local revision number')),
2936 ('i', 'id', None, _('show global revision id')),
2940 ('i', 'id', None, _('show global revision id')),
2937 ('b', 'branch', None, _('show branch')),
2941 ('b', 'branch', None, _('show branch')),
2938 ('t', 'tags', None, _('show tags'))],
2942 ('t', 'tags', None, _('show tags'))],
2939 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2943 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2940 "import|patch":
2944 "import|patch":
2941 (import_,
2945 (import_,
2942 [('p', 'strip', 1,
2946 [('p', 'strip', 1,
2943 _('directory strip option for patch. This has the same\n'
2947 _('directory strip option for patch. This has the same\n'
2944 'meaning as the corresponding patch option')),
2948 'meaning as the corresponding patch option')),
2945 ('b', 'base', '', _('base path')),
2949 ('b', 'base', '', _('base path')),
2946 ('f', 'force', None,
2950 ('f', 'force', None,
2947 _('skip check for outstanding uncommitted changes')),
2951 _('skip check for outstanding uncommitted changes')),
2948 ('', 'exact', None,
2952 ('', 'exact', None,
2949 _('apply patch to the nodes from which it was generated')),
2953 _('apply patch to the nodes from which it was generated')),
2950 ('', 'import-branch', None,
2954 ('', 'import-branch', None,
2951 _('use any branch information in patch (implied by --exact)'))] + commitopts,
2955 _('use any branch information in patch (implied by --exact)'))] + commitopts,
2952 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2956 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2953 "incoming|in": (incoming,
2957 "incoming|in": (incoming,
2954 [('M', 'no-merges', None, _('do not show merges')),
2958 [('M', 'no-merges', None, _('do not show merges')),
2955 ('f', 'force', None,
2959 ('f', 'force', None,
2956 _('run even when remote repository is unrelated')),
2960 _('run even when remote repository is unrelated')),
2957 ('', 'style', '', _('display using template map file')),
2961 ('', 'style', '', _('display using template map file')),
2958 ('n', 'newest-first', None, _('show newest record first')),
2962 ('n', 'newest-first', None, _('show newest record first')),
2959 ('', 'bundle', '', _('file to store the bundles into')),
2963 ('', 'bundle', '', _('file to store the bundles into')),
2960 ('p', 'patch', None, _('show patch')),
2964 ('p', 'patch', None, _('show patch')),
2961 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2965 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2962 ('', 'template', '', _('display with template')),
2966 ('', 'template', '', _('display with template')),
2963 ] + remoteopts,
2967 ] + remoteopts,
2964 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2968 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2965 ' [--bundle FILENAME] [SOURCE]')),
2969 ' [--bundle FILENAME] [SOURCE]')),
2966 "^init":
2970 "^init":
2967 (init,
2971 (init,
2968 remoteopts,
2972 remoteopts,
2969 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2973 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2970 "locate":
2974 "locate":
2971 (locate,
2975 (locate,
2972 [('r', 'rev', '', _('search the repository as it stood at rev')),
2976 [('r', 'rev', '', _('search the repository as it stood at rev')),
2973 ('0', 'print0', None,
2977 ('0', 'print0', None,
2974 _('end filenames with NUL, for use with xargs')),
2978 _('end filenames with NUL, for use with xargs')),
2975 ('f', 'fullpath', None,
2979 ('f', 'fullpath', None,
2976 _('print complete paths from the filesystem root')),
2980 _('print complete paths from the filesystem root')),
2977 ] + walkopts,
2981 ] + walkopts,
2978 _('hg locate [OPTION]... [PATTERN]...')),
2982 _('hg locate [OPTION]... [PATTERN]...')),
2979 "^log|history":
2983 "^log|history":
2980 (log,
2984 (log,
2981 [('f', 'follow', None,
2985 [('f', 'follow', None,
2982 _('follow changeset history, or file history across copies and renames')),
2986 _('follow changeset history, or file history across copies and renames')),
2983 ('', 'follow-first', None,
2987 ('', 'follow-first', None,
2984 _('only follow the first parent of merge changesets')),
2988 _('only follow the first parent of merge changesets')),
2985 ('d', 'date', '', _('show revs matching date spec')),
2989 ('d', 'date', '', _('show revs matching date spec')),
2986 ('C', 'copies', None, _('show copied files')),
2990 ('C', 'copies', None, _('show copied files')),
2987 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2991 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2988 ('l', 'limit', '', _('limit number of changes displayed')),
2992 ('l', 'limit', '', _('limit number of changes displayed')),
2989 ('r', 'rev', [], _('show the specified revision or range')),
2993 ('r', 'rev', [], _('show the specified revision or range')),
2990 ('', 'removed', None, _('include revs where files were removed')),
2994 ('', 'removed', None, _('include revs where files were removed')),
2991 ('M', 'no-merges', None, _('do not show merges')),
2995 ('M', 'no-merges', None, _('do not show merges')),
2992 ('', 'style', '', _('display using template map file')),
2996 ('', 'style', '', _('display using template map file')),
2993 ('m', 'only-merges', None, _('show only merges')),
2997 ('m', 'only-merges', None, _('show only merges')),
2994 ('p', 'patch', None, _('show patch')),
2998 ('p', 'patch', None, _('show patch')),
2995 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2999 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2996 ('', 'template', '', _('display with template')),
3000 ('', 'template', '', _('display with template')),
2997 ] + walkopts,
3001 ] + walkopts,
2998 _('hg log [OPTION]... [FILE]')),
3002 _('hg log [OPTION]... [FILE]')),
2999 "manifest": (manifest, [], _('hg manifest [REV]')),
3003 "manifest": (manifest, [], _('hg manifest [REV]')),
3000 "^merge":
3004 "^merge":
3001 (merge,
3005 (merge,
3002 [('f', 'force', None, _('force a merge with outstanding changes')),
3006 [('f', 'force', None, _('force a merge with outstanding changes')),
3003 ('r', 'rev', '', _('revision to merge')),
3007 ('r', 'rev', '', _('revision to merge')),
3004 ],
3008 ],
3005 _('hg merge [-f] [[-r] REV]')),
3009 _('hg merge [-f] [[-r] REV]')),
3006 "outgoing|out": (outgoing,
3010 "outgoing|out": (outgoing,
3007 [('M', 'no-merges', None, _('do not show merges')),
3011 [('M', 'no-merges', None, _('do not show merges')),
3008 ('f', 'force', None,
3012 ('f', 'force', None,
3009 _('run even when remote repository is unrelated')),
3013 _('run even when remote repository is unrelated')),
3010 ('p', 'patch', None, _('show patch')),
3014 ('p', 'patch', None, _('show patch')),
3011 ('', 'style', '', _('display using template map file')),
3015 ('', 'style', '', _('display using template map file')),
3012 ('r', 'rev', [], _('a specific revision you would like to push')),
3016 ('r', 'rev', [], _('a specific revision you would like to push')),
3013 ('n', 'newest-first', None, _('show newest record first')),
3017 ('n', 'newest-first', None, _('show newest record first')),
3014 ('', 'template', '', _('display with template')),
3018 ('', 'template', '', _('display with template')),
3015 ] + remoteopts,
3019 ] + remoteopts,
3016 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3020 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3017 "^parents":
3021 "^parents":
3018 (parents,
3022 (parents,
3019 [('r', 'rev', '', _('show parents from the specified rev')),
3023 [('r', 'rev', '', _('show parents from the specified rev')),
3020 ('', 'style', '', _('display using template map file')),
3024 ('', 'style', '', _('display using template map file')),
3021 ('', 'template', '', _('display with template'))],
3025 ('', 'template', '', _('display with template'))],
3022 _('hg parents [-r REV] [FILE]')),
3026 _('hg parents [-r REV] [FILE]')),
3023 "paths": (paths, [], _('hg paths [NAME]')),
3027 "paths": (paths, [], _('hg paths [NAME]')),
3024 "^pull":
3028 "^pull":
3025 (pull,
3029 (pull,
3026 [('u', 'update', None,
3030 [('u', 'update', None,
3027 _('update to new tip if changesets were pulled')),
3031 _('update to new tip if changesets were pulled')),
3028 ('f', 'force', None,
3032 ('f', 'force', None,
3029 _('run even when remote repository is unrelated')),
3033 _('run even when remote repository is unrelated')),
3030 ('r', 'rev', [],
3034 ('r', 'rev', [],
3031 _('a specific revision up to which you would like to pull')),
3035 _('a specific revision up to which you would like to pull')),
3032 ] + remoteopts,
3036 ] + remoteopts,
3033 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3037 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3034 "^push":
3038 "^push":
3035 (push,
3039 (push,
3036 [('f', 'force', None, _('force push')),
3040 [('f', 'force', None, _('force push')),
3037 ('r', 'rev', [], _('a specific revision you would like to push')),
3041 ('r', 'rev', [], _('a specific revision you would like to push')),
3038 ] + remoteopts,
3042 ] + remoteopts,
3039 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3043 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3040 "debugrawcommit|rawcommit":
3044 "debugrawcommit|rawcommit":
3041 (rawcommit,
3045 (rawcommit,
3042 [('p', 'parent', [], _('parent')),
3046 [('p', 'parent', [], _('parent')),
3043 ('d', 'date', '', _('date code')),
3047 ('d', 'date', '', _('date code')),
3044 ('u', 'user', '', _('user')),
3048 ('u', 'user', '', _('user')),
3045 ('F', 'files', '', _('file list'))
3049 ('F', 'files', '', _('file list'))
3046 ] + commitopts,
3050 ] + commitopts,
3047 _('hg debugrawcommit [OPTION]... [FILE]...')),
3051 _('hg debugrawcommit [OPTION]... [FILE]...')),
3048 "recover": (recover, [], _('hg recover')),
3052 "recover": (recover, [], _('hg recover')),
3049 "^remove|rm":
3053 "^remove|rm":
3050 (remove,
3054 (remove,
3051 [('A', 'after', None, _('record remove that has already occurred')),
3055 [('A', 'after', None, _('record remove that has already occurred')),
3052 ('f', 'force', None, _('remove file even if modified')),
3056 ('f', 'force', None, _('remove file even if modified')),
3053 ] + walkopts,
3057 ] + walkopts,
3054 _('hg remove [OPTION]... FILE...')),
3058 _('hg remove [OPTION]... FILE...')),
3055 "rename|mv":
3059 "rename|mv":
3056 (rename,
3060 (rename,
3057 [('A', 'after', None, _('record a rename that has already occurred')),
3061 [('A', 'after', None, _('record a rename that has already occurred')),
3058 ('f', 'force', None,
3062 ('f', 'force', None,
3059 _('forcibly copy over an existing managed file')),
3063 _('forcibly copy over an existing managed file')),
3060 ] + walkopts + dryrunopts,
3064 ] + walkopts + dryrunopts,
3061 _('hg rename [OPTION]... SOURCE... DEST')),
3065 _('hg rename [OPTION]... SOURCE... DEST')),
3062 "^revert":
3066 "^revert":
3063 (revert,
3067 (revert,
3064 [('a', 'all', None, _('revert all changes when no arguments given')),
3068 [('a', 'all', None, _('revert all changes when no arguments given')),
3065 ('d', 'date', '', _('tipmost revision matching date')),
3069 ('d', 'date', '', _('tipmost revision matching date')),
3066 ('r', 'rev', '', _('revision to revert to')),
3070 ('r', 'rev', '', _('revision to revert to')),
3067 ('', 'no-backup', None, _('do not save backup copies of files')),
3071 ('', 'no-backup', None, _('do not save backup copies of files')),
3068 ] + walkopts + dryrunopts,
3072 ] + walkopts + dryrunopts,
3069 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3073 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3070 "rollback": (rollback, [], _('hg rollback')),
3074 "rollback": (rollback, [], _('hg rollback')),
3071 "root": (root, [], _('hg root')),
3075 "root": (root, [], _('hg root')),
3072 "showconfig|debugconfig":
3076 "showconfig|debugconfig":
3073 (showconfig,
3077 (showconfig,
3074 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3078 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3075 _('showconfig [-u] [NAME]...')),
3079 _('showconfig [-u] [NAME]...')),
3076 "^serve":
3080 "^serve":
3077 (serve,
3081 (serve,
3078 [('A', 'accesslog', '', _('name of access log file to write to')),
3082 [('A', 'accesslog', '', _('name of access log file to write to')),
3079 ('d', 'daemon', None, _('run server in background')),
3083 ('d', 'daemon', None, _('run server in background')),
3080 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3084 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3081 ('E', 'errorlog', '', _('name of error log file to write to')),
3085 ('E', 'errorlog', '', _('name of error log file to write to')),
3082 ('p', 'port', 0, _('port to use (default: 8000)')),
3086 ('p', 'port', 0, _('port to use (default: 8000)')),
3083 ('a', 'address', '', _('address to use')),
3087 ('a', 'address', '', _('address to use')),
3084 ('n', 'name', '',
3088 ('n', 'name', '',
3085 _('name to show in web pages (default: working dir)')),
3089 _('name to show in web pages (default: working dir)')),
3086 ('', 'webdir-conf', '', _('name of the webdir config file'
3090 ('', 'webdir-conf', '', _('name of the webdir config file'
3087 ' (serve more than one repo)')),
3091 ' (serve more than one repo)')),
3088 ('', 'pid-file', '', _('name of file to write process ID to')),
3092 ('', 'pid-file', '', _('name of file to write process ID to')),
3089 ('', 'stdio', None, _('for remote clients')),
3093 ('', 'stdio', None, _('for remote clients')),
3090 ('t', 'templates', '', _('web templates to use')),
3094 ('t', 'templates', '', _('web templates to use')),
3091 ('', 'style', '', _('template style to use')),
3095 ('', 'style', '', _('template style to use')),
3092 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3096 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3093 ('', 'certificate', '', _('SSL certificate file'))],
3097 ('', 'certificate', '', _('SSL certificate file'))],
3094 _('hg serve [OPTION]...')),
3098 _('hg serve [OPTION]...')),
3095 "^status|st":
3099 "^status|st":
3096 (status,
3100 (status,
3097 [('A', 'all', None, _('show status of all files')),
3101 [('A', 'all', None, _('show status of all files')),
3098 ('m', 'modified', None, _('show only modified files')),
3102 ('m', 'modified', None, _('show only modified files')),
3099 ('a', 'added', None, _('show only added files')),
3103 ('a', 'added', None, _('show only added files')),
3100 ('r', 'removed', None, _('show only removed files')),
3104 ('r', 'removed', None, _('show only removed files')),
3101 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3105 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3102 ('c', 'clean', None, _('show only files without changes')),
3106 ('c', 'clean', None, _('show only files without changes')),
3103 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3107 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3104 ('i', 'ignored', None, _('show only ignored files')),
3108 ('i', 'ignored', None, _('show only ignored files')),
3105 ('n', 'no-status', None, _('hide status prefix')),
3109 ('n', 'no-status', None, _('hide status prefix')),
3106 ('C', 'copies', None, _('show source of copied files')),
3110 ('C', 'copies', None, _('show source of copied files')),
3107 ('0', 'print0', None,
3111 ('0', 'print0', None,
3108 _('end filenames with NUL, for use with xargs')),
3112 _('end filenames with NUL, for use with xargs')),
3109 ('', 'rev', [], _('show difference from revision')),
3113 ('', 'rev', [], _('show difference from revision')),
3110 ] + walkopts,
3114 ] + walkopts,
3111 _('hg status [OPTION]... [FILE]...')),
3115 _('hg status [OPTION]... [FILE]...')),
3112 "tag":
3116 "tag":
3113 (tag,
3117 (tag,
3114 [('f', 'force', None, _('replace existing tag')),
3118 [('f', 'force', None, _('replace existing tag')),
3115 ('l', 'local', None, _('make the tag local')),
3119 ('l', 'local', None, _('make the tag local')),
3116 ('m', 'message', '', _('message for tag commit log entry')),
3120 ('m', 'message', '', _('message for tag commit log entry')),
3117 ('d', 'date', '', _('record datecode as commit date')),
3121 ('d', 'date', '', _('record datecode as commit date')),
3118 ('u', 'user', '', _('record user as committer')),
3122 ('u', 'user', '', _('record user as committer')),
3119 ('r', 'rev', '', _('revision to tag')),
3123 ('r', 'rev', '', _('revision to tag')),
3120 ('', 'remove', None, _('remove a tag'))],
3124 ('', 'remove', None, _('remove a tag'))],
3121 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3125 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3122 "tags": (tags, [], _('hg tags')),
3126 "tags": (tags, [], _('hg tags')),
3123 "tip":
3127 "tip":
3124 (tip,
3128 (tip,
3125 [('', 'style', '', _('display using template map file')),
3129 [('', 'style', '', _('display using template map file')),
3126 ('p', 'patch', None, _('show patch')),
3130 ('p', 'patch', None, _('show patch')),
3127 ('', 'template', '', _('display with template'))],
3131 ('', 'template', '', _('display with template'))],
3128 _('hg tip [-p]')),
3132 _('hg tip [-p]')),
3129 "unbundle":
3133 "unbundle":
3130 (unbundle,
3134 (unbundle,
3131 [('u', 'update', None,
3135 [('u', 'update', None,
3132 _('update to new tip if changesets were unbundled'))],
3136 _('update to new tip if changesets were unbundled'))],
3133 _('hg unbundle [-u] FILE...')),
3137 _('hg unbundle [-u] FILE...')),
3134 "^update|up|checkout|co":
3138 "^update|up|checkout|co":
3135 (update,
3139 (update,
3136 [('C', 'clean', None, _('overwrite locally modified files')),
3140 [('C', 'clean', None, _('overwrite locally modified files')),
3137 ('d', 'date', '', _('tipmost revision matching date')),
3141 ('d', 'date', '', _('tipmost revision matching date')),
3138 ('r', 'rev', '', _('revision'))],
3142 ('r', 'rev', '', _('revision'))],
3139 _('hg update [-C] [-d DATE] [[-r] REV]')),
3143 _('hg update [-C] [-d DATE] [[-r] REV]')),
3140 "verify": (verify, [], _('hg verify')),
3144 "verify": (verify, [], _('hg verify')),
3141 "version": (version_, [], _('hg version')),
3145 "version": (version_, [], _('hg version')),
3142 }
3146 }
3143
3147
3144 extensions.commandtable = table
3148 extensions.commandtable = table
3145
3149
3146 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3150 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3147 " debugindex debugindexdot debugdate debuginstall")
3151 " debugindex debugindexdot debugdate debuginstall")
3148 optionalrepo = ("paths serve showconfig")
3152 optionalrepo = ("paths serve showconfig")
3149
3153
3150 def dispatch(args, argv0=None):
3154 def dispatch(args, argv0=None):
3151 try:
3155 try:
3152 u = ui.ui(traceback='--traceback' in args)
3156 u = ui.ui(traceback='--traceback' in args)
3153 except util.Abort, inst:
3157 except util.Abort, inst:
3154 sys.stderr.write(_("abort: %s\n") % inst)
3158 sys.stderr.write(_("abort: %s\n") % inst)
3155 return -1
3159 return -1
3156 return cmdutil.runcatch(u, args, argv0=argv0)
3160 return cmdutil.runcatch(u, args, argv0=argv0)
3157
3161
3158 def run():
3162 def run():
3159 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
3163 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
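For readers skimming the table above: each entry maps a command name to a (function, options, synopsis) tuple, where a leading '^' marks commands shown in the short help listing, '|' separates aliases, and every option is itself a (short flag, long flag, default, help text) tuple appended to shared groups such as walkopts or remoteopts. The fragment below is only a hedged illustration of that shape with invented names; the hello command and its greeting option are not part of this changeset.

# Illustrative sketch of the (function, options, synopsis) entry shape used
# by the command table above. All names below are invented for the example.
def hello(ui, repo, *pats, **opts):
    # command functions receive the ui, the repository, positional
    # arguments, and keyword options built from the option tuples
    greeting = opts.get('greeting') or 'hello'
    ui.write("%s from %s\n" % (greeting, repo.root))

example_entry = {
    "hello|hi":
        (hello,
         [('g', 'greeting', '', 'text to print instead of hello')],
         'hg hello [-g TEXT] [FILE]...'),
}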
@@ -1,488 +1,507 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import *
11 from i18n import _
11 from i18n import _
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 import cStringIO
13 import cStringIO
14
14
15 _unknown = ('?', 0, 0, 0)
15 _unknown = ('?', 0, 0, 0)
16 _format = ">cllll"
16 _format = ">cllll"
17
17
18 class dirstate(object):
18 class dirstate(object):
19
19
20 def __init__(self, opener, ui, root):
20 def __init__(self, opener, ui, root):
21 self._opener = opener
21 self._opener = opener
22 self._root = root
22 self._root = root
23 self._dirty = False
23 self._dirty = False
24 self._ui = ui
24 self._ui = ui
25
25
26 def __getattr__(self, name):
26 def __getattr__(self, name):
27 if name == '_map':
27 if name == '_map':
28 self._read()
28 self._read()
29 return self._map
29 return self._map
30 elif name == '_copymap':
30 elif name == '_copymap':
31 self._read()
31 self._read()
32 return self._copymap
32 return self._copymap
33 elif name == '_branch':
33 elif name == '_branch':
34 try:
34 try:
35 self._branch = (self._opener("branch").read().strip()
35 self._branch = (self._opener("branch").read().strip()
36 or "default")
36 or "default")
37 except IOError:
37 except IOError:
38 self._branch = "default"
38 self._branch = "default"
39 return self._branch
39 return self._branch
40 elif name == '_pl':
40 elif name == '_pl':
41 self._pl = [nullid, nullid]
41 self._pl = [nullid, nullid]
42 try:
42 try:
43 st = self._opener("dirstate").read(40)
43 st = self._opener("dirstate").read(40)
44 if len(st) == 40:
44 if len(st) == 40:
45 self._pl = st[:20], st[20:40]
45 self._pl = st[:20], st[20:40]
46 except IOError, err:
46 except IOError, err:
47 if err.errno != errno.ENOENT: raise
47 if err.errno != errno.ENOENT: raise
48 return self._pl
48 return self._pl
49 elif name == '_dirs':
49 elif name == '_dirs':
50 self._dirs = {}
50 self._dirs = {}
51 for f in self._map:
51 for f in self._map:
52 self._incpath(f)
52 self._incpath(f)
53 return self._dirs
53 return self._dirs
54 elif name == '_ignore':
54 elif name == '_ignore':
55 files = [self.wjoin('.hgignore')]
55 files = [self.wjoin('.hgignore')]
56 for name, path in self._ui.configitems("ui"):
56 for name, path in self._ui.configitems("ui"):
57 if name == 'ignore' or name.startswith('ignore.'):
57 if name == 'ignore' or name.startswith('ignore.'):
58 files.append(os.path.expanduser(path))
58 files.append(os.path.expanduser(path))
59 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
59 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
60 return self._ignore
60 return self._ignore
61 elif name == '_slash':
61 elif name == '_slash':
62 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
62 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
63 return self._slash
63 return self._slash
64 else:
64 else:
65 raise AttributeError, name
65 raise AttributeError, name
66
66
67 def wjoin(self, f):
67 def wjoin(self, f):
68 return os.path.join(self._root, f)
68 return os.path.join(self._root, f)
69
69
70 def getcwd(self):
70 def getcwd(self):
71 cwd = os.getcwd()
71 cwd = os.getcwd()
72 if cwd == self._root: return ''
72 if cwd == self._root: return ''
73 # self._root ends with a path separator if self._root is '/' or 'C:\'
73 # self._root ends with a path separator if self._root is '/' or 'C:\'
74 rootsep = self._root
74 rootsep = self._root
75 if not rootsep.endswith(os.sep):
75 if not rootsep.endswith(os.sep):
76 rootsep += os.sep
76 rootsep += os.sep
77 if cwd.startswith(rootsep):
77 if cwd.startswith(rootsep):
78 return cwd[len(rootsep):]
78 return cwd[len(rootsep):]
79 else:
79 else:
80 # we're outside the repo. return an absolute path.
80 # we're outside the repo. return an absolute path.
81 return cwd
81 return cwd
82
82
83 def pathto(self, f, cwd=None):
83 def pathto(self, f, cwd=None):
84 if cwd is None:
84 if cwd is None:
85 cwd = self.getcwd()
85 cwd = self.getcwd()
86 path = util.pathto(self._root, cwd, f)
86 path = util.pathto(self._root, cwd, f)
87 if self._slash:
87 if self._slash:
88 return path.replace(os.sep, '/')
88 return path.replace(os.sep, '/')
89 return path
89 return path
90
90
91 def __del__(self):
91 def __del__(self):
92 self.write()
92 self.write()
93
93
94 def __getitem__(self, key):
94 def __getitem__(self, key):
95 return self._map[key]
95 return self._map[key]
96
96
97 def __contains__(self, key):
97 def __contains__(self, key):
98 return key in self._map
98 return key in self._map
99
99
100 def __iter__(self):
100 def __iter__(self):
101 a = self._map.keys()
101 a = self._map.keys()
102 a.sort()
102 a.sort()
103 for x in a:
103 for x in a:
104 yield x
104 yield x
105
105
106 def parents(self):
106 def parents(self):
107 return self._pl
107 return self._pl
108
108
109 def branch(self):
109 def branch(self):
110 return self._branch
110 return self._branch
111
111
112 def setparents(self, p1, p2=nullid):
112 def setparents(self, p1, p2=nullid):
113 self._dirty = True
113 self._dirty = True
114 self._pl = p1, p2
114 self._pl = p1, p2
115
115
116 def setbranch(self, branch):
116 def setbranch(self, branch):
117 self._branch = branch
117 self._branch = branch
118 self._opener("branch", "w").write(branch + '\n')
118 self._opener("branch", "w").write(branch + '\n')
119
119
120 def state(self, key):
120 def state(self, key):
121 ''' current states:
122 n normal
123 m needs merging
124 r marked for removal
125 a marked for addition'''
121 return self._map.get(key, ("?",))[0]
126 return self._map.get(key, ("?",))[0]
122
127
123 def _read(self):
128 def _read(self):
124 self._map = {}
129 self._map = {}
125 self._copymap = {}
130 self._copymap = {}
126 self._pl = [nullid, nullid]
131 self._pl = [nullid, nullid]
127 try:
132 try:
128 st = self._opener("dirstate").read()
133 st = self._opener("dirstate").read()
129 except IOError, err:
134 except IOError, err:
130 if err.errno != errno.ENOENT: raise
135 if err.errno != errno.ENOENT: raise
131 return
136 return
132 if not st:
137 if not st:
133 return
138 return
134
139
135 self._pl = [st[:20], st[20: 40]]
140 self._pl = [st[:20], st[20: 40]]
136
141
137 # deref fields so they will be local in loop
142 # deref fields so they will be local in loop
138 dmap = self._map
143 dmap = self._map
139 copymap = self._copymap
144 copymap = self._copymap
140 unpack = struct.unpack
145 unpack = struct.unpack
141
146
142 pos = 40
147 pos = 40
143 e_size = struct.calcsize(_format)
148 e_size = struct.calcsize(_format)
144
149
145 while pos < len(st):
150 while pos < len(st):
146 newpos = pos + e_size
151 newpos = pos + e_size
147 e = unpack(_format, st[pos:newpos])
152 e = unpack(_format, st[pos:newpos])
148 l = e[4]
153 l = e[4]
149 pos = newpos
154 pos = newpos
150 newpos = pos + l
155 newpos = pos + l
151 f = st[pos:newpos]
156 f = st[pos:newpos]
152 if '\0' in f:
157 if '\0' in f:
153 f, c = f.split('\0')
158 f, c = f.split('\0')
154 copymap[f] = c
159 copymap[f] = c
155 dmap[f] = e[:4]
160 dmap[f] = e[:4]
156 pos = newpos
161 pos = newpos
157
162
158 def invalidate(self):
163 def invalidate(self):
159 for a in "_map _copymap _branch _pl _dirs _ignore".split():
164 for a in "_map _copymap _branch _pl _dirs _ignore".split():
160 if hasattr(self, a):
165 if hasattr(self, a):
161 self.__delattr__(a)
166 self.__delattr__(a)
162 self._dirty = False
167 self._dirty = False
163
168
164 def copy(self, source, dest):
169 def copy(self, source, dest):
165 self._dirty = True
170 self._dirty = True
166 self._copymap[dest] = source
171 self._copymap[dest] = source
167
172
168 def copied(self, file):
173 def copied(self, file):
169 return self._copymap.get(file, None)
174 return self._copymap.get(file, None)
170
175
171 def copies(self):
176 def copies(self):
172 return self._copymap
177 return self._copymap
173
178
174 def _incpath(self, path):
179 def _incpath(self, path):
175 for c in strutil.findall(path, '/'):
180 for c in strutil.findall(path, '/'):
176 pc = path[:c]
181 pc = path[:c]
177 self._dirs.setdefault(pc, 0)
182 self._dirs.setdefault(pc, 0)
178 self._dirs[pc] += 1
183 self._dirs[pc] += 1
179
184
180 def _decpath(self, path):
185 def _decpath(self, path):
181 for c in strutil.findall(path, '/'):
186 for c in strutil.findall(path, '/'):
182 pc = path[:c]
187 pc = path[:c]
183 self._dirs.setdefault(pc, 0)
188 self._dirs.setdefault(pc, 0)
184 self._dirs[pc] -= 1
189 self._dirs[pc] -= 1
185
190
186 def _incpathcheck(self, f):
191 def _incpathcheck(self, f):
187 if '\r' in f or '\n' in f:
192 if '\r' in f or '\n' in f:
188 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
193 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
189 # shadows
194 # shadows
190 if f in self._dirs:
195 if f in self._dirs:
191 raise util.Abort(_('directory named %r already in dirstate') % f)
196 raise util.Abort(_('directory named %r already in dirstate') % f)
192 for c in strutil.rfindall(f, '/'):
197 for c in strutil.rfindall(f, '/'):
193 d = f[:c]
198 d = f[:c]
194 if d in self._dirs:
199 if d in self._dirs:
195 break
200 break
196 if d in self._map:
201 if d in self._map:
197 raise util.Abort(_('file named %r already in dirstate') % d)
202 raise util.Abort(_('file named %r already in dirstate') % d)
198 self._incpath(f)
203 self._incpath(f)
199
204
200 def update(self, files, state, **kw):
205 def normal(self, f):
201 ''' current states:
206 'mark a file normal'
202 n normal
203 m needs merging
204 r marked for removal
205 a marked for addition'''
206
207 if not files: return
208 self._dirty = True
207 self._dirty = True
209 for f in files:
208 s = os.lstat(self.wjoin(f))
209 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
210 if self._copymap.has_key(f):
210 if self._copymap.has_key(f):
211 del self._copymap[f]
211 del self._copymap[f]
212
212
213 if state == "r":
213 def normaldirty(self, f):
214 self._map[f] = ('r', 0, 0, 0)
214 'mark a file normal, but possibly dirty'
215 self._decpath(f)
215 self._dirty = True
216 continue
216 s = os.lstat(self.wjoin(f))
217 else:
217 self._map[f] = ('n', s.st_mode, -1, -1)
218 if state == "a":
218 if f in self._copymap:
219 del self._copymap[f]
220
221 def add(self, f):
222 'mark a file added'
223 self._dirty = True
219 self._incpathcheck(f)
224 self._incpathcheck(f)
220 s = os.lstat(self.wjoin(f))
225 s = os.lstat(self.wjoin(f))
221 st_size = kw.get('st_size', s.st_size)
226 self._map[f] = ('a', s.st_mode, s.st_size, s.st_mtime)
222 st_mtime = kw.get('st_mtime', s.st_mtime)
227 if f in self._copymap:
223 self._map[f] = (state, s.st_mode, st_size, st_mtime)
228 del self._copymap[f]
229
230 def remove(self, f):
231 'mark a file removed'
232 self._dirty = True
233 self._map[f] = ('r', 0, 0, 0)
234 self._decpath(f)
235 if f in self._copymap:
236 del self._copymap[f]
224
237
225 def forget(self, files):
238 def merge(self, f):
226 if not files: return
239 'mark a file merged'
227 self._dirty = True
240 self._dirty = True
228 for f in files:
241 s = os.lstat(self.wjoin(f))
242 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
243 if f in self._copymap:
244 del self._copymap[f]
245
246 def forget(self, f):
247 'forget a file'
248 self._dirty = True
229 try:
249 try:
230 del self._map[f]
250 del self._map[f]
231 self._decpath(f)
251 self._decpath(f)
232 except KeyError:
252 except KeyError:
233 self._ui.warn(_("not in dirstate: %s!\n") % f)
253 self._ui.warn(_("not in dirstate: %s!\n") % f)
234 pass
235
254
236 def rebuild(self, parent, files):
255 def rebuild(self, parent, files):
237 self.invalidate()
256 self.invalidate()
238 for f in files:
257 for f in files:
239 if files.execf(f):
258 if files.execf(f):
240 self._map[f] = ('n', 0777, -1, 0)
259 self._map[f] = ('n', 0777, -1, 0)
241 else:
260 else:
242 self._map[f] = ('n', 0666, -1, 0)
261 self._map[f] = ('n', 0666, -1, 0)
243 self._pl = (parent, nullid)
262 self._pl = (parent, nullid)
244 self._dirty = True
263 self._dirty = True
245
264
246 def write(self):
265 def write(self):
247 if not self._dirty:
266 if not self._dirty:
248 return
267 return
249 cs = cStringIO.StringIO()
268 cs = cStringIO.StringIO()
250 cs.write("".join(self._pl))
269 cs.write("".join(self._pl))
251 for f, e in self._map.iteritems():
270 for f, e in self._map.iteritems():
252 c = self.copied(f)
271 c = self.copied(f)
253 if c:
272 if c:
254 f = f + "\0" + c
273 f = f + "\0" + c
255 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
274 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
256 cs.write(e)
275 cs.write(e)
257 cs.write(f)
276 cs.write(f)
258 st = self._opener("dirstate", "w", atomictemp=True)
277 st = self._opener("dirstate", "w", atomictemp=True)
259 st.write(cs.getvalue())
278 st.write(cs.getvalue())
260 st.rename()
279 st.rename()
261 self._dirty = False
280 self._dirty = False
262
281
263 def filterfiles(self, files):
282 def filterfiles(self, files):
264 ret = {}
283 ret = {}
265 unknown = []
284 unknown = []
266
285
267 for x in files:
286 for x in files:
268 if x == '.':
287 if x == '.':
269 return self._map.copy()
288 return self._map.copy()
270 if x not in self._map:
289 if x not in self._map:
271 unknown.append(x)
290 unknown.append(x)
272 else:
291 else:
273 ret[x] = self._map[x]
292 ret[x] = self._map[x]
274
293
275 if not unknown:
294 if not unknown:
276 return ret
295 return ret
277
296
278 b = self._map.keys()
297 b = self._map.keys()
279 b.sort()
298 b.sort()
280 blen = len(b)
299 blen = len(b)
281
300
282 for x in unknown:
301 for x in unknown:
283 bs = bisect.bisect(b, "%s%s" % (x, '/'))
302 bs = bisect.bisect(b, "%s%s" % (x, '/'))
284 while bs < blen:
303 while bs < blen:
285 s = b[bs]
304 s = b[bs]
286 if len(s) > len(x) and s.startswith(x):
305 if len(s) > len(x) and s.startswith(x):
287 ret[s] = self._map[s]
306 ret[s] = self._map[s]
288 else:
307 else:
289 break
308 break
290 bs += 1
309 bs += 1
291 return ret
310 return ret
292
311
293 def _supported(self, f, st, verbose=False):
312 def _supported(self, f, st, verbose=False):
294 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
313 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
295 return True
314 return True
296 if verbose:
315 if verbose:
297 kind = 'unknown'
316 kind = 'unknown'
298 if stat.S_ISCHR(st.st_mode): kind = _('character device')
317 if stat.S_ISCHR(st.st_mode): kind = _('character device')
299 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
318 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
300 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
319 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
301 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
320 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
302 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
321 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
303 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
322 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
304 % (self.pathto(f), kind))
323 % (self.pathto(f), kind))
305 return False
324 return False
306
325
307 def walk(self, files=None, match=util.always, badmatch=None):
326 def walk(self, files=None, match=util.always, badmatch=None):
308 # filter out the stat
327 # filter out the stat
309 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
328 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
310 yield src, f
329 yield src, f
311
330
312 def statwalk(self, files=None, match=util.always, ignored=False,
331 def statwalk(self, files=None, match=util.always, ignored=False,
313 badmatch=None, directories=False):
332 badmatch=None, directories=False):
314 '''
333 '''
315 walk recursively through the directory tree, finding all files
334 walk recursively through the directory tree, finding all files
316 matched by the match function
335 matched by the match function
317
336
318 results are yielded in a tuple (src, filename, st), where src
337 results are yielded in a tuple (src, filename, st), where src
319 is one of:
338 is one of:
320 'f' the file was found in the directory tree
339 'f' the file was found in the directory tree
321 'd' the file is a directory of the tree
340 'd' the file is a directory of the tree
322 'm' the file was only in the dirstate and not in the tree
341 'm' the file was only in the dirstate and not in the tree
323 'b' file was not found and matched badmatch
342 'b' file was not found and matched badmatch
324
343
325 and st is the stat result if the file was found in the directory.
344 and st is the stat result if the file was found in the directory.
326 '''
345 '''
327
346
328 # walk all files by default
347 # walk all files by default
329 if not files:
348 if not files:
330 files = ['.']
349 files = ['.']
331 dc = self._map.copy()
350 dc = self._map.copy()
332 else:
351 else:
333 files = util.unique(files)
352 files = util.unique(files)
334 dc = self.filterfiles(files)
353 dc = self.filterfiles(files)
335
354
336 def imatch(file_):
355 def imatch(file_):
337 if file_ not in dc and self._ignore(file_):
356 if file_ not in dc and self._ignore(file_):
338 return False
357 return False
339 return match(file_)
358 return match(file_)
340
359
341 ignore = self._ignore
360 ignore = self._ignore
342 if ignored:
361 if ignored:
343 imatch = match
362 imatch = match
344 ignore = util.never
363 ignore = util.never
345
364
346 # self._root may end with a path separator when self._root == '/'
365 # self._root may end with a path separator when self._root == '/'
347 common_prefix_len = len(self._root)
366 common_prefix_len = len(self._root)
348 if not self._root.endswith(os.sep):
367 if not self._root.endswith(os.sep):
349 common_prefix_len += 1
368 common_prefix_len += 1
350 # recursion free walker, faster than os.walk.
369 # recursion free walker, faster than os.walk.
351 def findfiles(s):
370 def findfiles(s):
352 work = [s]
371 work = [s]
353 if directories:
372 if directories:
354 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
373 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
355 while work:
374 while work:
356 top = work.pop()
375 top = work.pop()
357 names = os.listdir(top)
376 names = os.listdir(top)
358 names.sort()
377 names.sort()
359 # nd is the top of the repository dir tree
378 # nd is the top of the repository dir tree
360 nd = util.normpath(top[common_prefix_len:])
379 nd = util.normpath(top[common_prefix_len:])
361 if nd == '.':
380 if nd == '.':
362 nd = ''
381 nd = ''
363 else:
382 else:
364 # do not recurse into a repo contained in this
383 # do not recurse into a repo contained in this
365 # one. use bisect to find .hg directory so speed
384 # one. use bisect to find .hg directory so speed
366 # is good on big directory.
385 # is good on big directory.
367 hg = bisect.bisect_left(names, '.hg')
386 hg = bisect.bisect_left(names, '.hg')
368 if hg < len(names) and names[hg] == '.hg':
387 if hg < len(names) and names[hg] == '.hg':
369 if os.path.isdir(os.path.join(top, '.hg')):
388 if os.path.isdir(os.path.join(top, '.hg')):
370 continue
389 continue
371 for f in names:
390 for f in names:
372 np = util.pconvert(os.path.join(nd, f))
391 np = util.pconvert(os.path.join(nd, f))
373 if seen(np):
392 if seen(np):
374 continue
393 continue
375 p = os.path.join(top, f)
394 p = os.path.join(top, f)
376 # don't trip over symlinks
395 # don't trip over symlinks
377 st = os.lstat(p)
396 st = os.lstat(p)
378 if stat.S_ISDIR(st.st_mode):
397 if stat.S_ISDIR(st.st_mode):
379 if not ignore(np):
398 if not ignore(np):
380 work.append(p)
399 work.append(p)
381 if directories:
400 if directories:
382 yield 'd', np, st
401 yield 'd', np, st
383 if imatch(np) and np in dc:
402 if imatch(np) and np in dc:
384 yield 'm', np, st
403 yield 'm', np, st
385 elif imatch(np):
404 elif imatch(np):
386 if self._supported(np, st):
405 if self._supported(np, st):
387 yield 'f', np, st
406 yield 'f', np, st
388 elif np in dc:
407 elif np in dc:
389 yield 'm', np, st
408 yield 'm', np, st
390
409
391 known = {'.hg': 1}
410 known = {'.hg': 1}
392 def seen(fn):
411 def seen(fn):
393 if fn in known: return True
412 if fn in known: return True
394 known[fn] = 1
413 known[fn] = 1
395
414
396 # step one, find all files that match our criteria
415 # step one, find all files that match our criteria
397 files.sort()
416 files.sort()
398 for ff in files:
417 for ff in files:
399 nf = util.normpath(ff)
418 nf = util.normpath(ff)
400 f = self.wjoin(ff)
419 f = self.wjoin(ff)
401 try:
420 try:
402 st = os.lstat(f)
421 st = os.lstat(f)
403 except OSError, inst:
422 except OSError, inst:
404 found = False
423 found = False
405 for fn in dc:
424 for fn in dc:
406 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
425 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
407 found = True
426 found = True
408 break
427 break
409 if not found:
428 if not found:
410 if inst.errno != errno.ENOENT or not badmatch:
429 if inst.errno != errno.ENOENT or not badmatch:
411 self._ui.warn('%s: %s\n' %
430 self._ui.warn('%s: %s\n' %
412 (self.pathto(ff), inst.strerror))
431 (self.pathto(ff), inst.strerror))
413 elif badmatch and badmatch(ff) and imatch(nf):
432 elif badmatch and badmatch(ff) and imatch(nf):
414 yield 'b', ff, None
433 yield 'b', ff, None
415 continue
434 continue
416 if stat.S_ISDIR(st.st_mode):
435 if stat.S_ISDIR(st.st_mode):
417 cmp1 = (lambda x, y: cmp(x[1], y[1]))
436 cmp1 = (lambda x, y: cmp(x[1], y[1]))
418 sorted_ = [ x for x in findfiles(f) ]
437 sorted_ = [ x for x in findfiles(f) ]
419 sorted_.sort(cmp1)
438 sorted_.sort(cmp1)
420 for e in sorted_:
439 for e in sorted_:
421 yield e
440 yield e
422 else:
441 else:
423 if not seen(nf) and match(nf):
442 if not seen(nf) and match(nf):
424 if self._supported(ff, st, verbose=True):
443 if self._supported(ff, st, verbose=True):
425 yield 'f', nf, st
444 yield 'f', nf, st
426 elif ff in dc:
445 elif ff in dc:
427 yield 'm', nf, st
446 yield 'm', nf, st
428
447
429 # step two run through anything left in the dc hash and yield
448 # step two run through anything left in the dc hash and yield
430 # if we haven't already seen it
449 # if we haven't already seen it
431 ks = dc.keys()
450 ks = dc.keys()
432 ks.sort()
451 ks.sort()
433 for k in ks:
452 for k in ks:
434 if not seen(k) and imatch(k):
453 if not seen(k) and imatch(k):
435 yield 'm', k, None
454 yield 'm', k, None
436
455
437 def status(self, files=None, match=util.always, list_ignored=False,
456 def status(self, files=None, match=util.always, list_ignored=False,
438 list_clean=False):
457 list_clean=False):
439 lookup, modified, added, unknown, ignored = [], [], [], [], []
458 lookup, modified, added, unknown, ignored = [], [], [], [], []
440 removed, deleted, clean = [], [], []
459 removed, deleted, clean = [], [], []
441
460
442 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
461 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
443 try:
462 try:
444 type_, mode, size, time = self[fn]
463 type_, mode, size, time = self[fn]
445 except KeyError:
464 except KeyError:
446 if list_ignored and self._ignore(fn):
465 if list_ignored and self._ignore(fn):
447 ignored.append(fn)
466 ignored.append(fn)
448 else:
467 else:
449 unknown.append(fn)
468 unknown.append(fn)
450 continue
469 continue
451 if src == 'm':
470 if src == 'm':
452 nonexistent = True
471 nonexistent = True
453 if not st:
472 if not st:
454 try:
473 try:
455 st = os.lstat(self.wjoin(fn))
474 st = os.lstat(self.wjoin(fn))
456 except OSError, inst:
475 except OSError, inst:
457 if inst.errno != errno.ENOENT:
476 if inst.errno != errno.ENOENT:
458 raise
477 raise
459 st = None
478 st = None
460 # We need to re-check that it is a valid file
479 # We need to re-check that it is a valid file
461 if st and self._supported(fn, st):
480 if st and self._supported(fn, st):
462 nonexistent = False
481 nonexistent = False
463 # XXX: what to do with files no longer present in the fs
482 # XXX: what to do with files no longer present in the fs
464 # that are not removed in the dirstate?
483 # that are not removed in the dirstate?
465 if nonexistent and type_ in "nm":
484 if nonexistent and type_ in "nm":
466 deleted.append(fn)
485 deleted.append(fn)
467 continue
486 continue
468 # check the common case first
487 # check the common case first
469 if type_ == 'n':
488 if type_ == 'n':
470 if not st:
489 if not st:
471 st = os.lstat(self.wjoin(fn))
490 st = os.lstat(self.wjoin(fn))
472 if (size >= 0 and (size != st.st_size
491 if (size >= 0 and (size != st.st_size
473 or (mode ^ st.st_mode) & 0100)
492 or (mode ^ st.st_mode) & 0100)
474 or fn in self._copymap):
493 or fn in self._copymap):
475 modified.append(fn)
494 modified.append(fn)
476 elif time != int(st.st_mtime):
495 elif time != int(st.st_mtime):
477 lookup.append(fn)
496 lookup.append(fn)
478 elif list_clean:
497 elif list_clean:
479 clean.append(fn)
498 clean.append(fn)
480 elif type_ == 'm':
499 elif type_ == 'm':
481 modified.append(fn)
500 modified.append(fn)
482 elif type_ == 'a':
501 elif type_ == 'a':
483 added.append(fn)
502 added.append(fn)
484 elif type_ == 'r':
503 elif type_ == 'r':
485 removed.append(fn)
504 removed.append(fn)
486
505
487 return (lookup, modified, added, removed, deleted, unknown, ignored,
506 return (lookup, modified, added, removed, deleted, unknown, ignored,
488 clean)
507 clean)
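The hunk above is the core of this change: the single dirstate.update(files, state, **kw) entry point is replaced by one method per state transition, normal(), normaldirty(), add(), remove(), merge() and forget(), each taking a single filename, setting _dirty, recording the matching ('n'/'a'/'r'/'m') entry and dropping any stale copy record. A caller that used to pass a state string now calls the corresponding method directly. The fragment below is only a sketch of that caller-side migration; the repo variable stands for any existing localrepository instance and the helper names are invented for illustration.

# Sketch of the caller-side migration implied by this changeset.
# 'repo' is assumed to be an existing localrepository instance;
# the helper functions are invented for illustration only.

def mark_added(repo, files):
    ds = repo.dirstate
    for f in files:
        # previously: ds.update([f], 'a')
        ds.add(f)            # record f as added ('a')

def mark_removed(repo, files):
    ds = repo.dirstate
    for f in files:
        # previously: ds.update([f], 'r')
        ds.remove(f)         # record f as removed ('r')

def mark_clean(repo, files):
    ds = repo.dirstate
    for f in files:
        # previously: ds.update([f], 'n')
        ds.normal(f)         # record current mode/size/mtime as clean ('n')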
@@ -1,1947 +1,1949 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util, extensions, hook
13 import os, revlog, time, util, extensions, hook
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 self.path = path
23 self.path = path
24 self.root = os.path.realpath(path)
24 self.root = os.path.realpath(path)
25 self.path = os.path.join(self.root, ".hg")
25 self.path = os.path.join(self.root, ".hg")
26 self.origroot = path
26 self.origroot = path
27 self.opener = util.opener(self.path)
27 self.opener = util.opener(self.path)
28 self.wopener = util.opener(self.root)
28 self.wopener = util.opener(self.root)
29
29
30 if not os.path.isdir(self.path):
30 if not os.path.isdir(self.path):
31 if create:
31 if create:
32 if not os.path.exists(path):
32 if not os.path.exists(path):
33 os.mkdir(path)
33 os.mkdir(path)
34 os.mkdir(self.path)
34 os.mkdir(self.path)
35 requirements = ["revlogv1"]
35 requirements = ["revlogv1"]
36 if parentui.configbool('format', 'usestore', True):
36 if parentui.configbool('format', 'usestore', True):
37 os.mkdir(os.path.join(self.path, "store"))
37 os.mkdir(os.path.join(self.path, "store"))
38 requirements.append("store")
38 requirements.append("store")
39 # create an invalid changelog
39 # create an invalid changelog
40 self.opener("00changelog.i", "a").write(
40 self.opener("00changelog.i", "a").write(
41 '\0\0\0\2' # represents revlogv2
41 '\0\0\0\2' # represents revlogv2
42 ' dummy changelog to prevent using the old repo layout'
42 ' dummy changelog to prevent using the old repo layout'
43 )
43 )
44 reqfile = self.opener("requires", "w")
44 reqfile = self.opener("requires", "w")
45 for r in requirements:
45 for r in requirements:
46 reqfile.write("%s\n" % r)
46 reqfile.write("%s\n" % r)
47 reqfile.close()
47 reqfile.close()
48 else:
48 else:
49 raise repo.RepoError(_("repository %s not found") % path)
49 raise repo.RepoError(_("repository %s not found") % path)
50 elif create:
50 elif create:
51 raise repo.RepoError(_("repository %s already exists") % path)
51 raise repo.RepoError(_("repository %s already exists") % path)
52 else:
52 else:
53 # find requirements
53 # find requirements
54 try:
54 try:
55 requirements = self.opener("requires").read().splitlines()
55 requirements = self.opener("requires").read().splitlines()
56 except IOError, inst:
56 except IOError, inst:
57 if inst.errno != errno.ENOENT:
57 if inst.errno != errno.ENOENT:
58 raise
58 raise
59 requirements = []
59 requirements = []
60 # check them
60 # check them
61 for r in requirements:
61 for r in requirements:
62 if r not in self.supported:
62 if r not in self.supported:
63 raise repo.RepoError(_("requirement '%s' not supported") % r)
63 raise repo.RepoError(_("requirement '%s' not supported") % r)
64
64
65 # setup store
65 # setup store
66 if "store" in requirements:
66 if "store" in requirements:
67 self.encodefn = util.encodefilename
67 self.encodefn = util.encodefilename
68 self.decodefn = util.decodefilename
68 self.decodefn = util.decodefilename
69 self.spath = os.path.join(self.path, "store")
69 self.spath = os.path.join(self.path, "store")
70 else:
70 else:
71 self.encodefn = lambda x: x
71 self.encodefn = lambda x: x
72 self.decodefn = lambda x: x
72 self.decodefn = lambda x: x
73 self.spath = self.path
73 self.spath = self.path
74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
75
75
76 self.ui = ui.ui(parentui=parentui)
76 self.ui = ui.ui(parentui=parentui)
77 try:
77 try:
78 self.ui.readconfig(self.join("hgrc"), self.root)
78 self.ui.readconfig(self.join("hgrc"), self.root)
79 extensions.loadall(self.ui)
79 extensions.loadall(self.ui)
80 except IOError:
80 except IOError:
81 pass
81 pass
82
82
83 self.tagscache = None
83 self.tagscache = None
84 self.branchcache = None
84 self.branchcache = None
85 self.nodetagscache = None
85 self.nodetagscache = None
86 self.filterpats = {}
86 self.filterpats = {}
87 self.transhandle = None
87 self.transhandle = None
88
88
89 def __getattr__(self, name):
89 def __getattr__(self, name):
90 if name == 'changelog':
90 if name == 'changelog':
91 self.changelog = changelog.changelog(self.sopener)
91 self.changelog = changelog.changelog(self.sopener)
92 self.sopener.defversion = self.changelog.version
92 self.sopener.defversion = self.changelog.version
93 return self.changelog
93 return self.changelog
94 if name == 'manifest':
94 if name == 'manifest':
95 self.changelog
95 self.changelog
96 self.manifest = manifest.manifest(self.sopener)
96 self.manifest = manifest.manifest(self.sopener)
97 return self.manifest
97 return self.manifest
98 if name == 'dirstate':
98 if name == 'dirstate':
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
100 return self.dirstate
100 return self.dirstate
101 else:
101 else:
102 raise AttributeError, name
102 raise AttributeError, name
103
103
104 def url(self):
104 def url(self):
105 return 'file:' + self.root
105 return 'file:' + self.root
106
106
107 def hook(self, name, throw=False, **args):
107 def hook(self, name, throw=False, **args):
108 return hook.hook(self.ui, self, name, throw, **args)
108 return hook.hook(self.ui, self, name, throw, **args)
109
109
110 tag_disallowed = ':\r\n'
110 tag_disallowed = ':\r\n'
111
111
112 def _tag(self, name, node, message, local, user, date, parent=None,
112 def _tag(self, name, node, message, local, user, date, parent=None,
113 extra={}):
113 extra={}):
114 use_dirstate = parent is None
114 use_dirstate = parent is None
115
115
116 for c in self.tag_disallowed:
116 for c in self.tag_disallowed:
117 if c in name:
117 if c in name:
118 raise util.Abort(_('%r cannot be used in a tag name') % c)
118 raise util.Abort(_('%r cannot be used in a tag name') % c)
119
119
120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
121
121
122 def writetag(fp, name, munge, prevtags):
122 def writetag(fp, name, munge, prevtags):
123 if prevtags and prevtags[-1] != '\n':
123 if prevtags and prevtags[-1] != '\n':
124 fp.write('\n')
124 fp.write('\n')
125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
126 fp.close()
126 fp.close()
127 self.hook('tag', node=hex(node), tag=name, local=local)
127 self.hook('tag', node=hex(node), tag=name, local=local)
128
128
129 prevtags = ''
129 prevtags = ''
130 if local:
130 if local:
131 try:
131 try:
132 fp = self.opener('localtags', 'r+')
132 fp = self.opener('localtags', 'r+')
133 except IOError, err:
133 except IOError, err:
134 fp = self.opener('localtags', 'a')
134 fp = self.opener('localtags', 'a')
135 else:
135 else:
136 prevtags = fp.read()
136 prevtags = fp.read()
137
137
138 # local tags are stored in the current charset
138 # local tags are stored in the current charset
139 writetag(fp, name, None, prevtags)
139 writetag(fp, name, None, prevtags)
140 return
140 return
141
141
142 if use_dirstate:
142 if use_dirstate:
143 try:
143 try:
144 fp = self.wfile('.hgtags', 'rb+')
144 fp = self.wfile('.hgtags', 'rb+')
145 except IOError, err:
145 except IOError, err:
146 fp = self.wfile('.hgtags', 'ab')
146 fp = self.wfile('.hgtags', 'ab')
147 else:
147 else:
148 prevtags = fp.read()
148 prevtags = fp.read()
149 else:
149 else:
150 try:
150 try:
151 prevtags = self.filectx('.hgtags', parent).data()
151 prevtags = self.filectx('.hgtags', parent).data()
152 except revlog.LookupError:
152 except revlog.LookupError:
153 pass
153 pass
154 fp = self.wfile('.hgtags', 'wb')
154 fp = self.wfile('.hgtags', 'wb')
155
155
156 # committed tags are stored in UTF-8
156 # committed tags are stored in UTF-8
157 writetag(fp, name, util.fromlocal, prevtags)
157 writetag(fp, name, util.fromlocal, prevtags)
158
158
159 if use_dirstate and self.dirstate.state('.hgtags') == '?':
159 if use_dirstate and self.dirstate.state('.hgtags') == '?':
160 self.add(['.hgtags'])
160 self.add(['.hgtags'])
161
161
162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
163 extra=extra)
163 extra=extra)
164
164
165 self.hook('tag', node=hex(node), tag=name, local=local)
165 self.hook('tag', node=hex(node), tag=name, local=local)
166
166
167 return tagnode
167 return tagnode
168
168
169 def tag(self, name, node, message, local, user, date):
169 def tag(self, name, node, message, local, user, date):
170 '''tag a revision with a symbolic name.
170 '''tag a revision with a symbolic name.
171
171
172 if local is True, the tag is stored in a per-repository file.
172 if local is True, the tag is stored in a per-repository file.
173 otherwise, it is stored in the .hgtags file, and a new
173 otherwise, it is stored in the .hgtags file, and a new
174 changeset is committed with the change.
174 changeset is committed with the change.
175
175
176 keyword arguments:
176 keyword arguments:
177
177
178 local: whether to store tag in non-version-controlled file
178 local: whether to store tag in non-version-controlled file
179 (default False)
179 (default False)
180
180
181 message: commit message to use if committing
181 message: commit message to use if committing
182
182
183 user: name of user to use if committing
183 user: name of user to use if committing
184
184
185 date: date tuple to use if committing'''
185 date: date tuple to use if committing'''
186
186
187 for x in self.status()[:5]:
187 for x in self.status()[:5]:
188 if '.hgtags' in x:
188 if '.hgtags' in x:
189 raise util.Abort(_('working copy of .hgtags is changed '
189 raise util.Abort(_('working copy of .hgtags is changed '
190 '(please commit .hgtags manually)'))
190 '(please commit .hgtags manually)'))
191
191
192
192
193 self._tag(name, node, message, local, user, date)
193 self._tag(name, node, message, local, user, date)
194
194
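# Illustrative usage sketch, not part of localrepo.py: exercising the two
# tagging modes described in tag()'s docstring above.  Assumes `repo` is an
# already-open localrepository whose working copy of .hgtags is clean; the
# user names and messages are placeholders.
def tag_tip_example(repo):
    node = repo.changelog.tip()
    # local tag: recorded in .hg/localtags, no changeset is committed
    repo.tag('wip', node, 'marking work in progress', True, 'alice', None)
    # global tag: .hgtags is updated and a new changeset is committed
    repo.tag('v1.0', node, 'Added tag v1.0', False, 'alice', None)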
195 def tags(self):
195 def tags(self):
196 '''return a mapping of tag to node'''
196 '''return a mapping of tag to node'''
197 if self.tagscache:
197 if self.tagscache:
198 return self.tagscache
198 return self.tagscache
199
199
200 globaltags = {}
200 globaltags = {}
201
201
202 def readtags(lines, fn):
202 def readtags(lines, fn):
203 filetags = {}
203 filetags = {}
204 count = 0
204 count = 0
205
205
206 def warn(msg):
206 def warn(msg):
207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
208
208
209 for l in lines:
209 for l in lines:
210 count += 1
210 count += 1
211 if not l:
211 if not l:
212 continue
212 continue
213 s = l.split(" ", 1)
213 s = l.split(" ", 1)
214 if len(s) != 2:
214 if len(s) != 2:
215 warn(_("cannot parse entry"))
215 warn(_("cannot parse entry"))
216 continue
216 continue
217 node, key = s
217 node, key = s
218 key = util.tolocal(key.strip()) # stored in UTF-8
218 key = util.tolocal(key.strip()) # stored in UTF-8
219 try:
219 try:
220 bin_n = bin(node)
220 bin_n = bin(node)
221 except TypeError:
221 except TypeError:
222 warn(_("node '%s' is not well formed") % node)
222 warn(_("node '%s' is not well formed") % node)
223 continue
223 continue
224 if bin_n not in self.changelog.nodemap:
224 if bin_n not in self.changelog.nodemap:
225 warn(_("tag '%s' refers to unknown node") % key)
225 warn(_("tag '%s' refers to unknown node") % key)
226 continue
226 continue
227
227
228 h = []
228 h = []
229 if key in filetags:
229 if key in filetags:
230 n, h = filetags[key]
230 n, h = filetags[key]
231 h.append(n)
231 h.append(n)
232 filetags[key] = (bin_n, h)
232 filetags[key] = (bin_n, h)
233
233
234 for k, nh in filetags.items():
234 for k, nh in filetags.items():
235 if k not in globaltags:
235 if k not in globaltags:
236 globaltags[k] = nh
236 globaltags[k] = nh
237 continue
237 continue
238 # we prefer the global tag if:
238 # we prefer the global tag if:
239 # it supersedes us OR
239 # it supersedes us OR
240 # we supersede each other and it has a higher rank
240 # we supersede each other and it has a higher rank
241 # otherwise we win because we're tip-most
241 # otherwise we win because we're tip-most
242 an, ah = nh
242 an, ah = nh
243 bn, bh = globaltags[k]
243 bn, bh = globaltags[k]
244 if (bn != an and an in bh and
244 if (bn != an and an in bh and
245 (bn not in ah or len(bh) > len(ah))):
245 (bn not in ah or len(bh) > len(ah))):
246 an = bn
246 an = bn
247 ah.extend([n for n in bh if n not in ah])
247 ah.extend([n for n in bh if n not in ah])
248 globaltags[k] = an, ah
248 globaltags[k] = an, ah
249
249
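# Illustrative sketch, not part of localrepo.py: the preference rule from the
# comment above, restated on plain string "nodes".  (an, ah) is the entry just
# read from the tip-most copy of the tags file, (bn, bh) the entry already in
# globaltags; ah and bh are the lists of superseded old heads.
def resolve_tag(an, ah, bn, bh):
    # keep the existing global node if it supersedes us, or if we supersede
    # each other and it carries the longer history
    if bn != an and an in bh and (bn not in ah or len(bh) > len(ah)):
        an = bn
    return an, ah + [n for n in bh if n not in ah]

# 'new' already superseded 'old' globally, so 'new' wins even though the
# tip-most file still says 'old':
assert resolve_tag('old', [], 'new', ['old']) == ('new', ['old'])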
250 # read the tags file from each head, ending with the tip
250 # read the tags file from each head, ending with the tip
251 f = None
251 f = None
252 for rev, node, fnode in self._hgtagsnodes():
252 for rev, node, fnode in self._hgtagsnodes():
253 f = (f and f.filectx(fnode) or
253 f = (f and f.filectx(fnode) or
254 self.filectx('.hgtags', fileid=fnode))
254 self.filectx('.hgtags', fileid=fnode))
255 readtags(f.data().splitlines(), f)
255 readtags(f.data().splitlines(), f)
256
256
257 try:
257 try:
258 data = util.fromlocal(self.opener("localtags").read())
258 data = util.fromlocal(self.opener("localtags").read())
259 # localtags are stored in the local character set
259 # localtags are stored in the local character set
260 # while the internal tag table is stored in UTF-8
260 # while the internal tag table is stored in UTF-8
261 readtags(data.splitlines(), "localtags")
261 readtags(data.splitlines(), "localtags")
262 except IOError:
262 except IOError:
263 pass
263 pass
264
264
265 self.tagscache = {}
265 self.tagscache = {}
266 for k,nh in globaltags.items():
266 for k,nh in globaltags.items():
267 n = nh[0]
267 n = nh[0]
268 if n != nullid:
268 if n != nullid:
269 self.tagscache[k] = n
269 self.tagscache[k] = n
270 self.tagscache['tip'] = self.changelog.tip()
270 self.tagscache['tip'] = self.changelog.tip()
271
271
272 return self.tagscache
272 return self.tagscache
273
273
274 def _hgtagsnodes(self):
274 def _hgtagsnodes(self):
275 heads = self.heads()
275 heads = self.heads()
276 heads.reverse()
276 heads.reverse()
277 last = {}
277 last = {}
278 ret = []
278 ret = []
279 for node in heads:
279 for node in heads:
280 c = self.changectx(node)
280 c = self.changectx(node)
281 rev = c.rev()
281 rev = c.rev()
282 try:
282 try:
283 fnode = c.filenode('.hgtags')
283 fnode = c.filenode('.hgtags')
284 except revlog.LookupError:
284 except revlog.LookupError:
285 continue
285 continue
286 ret.append((rev, node, fnode))
286 ret.append((rev, node, fnode))
287 if fnode in last:
287 if fnode in last:
288 ret[last[fnode]] = None
288 ret[last[fnode]] = None
289 last[fnode] = len(ret) - 1
289 last[fnode] = len(ret) - 1
290 return [item for item in ret if item]
290 return [item for item in ret if item]
291
291
292 def tagslist(self):
292 def tagslist(self):
293 '''return a list of tags ordered by revision'''
293 '''return a list of tags ordered by revision'''
294 l = []
294 l = []
295 for t, n in self.tags().items():
295 for t, n in self.tags().items():
296 try:
296 try:
297 r = self.changelog.rev(n)
297 r = self.changelog.rev(n)
298 except:
298 except:
299 r = -2 # sort to the beginning of the list if unknown
299 r = -2 # sort to the beginning of the list if unknown
300 l.append((r, t, n))
300 l.append((r, t, n))
301 l.sort()
301 l.sort()
302 return [(t, n) for r, t, n in l]
302 return [(t, n) for r, t, n in l]
303
303
304 def nodetags(self, node):
304 def nodetags(self, node):
305 '''return the tags associated with a node'''
305 '''return the tags associated with a node'''
306 if not self.nodetagscache:
306 if not self.nodetagscache:
307 self.nodetagscache = {}
307 self.nodetagscache = {}
308 for t, n in self.tags().items():
308 for t, n in self.tags().items():
309 self.nodetagscache.setdefault(n, []).append(t)
309 self.nodetagscache.setdefault(n, []).append(t)
310 return self.nodetagscache.get(node, [])
310 return self.nodetagscache.get(node, [])
311
311
312 def _branchtags(self):
312 def _branchtags(self):
313 partial, last, lrev = self._readbranchcache()
313 partial, last, lrev = self._readbranchcache()
314
314
315 tiprev = self.changelog.count() - 1
315 tiprev = self.changelog.count() - 1
316 if lrev != tiprev:
316 if lrev != tiprev:
317 self._updatebranchcache(partial, lrev+1, tiprev+1)
317 self._updatebranchcache(partial, lrev+1, tiprev+1)
318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
319
319
320 return partial
320 return partial
321
321
322 def branchtags(self):
322 def branchtags(self):
323 if self.branchcache is not None:
323 if self.branchcache is not None:
324 return self.branchcache
324 return self.branchcache
325
325
326 self.branchcache = {} # avoid recursion in changectx
326 self.branchcache = {} # avoid recursion in changectx
327 partial = self._branchtags()
327 partial = self._branchtags()
328
328
329 # the branch cache is stored on disk as UTF-8, but in the local
329 # the branch cache is stored on disk as UTF-8, but in the local
330 # charset internally
330 # charset internally
331 for k, v in partial.items():
331 for k, v in partial.items():
332 self.branchcache[util.tolocal(k)] = v
332 self.branchcache[util.tolocal(k)] = v
333 return self.branchcache
333 return self.branchcache
334
334
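# Minimal sketch, not part of localrepo.py: the .hg/branch.cache format that
# _readbranchcache()/_writebranchcache() below exchange.  The first line is
# "<tip hex> <tip rev>"; every other line is "<node hex> <branch name>".  The
# 40-character hex values here are made-up placeholders.
def parse_branch_cache(text):
    lines = text.split('\n')
    last, lrev = lines.pop(0).split(' ', 1)
    heads = {}
    for l in lines:
        if not l:
            continue
        node, label = l.split(' ', 1)
        heads[label.strip()] = node
    return heads, last, int(lrev)

sample = ('ab' * 20 + ' 42\n' +
          'cd' * 20 + ' default\n' +
          'ef' * 20 + ' stable\n')
heads, tip, tiprev = parse_branch_cache(sample)
assert tiprev == 42 and sorted(heads.keys()) == ['default', 'stable']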
335 def _readbranchcache(self):
335 def _readbranchcache(self):
336 partial = {}
336 partial = {}
337 try:
337 try:
338 f = self.opener("branch.cache")
338 f = self.opener("branch.cache")
339 lines = f.read().split('\n')
339 lines = f.read().split('\n')
340 f.close()
340 f.close()
341 except (IOError, OSError):
341 except (IOError, OSError):
342 return {}, nullid, nullrev
342 return {}, nullid, nullrev
343
343
344 try:
344 try:
345 last, lrev = lines.pop(0).split(" ", 1)
345 last, lrev = lines.pop(0).split(" ", 1)
346 last, lrev = bin(last), int(lrev)
346 last, lrev = bin(last), int(lrev)
347 if not (lrev < self.changelog.count() and
347 if not (lrev < self.changelog.count() and
348 self.changelog.node(lrev) == last): # sanity check
348 self.changelog.node(lrev) == last): # sanity check
349 # invalidate the cache
349 # invalidate the cache
350 raise ValueError('Invalid branch cache: unknown tip')
350 raise ValueError('Invalid branch cache: unknown tip')
351 for l in lines:
351 for l in lines:
352 if not l: continue
352 if not l: continue
353 node, label = l.split(" ", 1)
353 node, label = l.split(" ", 1)
354 partial[label.strip()] = bin(node)
354 partial[label.strip()] = bin(node)
355 except (KeyboardInterrupt, util.SignalInterrupt):
355 except (KeyboardInterrupt, util.SignalInterrupt):
356 raise
356 raise
357 except Exception, inst:
357 except Exception, inst:
358 if self.ui.debugflag:
358 if self.ui.debugflag:
359 self.ui.warn(str(inst), '\n')
359 self.ui.warn(str(inst), '\n')
360 partial, last, lrev = {}, nullid, nullrev
360 partial, last, lrev = {}, nullid, nullrev
361 return partial, last, lrev
361 return partial, last, lrev
362
362
363 def _writebranchcache(self, branches, tip, tiprev):
363 def _writebranchcache(self, branches, tip, tiprev):
364 try:
364 try:
365 f = self.opener("branch.cache", "w", atomictemp=True)
365 f = self.opener("branch.cache", "w", atomictemp=True)
366 f.write("%s %s\n" % (hex(tip), tiprev))
366 f.write("%s %s\n" % (hex(tip), tiprev))
367 for label, node in branches.iteritems():
367 for label, node in branches.iteritems():
368 f.write("%s %s\n" % (hex(node), label))
368 f.write("%s %s\n" % (hex(node), label))
369 f.rename()
369 f.rename()
370 except (IOError, OSError):
370 except (IOError, OSError):
371 pass
371 pass
372
372
373 def _updatebranchcache(self, partial, start, end):
373 def _updatebranchcache(self, partial, start, end):
374 for r in xrange(start, end):
374 for r in xrange(start, end):
375 c = self.changectx(r)
375 c = self.changectx(r)
376 b = c.branch()
376 b = c.branch()
377 partial[b] = c.node()
377 partial[b] = c.node()
378
378
379 def lookup(self, key):
379 def lookup(self, key):
380 if key == '.':
380 if key == '.':
381 key, second = self.dirstate.parents()
381 key, second = self.dirstate.parents()
382 if key == nullid:
382 if key == nullid:
383 raise repo.RepoError(_("no revision checked out"))
383 raise repo.RepoError(_("no revision checked out"))
384 if second != nullid:
384 if second != nullid:
385 self.ui.warn(_("warning: working directory has two parents, "
385 self.ui.warn(_("warning: working directory has two parents, "
386 "tag '.' uses the first\n"))
386 "tag '.' uses the first\n"))
387 elif key == 'null':
387 elif key == 'null':
388 return nullid
388 return nullid
389 n = self.changelog._match(key)
389 n = self.changelog._match(key)
390 if n:
390 if n:
391 return n
391 return n
392 if key in self.tags():
392 if key in self.tags():
393 return self.tags()[key]
393 return self.tags()[key]
394 if key in self.branchtags():
394 if key in self.branchtags():
395 return self.branchtags()[key]
395 return self.branchtags()[key]
396 n = self.changelog._partialmatch(key)
396 n = self.changelog._partialmatch(key)
397 if n:
397 if n:
398 return n
398 return n
399 raise repo.RepoError(_("unknown revision '%s'") % key)
399 raise repo.RepoError(_("unknown revision '%s'") % key)
400
400
401 def dev(self):
401 def dev(self):
402 return os.lstat(self.path).st_dev
402 return os.lstat(self.path).st_dev
403
403
404 def local(self):
404 def local(self):
405 return True
405 return True
406
406
407 def join(self, f):
407 def join(self, f):
408 return os.path.join(self.path, f)
408 return os.path.join(self.path, f)
409
409
410 def sjoin(self, f):
410 def sjoin(self, f):
411 f = self.encodefn(f)
411 f = self.encodefn(f)
412 return os.path.join(self.spath, f)
412 return os.path.join(self.spath, f)
413
413
414 def wjoin(self, f):
414 def wjoin(self, f):
415 return os.path.join(self.root, f)
415 return os.path.join(self.root, f)
416
416
417 def file(self, f):
417 def file(self, f):
418 if f[0] == '/':
418 if f[0] == '/':
419 f = f[1:]
419 f = f[1:]
420 return filelog.filelog(self.sopener, f)
420 return filelog.filelog(self.sopener, f)
421
421
422 def changectx(self, changeid=None):
422 def changectx(self, changeid=None):
423 return context.changectx(self, changeid)
423 return context.changectx(self, changeid)
424
424
425 def workingctx(self):
425 def workingctx(self):
426 return context.workingctx(self)
426 return context.workingctx(self)
427
427
428 def parents(self, changeid=None):
428 def parents(self, changeid=None):
429 '''
429 '''
430 get list of changectxs for parents of changeid or working directory
430 get list of changectxs for parents of changeid or working directory
431 '''
431 '''
432 if changeid is None:
432 if changeid is None:
433 pl = self.dirstate.parents()
433 pl = self.dirstate.parents()
434 else:
434 else:
435 n = self.changelog.lookup(changeid)
435 n = self.changelog.lookup(changeid)
436 pl = self.changelog.parents(n)
436 pl = self.changelog.parents(n)
437 if pl[1] == nullid:
437 if pl[1] == nullid:
438 return [self.changectx(pl[0])]
438 return [self.changectx(pl[0])]
439 return [self.changectx(pl[0]), self.changectx(pl[1])]
439 return [self.changectx(pl[0]), self.changectx(pl[1])]
440
440
441 def filectx(self, path, changeid=None, fileid=None):
441 def filectx(self, path, changeid=None, fileid=None):
442 """changeid can be a changeset revision, node, or tag.
442 """changeid can be a changeset revision, node, or tag.
443 fileid can be a file revision or node."""
443 fileid can be a file revision or node."""
444 return context.filectx(self, path, changeid, fileid)
444 return context.filectx(self, path, changeid, fileid)
445
445
446 def getcwd(self):
446 def getcwd(self):
447 return self.dirstate.getcwd()
447 return self.dirstate.getcwd()
448
448
449 def pathto(self, f, cwd=None):
449 def pathto(self, f, cwd=None):
450 return self.dirstate.pathto(f, cwd)
450 return self.dirstate.pathto(f, cwd)
451
451
452 def wfile(self, f, mode='r'):
452 def wfile(self, f, mode='r'):
453 return self.wopener(f, mode)
453 return self.wopener(f, mode)
454
454
455 def _link(self, f):
455 def _link(self, f):
456 return os.path.islink(self.wjoin(f))
456 return os.path.islink(self.wjoin(f))
457
457
458 def _filter(self, filter, filename, data):
458 def _filter(self, filter, filename, data):
459 if filter not in self.filterpats:
459 if filter not in self.filterpats:
460 l = []
460 l = []
461 for pat, cmd in self.ui.configitems(filter):
461 for pat, cmd in self.ui.configitems(filter):
462 mf = util.matcher(self.root, "", [pat], [], [])[1]
462 mf = util.matcher(self.root, "", [pat], [], [])[1]
463 l.append((mf, cmd))
463 l.append((mf, cmd))
464 self.filterpats[filter] = l
464 self.filterpats[filter] = l
465
465
466 for mf, cmd in self.filterpats[filter]:
466 for mf, cmd in self.filterpats[filter]:
467 if mf(filename):
467 if mf(filename):
468 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
468 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
469 data = util.filter(data, cmd)
469 data = util.filter(data, cmd)
470 break
470 break
471
471
472 return data
472 return data
473
473
474 def wread(self, filename):
474 def wread(self, filename):
475 if self._link(filename):
475 if self._link(filename):
476 data = os.readlink(self.wjoin(filename))
476 data = os.readlink(self.wjoin(filename))
477 else:
477 else:
478 data = self.wopener(filename, 'r').read()
478 data = self.wopener(filename, 'r').read()
479 return self._filter("encode", filename, data)
479 return self._filter("encode", filename, data)
480
480
481 def wwrite(self, filename, data, flags):
481 def wwrite(self, filename, data, flags):
482 data = self._filter("decode", filename, data)
482 data = self._filter("decode", filename, data)
483 if "l" in flags:
483 if "l" in flags:
484 self.wopener.symlink(data, filename)
484 self.wopener.symlink(data, filename)
485 else:
485 else:
486 try:
486 try:
487 if self._link(filename):
487 if self._link(filename):
488 os.unlink(self.wjoin(filename))
488 os.unlink(self.wjoin(filename))
489 except OSError:
489 except OSError:
490 pass
490 pass
491 self.wopener(filename, 'w').write(data)
491 self.wopener(filename, 'w').write(data)
492 util.set_exec(self.wjoin(filename), "x" in flags)
492 util.set_exec(self.wjoin(filename), "x" in flags)
493
493
494 def wwritedata(self, filename, data):
494 def wwritedata(self, filename, data):
495 return self._filter("decode", filename, data)
495 return self._filter("decode", filename, data)
496
496
497 def transaction(self):
497 def transaction(self):
498 tr = self.transhandle
498 tr = self.transhandle
499 if tr != None and tr.running():
499 if tr != None and tr.running():
500 return tr.nest()
500 return tr.nest()
501
501
502 # save dirstate for rollback
502 # save dirstate for rollback
503 try:
503 try:
504 ds = self.opener("dirstate").read()
504 ds = self.opener("dirstate").read()
505 except IOError:
505 except IOError:
506 ds = ""
506 ds = ""
507 self.opener("journal.dirstate", "w").write(ds)
507 self.opener("journal.dirstate", "w").write(ds)
508
508
509 renames = [(self.sjoin("journal"), self.sjoin("undo")),
509 renames = [(self.sjoin("journal"), self.sjoin("undo")),
510 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
510 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
511 tr = transaction.transaction(self.ui.warn, self.sopener,
511 tr = transaction.transaction(self.ui.warn, self.sopener,
512 self.sjoin("journal"),
512 self.sjoin("journal"),
513 aftertrans(renames))
513 aftertrans(renames))
514 self.transhandle = tr
514 self.transhandle = tr
515 return tr
515 return tr
516
516
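# Sketch, not part of localrepo.py: the rename pairs wired up above through
# aftertrans() mean that a closed transaction leaves exactly these files
# behind for rollback() below.  Paths are relative to .hg/ and assume the
# "store" layout; without the store requirement the journal sits directly in
# .hg/.
undo_files = [
    ('store/journal',    'store/undo'),       # revlog truncation journal
    ('journal.dirstate', 'undo.dirstate'),    # pre-transaction dirstate copy
]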
517 def recover(self):
517 def recover(self):
518 l = self.lock()
518 l = self.lock()
519 if os.path.exists(self.sjoin("journal")):
519 if os.path.exists(self.sjoin("journal")):
520 self.ui.status(_("rolling back interrupted transaction\n"))
520 self.ui.status(_("rolling back interrupted transaction\n"))
521 transaction.rollback(self.sopener, self.sjoin("journal"))
521 transaction.rollback(self.sopener, self.sjoin("journal"))
522 self.invalidate()
522 self.invalidate()
523 return True
523 return True
524 else:
524 else:
525 self.ui.warn(_("no interrupted transaction available\n"))
525 self.ui.warn(_("no interrupted transaction available\n"))
526 return False
526 return False
527
527
528 def rollback(self, wlock=None, lock=None):
528 def rollback(self, wlock=None, lock=None):
529 if not wlock:
529 if not wlock:
530 wlock = self.wlock()
530 wlock = self.wlock()
531 if not lock:
531 if not lock:
532 lock = self.lock()
532 lock = self.lock()
533 if os.path.exists(self.sjoin("undo")):
533 if os.path.exists(self.sjoin("undo")):
534 self.ui.status(_("rolling back last transaction\n"))
534 self.ui.status(_("rolling back last transaction\n"))
535 transaction.rollback(self.sopener, self.sjoin("undo"))
535 transaction.rollback(self.sopener, self.sjoin("undo"))
536 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
536 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
537 self.invalidate()
537 self.invalidate()
538 self.dirstate.invalidate()
538 self.dirstate.invalidate()
539 else:
539 else:
540 self.ui.warn(_("no rollback information available\n"))
540 self.ui.warn(_("no rollback information available\n"))
541
541
542 def invalidate(self):
542 def invalidate(self):
543 for a in "changelog manifest".split():
543 for a in "changelog manifest".split():
544 if hasattr(self, a):
544 if hasattr(self, a):
545 self.__delattr__(a)
545 self.__delattr__(a)
546 self.tagscache = None
546 self.tagscache = None
547 self.nodetagscache = None
547 self.nodetagscache = None
548
548
549 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
549 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
550 desc=None):
550 desc=None):
551 try:
551 try:
552 l = lock.lock(lockname, 0, releasefn, desc=desc)
552 l = lock.lock(lockname, 0, releasefn, desc=desc)
553 except lock.LockHeld, inst:
553 except lock.LockHeld, inst:
554 if not wait:
554 if not wait:
555 raise
555 raise
556 self.ui.warn(_("waiting for lock on %s held by %r\n") %
556 self.ui.warn(_("waiting for lock on %s held by %r\n") %
557 (desc, inst.locker))
557 (desc, inst.locker))
558 # default to 600 seconds timeout
558 # default to 600 seconds timeout
559 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
559 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
560 releasefn, desc=desc)
560 releasefn, desc=desc)
561 if acquirefn:
561 if acquirefn:
562 acquirefn()
562 acquirefn()
563 return l
563 return l
564
564
565 def lock(self, wait=1):
565 def lock(self, wait=1):
566 return self.do_lock(self.sjoin("lock"), wait,
566 return self.do_lock(self.sjoin("lock"), wait,
567 acquirefn=self.invalidate,
567 acquirefn=self.invalidate,
568 desc=_('repository %s') % self.origroot)
568 desc=_('repository %s') % self.origroot)
569
569
570 def wlock(self, wait=1):
570 def wlock(self, wait=1):
571 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
571 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
572 self.dirstate.invalidate,
572 self.dirstate.invalidate,
573 desc=_('working directory of %s') % self.origroot)
573 desc=_('working directory of %s') % self.origroot)
574
574
575 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
575 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
576 """
576 """
577 commit an individual file as part of a larger transaction
577 commit an individual file as part of a larger transaction
578 """
578 """
579
579
580 t = self.wread(fn)
580 t = self.wread(fn)
581 fl = self.file(fn)
581 fl = self.file(fn)
582 fp1 = manifest1.get(fn, nullid)
582 fp1 = manifest1.get(fn, nullid)
583 fp2 = manifest2.get(fn, nullid)
583 fp2 = manifest2.get(fn, nullid)
584
584
585 meta = {}
585 meta = {}
586 cp = self.dirstate.copied(fn)
586 cp = self.dirstate.copied(fn)
587 if cp:
587 if cp:
588 # Mark the new revision of this file as a copy of another
588 # Mark the new revision of this file as a copy of another
589 # file. This copy data will effectively act as a parent
589 # file. This copy data will effectively act as a parent
590 # of this new revision. If this is a merge, the first
590 # of this new revision. If this is a merge, the first
591 # parent will be the nullid (meaning "look up the copy data")
591 # parent will be the nullid (meaning "look up the copy data")
592 # and the second one will be the other parent. For example:
592 # and the second one will be the other parent. For example:
593 #
593 #
594 # 0 --- 1 --- 3 rev1 changes file foo
594 # 0 --- 1 --- 3 rev1 changes file foo
595 # \ / rev2 renames foo to bar and changes it
595 # \ / rev2 renames foo to bar and changes it
596 # \- 2 -/ rev3 should have bar with all changes and
596 # \- 2 -/ rev3 should have bar with all changes and
597 # should record that bar descends from
597 # should record that bar descends from
598 # bar in rev2 and foo in rev1
598 # bar in rev2 and foo in rev1
599 #
599 #
600 # this allows this merge to succeed:
600 # this allows this merge to succeed:
601 #
601 #
602 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
602 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
603 # \ / merging rev3 and rev4 should use bar@rev2
603 # \ / merging rev3 and rev4 should use bar@rev2
604 # \- 2 --- 4 as the merge base
604 # \- 2 --- 4 as the merge base
605 #
605 #
606 meta["copy"] = cp
606 meta["copy"] = cp
607 if not manifest2: # not a branch merge
607 if not manifest2: # not a branch merge
608 meta["copyrev"] = hex(manifest1.get(cp, nullid))
608 meta["copyrev"] = hex(manifest1.get(cp, nullid))
609 fp2 = nullid
609 fp2 = nullid
610 elif fp2 != nullid: # copied on remote side
610 elif fp2 != nullid: # copied on remote side
611 meta["copyrev"] = hex(manifest1.get(cp, nullid))
611 meta["copyrev"] = hex(manifest1.get(cp, nullid))
612 elif fp1 != nullid: # copied on local side, reversed
612 elif fp1 != nullid: # copied on local side, reversed
613 meta["copyrev"] = hex(manifest2.get(cp))
613 meta["copyrev"] = hex(manifest2.get(cp))
614 fp2 = fp1
614 fp2 = fp1
615 else: # directory rename
615 else: # directory rename
616 meta["copyrev"] = hex(manifest1.get(cp, nullid))
616 meta["copyrev"] = hex(manifest1.get(cp, nullid))
617 self.ui.debug(_(" %s: copy %s:%s\n") %
617 self.ui.debug(_(" %s: copy %s:%s\n") %
618 (fn, cp, meta["copyrev"]))
618 (fn, cp, meta["copyrev"]))
619 fp1 = nullid
619 fp1 = nullid
620 elif fp2 != nullid:
620 elif fp2 != nullid:
621 # is one parent an ancestor of the other?
621 # is one parent an ancestor of the other?
622 fpa = fl.ancestor(fp1, fp2)
622 fpa = fl.ancestor(fp1, fp2)
623 if fpa == fp1:
623 if fpa == fp1:
624 fp1, fp2 = fp2, nullid
624 fp1, fp2 = fp2, nullid
625 elif fpa == fp2:
625 elif fpa == fp2:
626 fp2 = nullid
626 fp2 = nullid
627
627
628 # is the file unmodified from the parent? report existing entry
628 # is the file unmodified from the parent? report existing entry
629 if fp2 == nullid and not fl.cmp(fp1, t):
629 if fp2 == nullid and not fl.cmp(fp1, t):
630 return fp1
630 return fp1
631
631
632 changelist.append(fn)
632 changelist.append(fn)
633 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
633 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
634
634
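# Illustrative sketch, not part of localrepo.py: the metadata filecommit()
# above records for the simple, non-merge copy case described in its long
# comment block.  'foo' was copied to 'bar' in the working directory; the new
# filelog revision of 'bar' names its origin in the metadata and takes a null
# first parent (the "look up the copy data" marker).  The copyrev hex value is
# a made-up placeholder, and plain strings stand in for the binary nullid used
# by the real code.
nullid_hex = '0' * 40
meta = {
    'copy': 'foo',            # source path, as reported by dirstate.copied()
    'copyrev': 'ab' * 20,     # filelog node of foo in the first-parent manifest
}
fp1 = fp2 = nullid_hex        # both filelog parents of bar's new revision are null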
635 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
635 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
636 if p1 is None:
636 if p1 is None:
637 p1, p2 = self.dirstate.parents()
637 p1, p2 = self.dirstate.parents()
638 return self.commit(files=files, text=text, user=user, date=date,
638 return self.commit(files=files, text=text, user=user, date=date,
639 p1=p1, p2=p2, wlock=wlock, extra=extra)
639 p1=p1, p2=p2, wlock=wlock, extra=extra)
640
640
641 def commit(self, files=None, text="", user=None, date=None,
641 def commit(self, files=None, text="", user=None, date=None,
642 match=util.always, force=False, lock=None, wlock=None,
642 match=util.always, force=False, lock=None, wlock=None,
643 force_editor=False, p1=None, p2=None, extra={}):
643 force_editor=False, p1=None, p2=None, extra={}):
644
644
645 commit = []
645 commit = []
646 remove = []
646 remove = []
647 changed = []
647 changed = []
648 use_dirstate = (p1 is None) # not rawcommit
648 use_dirstate = (p1 is None) # not rawcommit
649 extra = extra.copy()
649 extra = extra.copy()
650
650
651 if use_dirstate:
651 if use_dirstate:
652 if files:
652 if files:
653 for f in files:
653 for f in files:
654 s = self.dirstate.state(f)
654 s = self.dirstate.state(f)
655 if s in 'nmai':
655 if s in 'nmai':
656 commit.append(f)
656 commit.append(f)
657 elif s == 'r':
657 elif s == 'r':
658 remove.append(f)
658 remove.append(f)
659 else:
659 else:
660 self.ui.warn(_("%s not tracked!\n") % f)
660 self.ui.warn(_("%s not tracked!\n") % f)
661 else:
661 else:
662 changes = self.status(match=match)[:5]
662 changes = self.status(match=match)[:5]
663 modified, added, removed, deleted, unknown = changes
663 modified, added, removed, deleted, unknown = changes
664 commit = modified + added
664 commit = modified + added
665 remove = removed
665 remove = removed
666 else:
666 else:
667 commit = files
667 commit = files
668
668
669 if use_dirstate:
669 if use_dirstate:
670 p1, p2 = self.dirstate.parents()
670 p1, p2 = self.dirstate.parents()
671 update_dirstate = True
671 update_dirstate = True
672 else:
672 else:
673 p1, p2 = p1, p2 or nullid
673 p1, p2 = p1, p2 or nullid
674 update_dirstate = (self.dirstate.parents()[0] == p1)
674 update_dirstate = (self.dirstate.parents()[0] == p1)
675
675
676 c1 = self.changelog.read(p1)
676 c1 = self.changelog.read(p1)
677 c2 = self.changelog.read(p2)
677 c2 = self.changelog.read(p2)
678 m1 = self.manifest.read(c1[0]).copy()
678 m1 = self.manifest.read(c1[0]).copy()
679 m2 = self.manifest.read(c2[0])
679 m2 = self.manifest.read(c2[0])
680
680
681 if use_dirstate:
681 if use_dirstate:
682 branchname = self.workingctx().branch()
682 branchname = self.workingctx().branch()
683 try:
683 try:
684 branchname = branchname.decode('UTF-8').encode('UTF-8')
684 branchname = branchname.decode('UTF-8').encode('UTF-8')
685 except UnicodeDecodeError:
685 except UnicodeDecodeError:
686 raise util.Abort(_('branch name not in UTF-8!'))
686 raise util.Abort(_('branch name not in UTF-8!'))
687 else:
687 else:
688 branchname = ""
688 branchname = ""
689
689
690 if use_dirstate:
690 if use_dirstate:
691 oldname = c1[5].get("branch") # stored in UTF-8
691 oldname = c1[5].get("branch") # stored in UTF-8
692 if (not commit and not remove and not force and p2 == nullid
692 if (not commit and not remove and not force and p2 == nullid
693 and branchname == oldname):
693 and branchname == oldname):
694 self.ui.status(_("nothing changed\n"))
694 self.ui.status(_("nothing changed\n"))
695 return None
695 return None
696
696
697 xp1 = hex(p1)
697 xp1 = hex(p1)
698 if p2 == nullid: xp2 = ''
698 if p2 == nullid: xp2 = ''
699 else: xp2 = hex(p2)
699 else: xp2 = hex(p2)
700
700
701 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
701 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
702
702
703 if not wlock:
703 if not wlock:
704 wlock = self.wlock()
704 wlock = self.wlock()
705 if not lock:
705 if not lock:
706 lock = self.lock()
706 lock = self.lock()
707 tr = self.transaction()
707 tr = self.transaction()
708
708
709 # check in files
709 # check in files
710 new = {}
710 new = {}
711 linkrev = self.changelog.count()
711 linkrev = self.changelog.count()
712 commit.sort()
712 commit.sort()
713 is_exec = util.execfunc(self.root, m1.execf)
713 is_exec = util.execfunc(self.root, m1.execf)
714 is_link = util.linkfunc(self.root, m1.linkf)
714 is_link = util.linkfunc(self.root, m1.linkf)
715 for f in commit:
715 for f in commit:
716 self.ui.note(f + "\n")
716 self.ui.note(f + "\n")
717 try:
717 try:
718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
719 new_exec = is_exec(f)
719 new_exec = is_exec(f)
720 new_link = is_link(f)
720 new_link = is_link(f)
721 if not changed or changed[-1] != f:
721 if not changed or changed[-1] != f:
722 # mention the file in the changelog if some flag changed,
722 # mention the file in the changelog if some flag changed,
723 # even if there was no content change.
723 # even if there was no content change.
724 old_exec = m1.execf(f)
724 old_exec = m1.execf(f)
725 old_link = m1.linkf(f)
725 old_link = m1.linkf(f)
726 if old_exec != new_exec or old_link != new_link:
726 if old_exec != new_exec or old_link != new_link:
727 changed.append(f)
727 changed.append(f)
728 m1.set(f, new_exec, new_link)
728 m1.set(f, new_exec, new_link)
729 except (OSError, IOError):
729 except (OSError, IOError):
730 if use_dirstate:
730 if use_dirstate:
731 self.ui.warn(_("trouble committing %s!\n") % f)
731 self.ui.warn(_("trouble committing %s!\n") % f)
732 raise
732 raise
733 else:
733 else:
734 remove.append(f)
734 remove.append(f)
735
735
736 # update manifest
736 # update manifest
737 m1.update(new)
737 m1.update(new)
738 remove.sort()
738 remove.sort()
739 removed = []
739 removed = []
740
740
741 for f in remove:
741 for f in remove:
742 if f in m1:
742 if f in m1:
743 del m1[f]
743 del m1[f]
744 removed.append(f)
744 removed.append(f)
745 elif f in m2:
745 elif f in m2:
746 removed.append(f)
746 removed.append(f)
747 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
747 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
748
748
749 # add changeset
749 # add changeset
750 new = new.keys()
750 new = new.keys()
751 new.sort()
751 new.sort()
752
752
753 user = user or self.ui.username()
753 user = user or self.ui.username()
754 if not text or force_editor:
754 if not text or force_editor:
755 edittext = []
755 edittext = []
756 if text:
756 if text:
757 edittext.append(text)
757 edittext.append(text)
758 edittext.append("")
758 edittext.append("")
759 edittext.append("HG: user: %s" % user)
759 edittext.append("HG: user: %s" % user)
760 if p2 != nullid:
760 if p2 != nullid:
761 edittext.append("HG: branch merge")
761 edittext.append("HG: branch merge")
762 if branchname:
762 if branchname:
763 edittext.append("HG: branch %s" % util.tolocal(branchname))
763 edittext.append("HG: branch %s" % util.tolocal(branchname))
764 edittext.extend(["HG: changed %s" % f for f in changed])
764 edittext.extend(["HG: changed %s" % f for f in changed])
765 edittext.extend(["HG: removed %s" % f for f in removed])
765 edittext.extend(["HG: removed %s" % f for f in removed])
766 if not changed and not remove:
766 if not changed and not remove:
767 edittext.append("HG: no files changed")
767 edittext.append("HG: no files changed")
768 edittext.append("")
768 edittext.append("")
769 # run editor in the repository root
769 # run editor in the repository root
770 olddir = os.getcwd()
770 olddir = os.getcwd()
771 os.chdir(self.root)
771 os.chdir(self.root)
772 text = self.ui.edit("\n".join(edittext), user)
772 text = self.ui.edit("\n".join(edittext), user)
773 os.chdir(olddir)
773 os.chdir(olddir)
774
774
775 lines = [line.rstrip() for line in text.rstrip().splitlines()]
775 lines = [line.rstrip() for line in text.rstrip().splitlines()]
776 while lines and not lines[0]:
776 while lines and not lines[0]:
777 del lines[0]
777 del lines[0]
778 if not lines:
778 if not lines:
779 return None
779 return None
780 text = '\n'.join(lines)
780 text = '\n'.join(lines)
781 if branchname:
781 if branchname:
782 extra["branch"] = branchname
782 extra["branch"] = branchname
783 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
783 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
784 user, date, extra)
784 user, date, extra)
785 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
785 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
786 parent2=xp2)
786 parent2=xp2)
787 tr.close()
787 tr.close()
788
788
789 if self.branchcache and "branch" in extra:
789 if self.branchcache and "branch" in extra:
790 self.branchcache[util.tolocal(extra["branch"])] = n
790 self.branchcache[util.tolocal(extra["branch"])] = n
791
791
792 if use_dirstate or update_dirstate:
792 if use_dirstate or update_dirstate:
793 self.dirstate.setparents(n)
793 self.dirstate.setparents(n)
794 if use_dirstate:
794 if use_dirstate:
795 self.dirstate.update(new, "n")
795 for f in new:
796 self.dirstate.forget(removed)
796 self.dirstate.normal(f)
797 for f in removed:
798 self.dirstate.forget(f)
797
799
798 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
800 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
799 return n
801 return n
800
802
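# Sketch, not part of localrepo.py: for an interactive commit the editor text
# assembled in commit() above comes out roughly like this (user, branch and
# file names are invented; the commit message is typed above the HG: lines).
example_editor_text = '\n'.join([
    '',
    'HG: user: alice',
    'HG: branch stable',
    'HG: changed foo.py',
    'HG: removed bar.py',
    '',
])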
801 def walk(self, node=None, files=[], match=util.always, badmatch=None):
803 def walk(self, node=None, files=[], match=util.always, badmatch=None):
802 '''
804 '''
803 walk recursively through the directory tree or a given
805 walk recursively through the directory tree or a given
804 changeset, finding all files matched by the match
806 changeset, finding all files matched by the match
805 function
807 function
806
808
807 results are yielded in a tuple (src, filename), where src
809 results are yielded in a tuple (src, filename), where src
808 is one of:
810 is one of:
809 'f' the file was found in the directory tree
811 'f' the file was found in the directory tree
810 'm' the file was only in the dirstate and not in the tree
812 'm' the file was only in the dirstate and not in the tree
811 'b' file was not found and matched badmatch
813 'b' file was not found and matched badmatch
812 '''
814 '''
813
815
814 if node:
816 if node:
815 fdict = dict.fromkeys(files)
817 fdict = dict.fromkeys(files)
816 # for dirstate.walk, files=['.'] means "walk the whole tree".
818 # for dirstate.walk, files=['.'] means "walk the whole tree".
817 # follow that here, too
819 # follow that here, too
818 fdict.pop('.', None)
820 fdict.pop('.', None)
819 mdict = self.manifest.read(self.changelog.read(node)[0])
821 mdict = self.manifest.read(self.changelog.read(node)[0])
820 mfiles = mdict.keys()
822 mfiles = mdict.keys()
821 mfiles.sort()
823 mfiles.sort()
822 for fn in mfiles:
824 for fn in mfiles:
823 for ffn in fdict:
825 for ffn in fdict:
824 # match if the file is the exact name or a directory
826 # match if the file is the exact name or a directory
825 if ffn == fn or fn.startswith("%s/" % ffn):
827 if ffn == fn or fn.startswith("%s/" % ffn):
826 del fdict[ffn]
828 del fdict[ffn]
827 break
829 break
828 if match(fn):
830 if match(fn):
829 yield 'm', fn
831 yield 'm', fn
830 ffiles = fdict.keys()
832 ffiles = fdict.keys()
831 ffiles.sort()
833 ffiles.sort()
832 for fn in ffiles:
834 for fn in ffiles:
833 if badmatch and badmatch(fn):
835 if badmatch and badmatch(fn):
834 if match(fn):
836 if match(fn):
835 yield 'b', fn
837 yield 'b', fn
836 else:
838 else:
837 self.ui.warn(_('%s: No such file in rev %s\n')
839 self.ui.warn(_('%s: No such file in rev %s\n')
838 % (self.pathto(fn), short(node)))
840 % (self.pathto(fn), short(node)))
839 else:
841 else:
840 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
842 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
841 yield src, fn
843 yield src, fn
842
844
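# Illustrative usage sketch, not part of localrepo.py: consuming the
# (src, filename) pairs documented in walk()'s docstring above.  Assumes
# `repo` is an open localrepository; with no node given the walk covers the
# working directory via the dirstate.
def split_working_files(repo):
    found, missing = [], []
    for src, fn in repo.walk():
        if src == 'f':      # present in the working directory tree
            found.append(fn)
        elif src == 'm':    # known to the dirstate but not found on disk
            missing.append(fn)
    return found, missing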
843 def status(self, node1=None, node2=None, files=[], match=util.always,
845 def status(self, node1=None, node2=None, files=[], match=util.always,
844 wlock=None, list_ignored=False, list_clean=False):
846 wlock=None, list_ignored=False, list_clean=False):
845 """return status of files between two nodes or node and working directory
847 """return status of files between two nodes or node and working directory
846
848
847 If node1 is None, use the first dirstate parent instead.
849 If node1 is None, use the first dirstate parent instead.
848 If node2 is None, compare node1 with working directory.
850 If node2 is None, compare node1 with working directory.
849 """
851 """
850
852
851 def fcmp(fn, getnode):
853 def fcmp(fn, getnode):
852 t1 = self.wread(fn)
854 t1 = self.wread(fn)
853 return self.file(fn).cmp(getnode(fn), t1)
855 return self.file(fn).cmp(getnode(fn), t1)
854
856
855 def mfmatches(node):
857 def mfmatches(node):
856 change = self.changelog.read(node)
858 change = self.changelog.read(node)
857 mf = self.manifest.read(change[0]).copy()
859 mf = self.manifest.read(change[0]).copy()
858 for fn in mf.keys():
860 for fn in mf.keys():
859 if not match(fn):
861 if not match(fn):
860 del mf[fn]
862 del mf[fn]
861 return mf
863 return mf
862
864
863 modified, added, removed, deleted, unknown = [], [], [], [], []
865 modified, added, removed, deleted, unknown = [], [], [], [], []
864 ignored, clean = [], []
866 ignored, clean = [], []
865
867
866 compareworking = False
868 compareworking = False
867 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
869 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
868 compareworking = True
870 compareworking = True
869
871
870 if not compareworking:
872 if not compareworking:
871 # read the manifest from node1 before the manifest from node2,
873 # read the manifest from node1 before the manifest from node2,
872 # so that we'll hit the manifest cache if we're going through
874 # so that we'll hit the manifest cache if we're going through
873 # all the revisions in parent->child order.
875 # all the revisions in parent->child order.
874 mf1 = mfmatches(node1)
876 mf1 = mfmatches(node1)
875
877
876 mywlock = False
878 mywlock = False
877
879
878 # are we comparing the working directory?
880 # are we comparing the working directory?
879 if not node2:
881 if not node2:
880 (lookup, modified, added, removed, deleted, unknown,
882 (lookup, modified, added, removed, deleted, unknown,
881 ignored, clean) = self.dirstate.status(files, match,
883 ignored, clean) = self.dirstate.status(files, match,
882 list_ignored, list_clean)
884 list_ignored, list_clean)
883
885
884 # are we comparing working dir against its parent?
886 # are we comparing working dir against its parent?
885 if compareworking:
887 if compareworking:
886 if lookup:
888 if lookup:
887 # do a full compare of any files that might have changed
889 # do a full compare of any files that might have changed
888 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
890 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
889 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
891 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
890 nullid)
892 nullid)
891 for f in lookup:
893 for f in lookup:
892 if fcmp(f, getnode):
894 if fcmp(f, getnode):
893 modified.append(f)
895 modified.append(f)
894 else:
896 else:
895 if list_clean:
897 if list_clean:
896 clean.append(f)
898 clean.append(f)
897 if not wlock and not mywlock:
899 if not wlock and not mywlock:
898 mywlock = True
900 mywlock = True
899 try:
901 try:
900 wlock = self.wlock(wait=0)
902 wlock = self.wlock(wait=0)
901 except lock.LockException:
903 except lock.LockException:
902 pass
904 pass
903 if wlock:
905 if wlock:
904 self.dirstate.update([f], "n")
906 self.dirstate.normal(f)
905 else:
907 else:
906 # we are comparing working dir against non-parent
908 # we are comparing working dir against non-parent
907 # generate a pseudo-manifest for the working dir
909 # generate a pseudo-manifest for the working dir
908 # XXX: create it in dirstate.py ?
910 # XXX: create it in dirstate.py ?
909 mf2 = mfmatches(self.dirstate.parents()[0])
911 mf2 = mfmatches(self.dirstate.parents()[0])
910 is_exec = util.execfunc(self.root, mf2.execf)
912 is_exec = util.execfunc(self.root, mf2.execf)
911 is_link = util.linkfunc(self.root, mf2.linkf)
913 is_link = util.linkfunc(self.root, mf2.linkf)
912 for f in lookup + modified + added:
914 for f in lookup + modified + added:
913 mf2[f] = ""
915 mf2[f] = ""
914 mf2.set(f, is_exec(f), is_link(f))
916 mf2.set(f, is_exec(f), is_link(f))
915 for f in removed:
917 for f in removed:
916 if f in mf2:
918 if f in mf2:
917 del mf2[f]
919 del mf2[f]
918
920
919 if mywlock and wlock:
921 if mywlock and wlock:
920 wlock.release()
922 wlock.release()
921 else:
923 else:
922 # we are comparing two revisions
924 # we are comparing two revisions
923 mf2 = mfmatches(node2)
925 mf2 = mfmatches(node2)
924
926
925 if not compareworking:
927 if not compareworking:
926 # flush lists from dirstate before comparing manifests
928 # flush lists from dirstate before comparing manifests
927 modified, added, clean = [], [], []
929 modified, added, clean = [], [], []
928
930
929 # make sure to sort the files so we talk to the disk in a
931 # make sure to sort the files so we talk to the disk in a
930 # reasonable order
932 # reasonable order
931 mf2keys = mf2.keys()
933 mf2keys = mf2.keys()
932 mf2keys.sort()
934 mf2keys.sort()
933 getnode = lambda fn: mf1.get(fn, nullid)
935 getnode = lambda fn: mf1.get(fn, nullid)
934 for fn in mf2keys:
936 for fn in mf2keys:
935 if mf1.has_key(fn):
937 if mf1.has_key(fn):
936 if (mf1.flags(fn) != mf2.flags(fn) or
938 if (mf1.flags(fn) != mf2.flags(fn) or
937 (mf1[fn] != mf2[fn] and
939 (mf1[fn] != mf2[fn] and
938 (mf2[fn] != "" or fcmp(fn, getnode)))):
940 (mf2[fn] != "" or fcmp(fn, getnode)))):
939 modified.append(fn)
941 modified.append(fn)
940 elif list_clean:
942 elif list_clean:
941 clean.append(fn)
943 clean.append(fn)
942 del mf1[fn]
944 del mf1[fn]
943 else:
945 else:
944 added.append(fn)
946 added.append(fn)
945
947
946 removed = mf1.keys()
948 removed = mf1.keys()
947
949
948 # sort and return results:
950 # sort and return results:
949 for l in modified, added, removed, deleted, unknown, ignored, clean:
951 for l in modified, added, removed, deleted, unknown, ignored, clean:
950 l.sort()
952 l.sort()
951 return (modified, added, removed, deleted, unknown, ignored, clean)
953 return (modified, added, removed, deleted, unknown, ignored, clean)
952
954
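# Illustrative sketch, not part of localrepo.py: unpacking the seven lists
# returned by status() above, in the order the code builds them.  Assumes
# `repo` is an open localrepository.
def summarize_status(repo):
    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(list_ignored=True, list_clean=True)
    dirty = bool(modified or added or removed or deleted)
    return dirty, len(unknown), len(ignored), len(clean)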
953 def add(self, list, wlock=None):
955 def add(self, list, wlock=None):
954 if not wlock:
956 if not wlock:
955 wlock = self.wlock()
957 wlock = self.wlock()
956 for f in list:
958 for f in list:
957 p = self.wjoin(f)
959 p = self.wjoin(f)
958 try:
960 try:
959 st = os.lstat(p)
961 st = os.lstat(p)
960 except:
962 except:
961 self.ui.warn(_("%s does not exist!\n") % f)
963 self.ui.warn(_("%s does not exist!\n") % f)
962 continue
964 continue
963 if st.st_size > 10000000:
965 if st.st_size > 10000000:
964 self.ui.warn(_("%s: files over 10MB may cause memory and"
966 self.ui.warn(_("%s: files over 10MB may cause memory and"
965 " performance problems\n"
967 " performance problems\n"
966 "(use 'hg revert %s' to unadd the file)\n")
968 "(use 'hg revert %s' to unadd the file)\n")
967 % (f, f))
969 % (f, f))
968 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
970 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
969 self.ui.warn(_("%s not added: only files and symlinks "
971 self.ui.warn(_("%s not added: only files and symlinks "
970 "supported currently\n") % f)
972 "supported currently\n") % f)
971 elif self.dirstate.state(f) in 'an':
973 elif self.dirstate.state(f) in 'an':
972 self.ui.warn(_("%s already tracked!\n") % f)
974 self.ui.warn(_("%s already tracked!\n") % f)
973 else:
975 else:
974 self.dirstate.update([f], "a")
976 self.dirstate.add(f)
975
977
976 def forget(self, list, wlock=None):
978 def forget(self, list, wlock=None):
977 if not wlock:
979 if not wlock:
978 wlock = self.wlock()
980 wlock = self.wlock()
979 for f in list:
981 for f in list:
980 if self.dirstate.state(f) not in 'ai':
982 if self.dirstate.state(f) not in 'ai':
981 self.ui.warn(_("%s not added!\n") % f)
983 self.ui.warn(_("%s not added!\n") % f)
982 else:
984 else:
983 self.dirstate.forget([f])
985 self.dirstate.forget(f)
984
986
985 def remove(self, list, unlink=False, wlock=None):
987 def remove(self, list, unlink=False, wlock=None):
986 if unlink:
988 if unlink:
987 for f in list:
989 for f in list:
988 try:
990 try:
989 util.unlink(self.wjoin(f))
991 util.unlink(self.wjoin(f))
990 except OSError, inst:
992 except OSError, inst:
991 if inst.errno != errno.ENOENT:
993 if inst.errno != errno.ENOENT:
992 raise
994 raise
993 if not wlock:
995 if not wlock:
994 wlock = self.wlock()
996 wlock = self.wlock()
995 for f in list:
997 for f in list:
996 if unlink and os.path.exists(self.wjoin(f)):
998 if unlink and os.path.exists(self.wjoin(f)):
997 self.ui.warn(_("%s still exists!\n") % f)
999 self.ui.warn(_("%s still exists!\n") % f)
998 elif self.dirstate.state(f) == 'a':
1000 elif self.dirstate.state(f) == 'a':
999 self.dirstate.forget([f])
1001 self.dirstate.forget(f)
1000 elif f not in self.dirstate:
1002 elif f not in self.dirstate:
1001 self.ui.warn(_("%s not tracked!\n") % f)
1003 self.ui.warn(_("%s not tracked!\n") % f)
1002 else:
1004 else:
1003 self.dirstate.update([f], "r")
1005 self.dirstate.remove(f)
1004
1006
1005 def undelete(self, list, wlock=None):
1007 def undelete(self, list, wlock=None):
1006 p = self.dirstate.parents()[0]
1008 p = self.dirstate.parents()[0]
1007 mn = self.changelog.read(p)[0]
1009 mn = self.changelog.read(p)[0]
1008 m = self.manifest.read(mn)
1010 m = self.manifest.read(mn)
1009 if not wlock:
1011 if not wlock:
1010 wlock = self.wlock()
1012 wlock = self.wlock()
1011 for f in list:
1013 for f in list:
1012 if self.dirstate.state(f) not in "r":
1014 if self.dirstate.state(f) not in "r":
1013 self.ui.warn("%s not removed!\n" % f)
1015 self.ui.warn("%s not removed!\n" % f)
1014 else:
1016 else:
1015 t = self.file(f).read(m[f])
1017 t = self.file(f).read(m[f])
1016 self.wwrite(f, t, m.flags(f))
1018 self.wwrite(f, t, m.flags(f))
1017 self.dirstate.update([f], "n")
1019 self.dirstate.normal(f)
1018
1020
1019 def copy(self, source, dest, wlock=None):
1021 def copy(self, source, dest, wlock=None):
1020 p = self.wjoin(dest)
1022 p = self.wjoin(dest)
1021 if not (os.path.exists(p) or os.path.islink(p)):
1023 if not (os.path.exists(p) or os.path.islink(p)):
1022 self.ui.warn(_("%s does not exist!\n") % dest)
1024 self.ui.warn(_("%s does not exist!\n") % dest)
1023 elif not (os.path.isfile(p) or os.path.islink(p)):
1025 elif not (os.path.isfile(p) or os.path.islink(p)):
1024 self.ui.warn(_("copy failed: %s is not a file or a "
1026 self.ui.warn(_("copy failed: %s is not a file or a "
1025 "symbolic link\n") % dest)
1027 "symbolic link\n") % dest)
1026 else:
1028 else:
1027 if not wlock:
1029 if not wlock:
1028 wlock = self.wlock()
1030 wlock = self.wlock()
1029 if self.dirstate.state(dest) == '?':
1031 if self.dirstate.state(dest) == '?':
1030 self.dirstate.update([dest], "a")
1032 self.dirstate.add(dest)
1031 self.dirstate.copy(source, dest)
1033 self.dirstate.copy(source, dest)
1032
1034
1033 def heads(self, start=None):
1035 def heads(self, start=None):
1034 heads = self.changelog.heads(start)
1036 heads = self.changelog.heads(start)
1035 # sort the output in rev descending order
1037 # sort the output in rev descending order
1036 heads = [(-self.changelog.rev(h), h) for h in heads]
1038 heads = [(-self.changelog.rev(h), h) for h in heads]
1037 heads.sort()
1039 heads.sort()
1038 return [n for (r, n) in heads]
1040 return [n for (r, n) in heads]
1039
1041
1040 def branchheads(self, branch, start=None):
1042 def branchheads(self, branch, start=None):
1041 branches = self.branchtags()
1043 branches = self.branchtags()
1042 if branch not in branches:
1044 if branch not in branches:
1043 return []
1045 return []
1044 # The basic algorithm is this:
1046 # The basic algorithm is this:
1045 #
1047 #
1046 # Start from the branch tip since there are no later revisions that can
1048 # Start from the branch tip since there are no later revisions that can
1047 # possibly be in this branch, and the tip is a guaranteed head.
1049 # possibly be in this branch, and the tip is a guaranteed head.
1048 #
1050 #
1049 # Remember the tip's parents as the first ancestors, since these by
1051 # Remember the tip's parents as the first ancestors, since these by
1050 # definition are not heads.
1052 # definition are not heads.
1051 #
1053 #
1052 # Step backwards from the branch tip through all the revisions. We are
1054 # Step backwards from the branch tip through all the revisions. We are
1053 # guaranteed by the rules of Mercurial that we will now be visiting the
1055 # guaranteed by the rules of Mercurial that we will now be visiting the
1054 # nodes in reverse topological order (children before parents).
1056 # nodes in reverse topological order (children before parents).
1055 #
1057 #
1056 # If a revision is one of the ancestors of a head then we can toss it
1058 # If a revision is one of the ancestors of a head then we can toss it
1057 # out of the ancestors set (we've already found it and won't be
1059 # out of the ancestors set (we've already found it and won't be
1058 # visiting it again) and put its parents in the ancestors set.
1060 # visiting it again) and put its parents in the ancestors set.
1059 #
1061 #
1060 # Otherwise, if a revision is in the branch it's another head, since it
1062 # Otherwise, if a revision is in the branch it's another head, since it
1061 # wasn't in the ancestor list of an existing head. So add it to the
1063 # wasn't in the ancestor list of an existing head. So add it to the
1062 # head list, and add its parents to the ancestor list.
1064 # head list, and add its parents to the ancestor list.
1063 #
1065 #
1064 # If it is not in the branch ignore it.
1066 # If it is not in the branch ignore it.
1065 #
1067 #
1066 # Once we have a list of heads, use nodesbetween to filter out all the
1068 # Once we have a list of heads, use nodesbetween to filter out all the
1067 # heads that cannot be reached from startrev. There may be a more
1069 # heads that cannot be reached from startrev. There may be a more
1068 # efficient way to do this as part of the previous algorithm.
1070 # efficient way to do this as part of the previous algorithm.
1069
1071
1070 set = util.set
1072 set = util.set
1071 heads = [self.changelog.rev(branches[branch])]
1073 heads = [self.changelog.rev(branches[branch])]
1072 # Don't care if ancestors contains nullrev or not.
1074 # Don't care if ancestors contains nullrev or not.
1073 ancestors = set(self.changelog.parentrevs(heads[0]))
1075 ancestors = set(self.changelog.parentrevs(heads[0]))
1074 for rev in xrange(heads[0] - 1, nullrev, -1):
1076 for rev in xrange(heads[0] - 1, nullrev, -1):
1075 if rev in ancestors:
1077 if rev in ancestors:
1076 ancestors.update(self.changelog.parentrevs(rev))
1078 ancestors.update(self.changelog.parentrevs(rev))
1077 ancestors.remove(rev)
1079 ancestors.remove(rev)
1078 elif self.changectx(rev).branch() == branch:
1080 elif self.changectx(rev).branch() == branch:
1079 heads.append(rev)
1081 heads.append(rev)
1080 ancestors.update(self.changelog.parentrevs(rev))
1082 ancestors.update(self.changelog.parentrevs(rev))
1081 heads = [self.changelog.node(rev) for rev in heads]
1083 heads = [self.changelog.node(rev) for rev in heads]
1082 if start is not None:
1084 if start is not None:
1083 heads = self.changelog.nodesbetween([start], heads)[2]
1085 heads = self.changelog.nodesbetween([start], heads)[2]
1084 return heads
1086 return heads
1085
1087
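# [Editor's illustration -- not part of the diffed source; names are hypothetical.]
# The scan described in the comments of branchheads above, reduced to plain
# integers: walk revisions from the branch tip downwards, keep a set of known
# ancestors, and treat anything on the branch that is not already an ancestor
# as another head.
def toy_branchheads(tip, parents, branch_of, branch):
    heads = [tip]
    ancestors = set(parents[tip])
    for rev in range(tip - 1, -1, -1):
        if rev in ancestors:
            ancestors.update(parents[rev])
            ancestors.discard(rev)
        elif branch_of[rev] == branch:
            heads.append(rev)
            ancestors.update(parents[rev])
    return heads
# e.g. toy_branchheads(3, {0: [], 1: [0], 2: [0], 3: [1]},
#                      {0: 'x', 1: 'x', 2: 'x', 3: 'x'}, 'x') == [3, 2]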
1086 def branches(self, nodes):
1088 def branches(self, nodes):
1087 if not nodes:
1089 if not nodes:
1088 nodes = [self.changelog.tip()]
1090 nodes = [self.changelog.tip()]
1089 b = []
1091 b = []
1090 for n in nodes:
1092 for n in nodes:
1091 t = n
1093 t = n
1092 while 1:
1094 while 1:
1093 p = self.changelog.parents(n)
1095 p = self.changelog.parents(n)
1094 if p[1] != nullid or p[0] == nullid:
1096 if p[1] != nullid or p[0] == nullid:
1095 b.append((t, n, p[0], p[1]))
1097 b.append((t, n, p[0], p[1]))
1096 break
1098 break
1097 n = p[0]
1099 n = p[0]
1098 return b
1100 return b
1099
1101
1100 def between(self, pairs):
1102 def between(self, pairs):
1101 r = []
1103 r = []
1102
1104
1103 for top, bottom in pairs:
1105 for top, bottom in pairs:
1104 n, l, i = top, [], 0
1106 n, l, i = top, [], 0
1105 f = 1
1107 f = 1
1106
1108
1107 while n != bottom:
1109 while n != bottom:
1108 p = self.changelog.parents(n)[0]
1110 p = self.changelog.parents(n)[0]
1109 if i == f:
1111 if i == f:
1110 l.append(n)
1112 l.append(n)
1111 f = f * 2
1113 f = f * 2
1112 n = p
1114 n = p
1113 i += 1
1115 i += 1
1114
1116
1115 r.append(l)
1117 r.append(l)
1116
1118
1117 return r
1119 return r
1118
1120
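# [Editor's illustration -- not part of the diffed source; names are hypothetical.]
# between() walks the first-parent chain from top towards bottom and records
# the nodes at distances 1, 2, 4, 8, ... from top; this exponential sampling is
# what lets the discovery code in findincoming below narrow a branch range in
# roughly a logarithmic number of round trips.  The same loop over plain
# integers, where the parent of n is n - 1:
def toy_between(top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f *= 2
        n, i = n - 1, i + 1
    return l
# toy_between(20, 0) == [19, 18, 16, 12, 4]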
1119 def findincoming(self, remote, base=None, heads=None, force=False):
1121 def findincoming(self, remote, base=None, heads=None, force=False):
1120 """Return list of roots of the subsets of missing nodes from remote
1122 """Return list of roots of the subsets of missing nodes from remote
1121
1123
1122 If base dict is specified, assume that these nodes and their parents
1124 If base dict is specified, assume that these nodes and their parents
1123 exist on the remote side and that no child of a node of base exists
1125 exist on the remote side and that no child of a node of base exists
1124 in both remote and self.
1126 in both remote and self.
1125 Furthermore, base will be updated to include the nodes that exist
1127 Furthermore, base will be updated to include the nodes that exist
1126 in both self and remote but have no children that exist in both.
1128 in both self and remote but have no children that exist in both.
1127 If a list of heads is specified, return only nodes which are heads
1129 If a list of heads is specified, return only nodes which are heads
1128 or ancestors of these heads.
1130 or ancestors of these heads.
1129
1131
1130 All the ancestors of base are in self and in remote.
1132 All the ancestors of base are in self and in remote.
1131 All the descendants of the list returned are missing in self.
1133 All the descendants of the list returned are missing in self.
1132 (and so we know that the rest of the nodes are missing in remote, see
1134 (and so we know that the rest of the nodes are missing in remote, see
1133 outgoing)
1135 outgoing)
1134 """
1136 """
1135 m = self.changelog.nodemap
1137 m = self.changelog.nodemap
1136 search = []
1138 search = []
1137 fetch = {}
1139 fetch = {}
1138 seen = {}
1140 seen = {}
1139 seenbranch = {}
1141 seenbranch = {}
1140 if base == None:
1142 if base == None:
1141 base = {}
1143 base = {}
1142
1144
1143 if not heads:
1145 if not heads:
1144 heads = remote.heads()
1146 heads = remote.heads()
1145
1147
1146 if self.changelog.tip() == nullid:
1148 if self.changelog.tip() == nullid:
1147 base[nullid] = 1
1149 base[nullid] = 1
1148 if heads != [nullid]:
1150 if heads != [nullid]:
1149 return [nullid]
1151 return [nullid]
1150 return []
1152 return []
1151
1153
1152 # assume we're closer to the tip than the root
1154 # assume we're closer to the tip than the root
1153 # and start by examining the heads
1155 # and start by examining the heads
1154 self.ui.status(_("searching for changes\n"))
1156 self.ui.status(_("searching for changes\n"))
1155
1157
1156 unknown = []
1158 unknown = []
1157 for h in heads:
1159 for h in heads:
1158 if h not in m:
1160 if h not in m:
1159 unknown.append(h)
1161 unknown.append(h)
1160 else:
1162 else:
1161 base[h] = 1
1163 base[h] = 1
1162
1164
1163 if not unknown:
1165 if not unknown:
1164 return []
1166 return []
1165
1167
1166 req = dict.fromkeys(unknown)
1168 req = dict.fromkeys(unknown)
1167 reqcnt = 0
1169 reqcnt = 0
1168
1170
1169 # search through remote branches
1171 # search through remote branches
1170 # a 'branch' here is a linear segment of history, with four parts:
1172 # a 'branch' here is a linear segment of history, with four parts:
1171 # head, root, first parent, second parent
1173 # head, root, first parent, second parent
1172 # (a branch always has two parents (or none) by definition)
1174 # (a branch always has two parents (or none) by definition)
1173 unknown = remote.branches(unknown)
1175 unknown = remote.branches(unknown)
1174 while unknown:
1176 while unknown:
1175 r = []
1177 r = []
1176 while unknown:
1178 while unknown:
1177 n = unknown.pop(0)
1179 n = unknown.pop(0)
1178 if n[0] in seen:
1180 if n[0] in seen:
1179 continue
1181 continue
1180
1182
1181 self.ui.debug(_("examining %s:%s\n")
1183 self.ui.debug(_("examining %s:%s\n")
1182 % (short(n[0]), short(n[1])))
1184 % (short(n[0]), short(n[1])))
1183 if n[0] == nullid: # found the end of the branch
1185 if n[0] == nullid: # found the end of the branch
1184 pass
1186 pass
1185 elif n in seenbranch:
1187 elif n in seenbranch:
1186 self.ui.debug(_("branch already found\n"))
1188 self.ui.debug(_("branch already found\n"))
1187 continue
1189 continue
1188 elif n[1] and n[1] in m: # do we know the base?
1190 elif n[1] and n[1] in m: # do we know the base?
1189 self.ui.debug(_("found incomplete branch %s:%s\n")
1191 self.ui.debug(_("found incomplete branch %s:%s\n")
1190 % (short(n[0]), short(n[1])))
1192 % (short(n[0]), short(n[1])))
1191 search.append(n) # schedule branch range for scanning
1193 search.append(n) # schedule branch range for scanning
1192 seenbranch[n] = 1
1194 seenbranch[n] = 1
1193 else:
1195 else:
1194 if n[1] not in seen and n[1] not in fetch:
1196 if n[1] not in seen and n[1] not in fetch:
1195 if n[2] in m and n[3] in m:
1197 if n[2] in m and n[3] in m:
1196 self.ui.debug(_("found new changeset %s\n") %
1198 self.ui.debug(_("found new changeset %s\n") %
1197 short(n[1]))
1199 short(n[1]))
1198 fetch[n[1]] = 1 # earliest unknown
1200 fetch[n[1]] = 1 # earliest unknown
1199 for p in n[2:4]:
1201 for p in n[2:4]:
1200 if p in m:
1202 if p in m:
1201 base[p] = 1 # latest known
1203 base[p] = 1 # latest known
1202
1204
1203 for p in n[2:4]:
1205 for p in n[2:4]:
1204 if p not in req and p not in m:
1206 if p not in req and p not in m:
1205 r.append(p)
1207 r.append(p)
1206 req[p] = 1
1208 req[p] = 1
1207 seen[n[0]] = 1
1209 seen[n[0]] = 1
1208
1210
1209 if r:
1211 if r:
1210 reqcnt += 1
1212 reqcnt += 1
1211 self.ui.debug(_("request %d: %s\n") %
1213 self.ui.debug(_("request %d: %s\n") %
1212 (reqcnt, " ".join(map(short, r))))
1214 (reqcnt, " ".join(map(short, r))))
1213 for p in xrange(0, len(r), 10):
1215 for p in xrange(0, len(r), 10):
1214 for b in remote.branches(r[p:p+10]):
1216 for b in remote.branches(r[p:p+10]):
1215 self.ui.debug(_("received %s:%s\n") %
1217 self.ui.debug(_("received %s:%s\n") %
1216 (short(b[0]), short(b[1])))
1218 (short(b[0]), short(b[1])))
1217 unknown.append(b)
1219 unknown.append(b)
1218
1220
1219 # do binary search on the branches we found
1221 # do binary search on the branches we found
1220 while search:
1222 while search:
1221 n = search.pop(0)
1223 n = search.pop(0)
1222 reqcnt += 1
1224 reqcnt += 1
1223 l = remote.between([(n[0], n[1])])[0]
1225 l = remote.between([(n[0], n[1])])[0]
1224 l.append(n[1])
1226 l.append(n[1])
1225 p = n[0]
1227 p = n[0]
1226 f = 1
1228 f = 1
1227 for i in l:
1229 for i in l:
1228 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1230 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1229 if i in m:
1231 if i in m:
1230 if f <= 2:
1232 if f <= 2:
1231 self.ui.debug(_("found new branch changeset %s\n") %
1233 self.ui.debug(_("found new branch changeset %s\n") %
1232 short(p))
1234 short(p))
1233 fetch[p] = 1
1235 fetch[p] = 1
1234 base[i] = 1
1236 base[i] = 1
1235 else:
1237 else:
1236 self.ui.debug(_("narrowed branch search to %s:%s\n")
1238 self.ui.debug(_("narrowed branch search to %s:%s\n")
1237 % (short(p), short(i)))
1239 % (short(p), short(i)))
1238 search.append((p, i))
1240 search.append((p, i))
1239 break
1241 break
1240 p, f = i, f * 2
1242 p, f = i, f * 2
1241
1243
1242 # sanity check our fetch list
1244 # sanity check our fetch list
1243 for f in fetch.keys():
1245 for f in fetch.keys():
1244 if f in m:
1246 if f in m:
1245 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1247 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1246
1248
1247 if base.keys() == [nullid]:
1249 if base.keys() == [nullid]:
1248 if force:
1250 if force:
1249 self.ui.warn(_("warning: repository is unrelated\n"))
1251 self.ui.warn(_("warning: repository is unrelated\n"))
1250 else:
1252 else:
1251 raise util.Abort(_("repository is unrelated"))
1253 raise util.Abort(_("repository is unrelated"))
1252
1254
1253 self.ui.debug(_("found new changesets starting at ") +
1255 self.ui.debug(_("found new changesets starting at ") +
1254 " ".join([short(f) for f in fetch]) + "\n")
1256 " ".join([short(f) for f in fetch]) + "\n")
1255
1257
1256 self.ui.debug(_("%d total queries\n") % reqcnt)
1258 self.ui.debug(_("%d total queries\n") % reqcnt)
1257
1259
1258 return fetch.keys()
1260 return fetch.keys()
1259
1261
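# [Editor's illustration -- not part of the diffed source; names are hypothetical.]
# The contract of findincoming, restated on toy data it could never see
# directly: the result is the set of "roots" of what is missing locally, i.e.
# missing nodes all of whose parents we already have.  The branch/between
# protocol above exists precisely because the local side cannot inspect
# remote_parents like this.
def toy_missing_roots(remote_parents, local_nodes):
    missing = [n for n in remote_parents if n not in local_nodes]
    return [n for n in missing
            if all(p is None or p in local_nodes for p in remote_parents[n])]
# toy_missing_roots({'a': [None], 'b': ['a'], 'c': ['b']}, {'a'}) == ['b']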
1260 def findoutgoing(self, remote, base=None, heads=None, force=False):
1262 def findoutgoing(self, remote, base=None, heads=None, force=False):
1261 """Return list of nodes that are roots of subsets not in remote
1263 """Return list of nodes that are roots of subsets not in remote
1262
1264
1263 If base dict is specified, assume that these nodes and their parents
1265 If base dict is specified, assume that these nodes and their parents
1264 exist on the remote side.
1266 exist on the remote side.
1265 If a list of heads is specified, return only nodes which are heads
1267 If a list of heads is specified, return only nodes which are heads
1266 or ancestors of these heads, and return a second element which
1268 or ancestors of these heads, and return a second element which
1267 contains all remote heads which get new children.
1269 contains all remote heads which get new children.
1268 """
1270 """
1269 if base == None:
1271 if base == None:
1270 base = {}
1272 base = {}
1271 self.findincoming(remote, base, heads, force=force)
1273 self.findincoming(remote, base, heads, force=force)
1272
1274
1273 self.ui.debug(_("common changesets up to ")
1275 self.ui.debug(_("common changesets up to ")
1274 + " ".join(map(short, base.keys())) + "\n")
1276 + " ".join(map(short, base.keys())) + "\n")
1275
1277
1276 remain = dict.fromkeys(self.changelog.nodemap)
1278 remain = dict.fromkeys(self.changelog.nodemap)
1277
1279
1278 # prune everything remote has from the tree
1280 # prune everything remote has from the tree
1279 del remain[nullid]
1281 del remain[nullid]
1280 remove = base.keys()
1282 remove = base.keys()
1281 while remove:
1283 while remove:
1282 n = remove.pop(0)
1284 n = remove.pop(0)
1283 if n in remain:
1285 if n in remain:
1284 del remain[n]
1286 del remain[n]
1285 for p in self.changelog.parents(n):
1287 for p in self.changelog.parents(n):
1286 remove.append(p)
1288 remove.append(p)
1287
1289
1288 # find every node whose parents have been pruned
1290 # find every node whose parents have been pruned
1289 subset = []
1291 subset = []
1290 # find every remote head that will get new children
1292 # find every remote head that will get new children
1291 updated_heads = {}
1293 updated_heads = {}
1292 for n in remain:
1294 for n in remain:
1293 p1, p2 = self.changelog.parents(n)
1295 p1, p2 = self.changelog.parents(n)
1294 if p1 not in remain and p2 not in remain:
1296 if p1 not in remain and p2 not in remain:
1295 subset.append(n)
1297 subset.append(n)
1296 if heads:
1298 if heads:
1297 if p1 in heads:
1299 if p1 in heads:
1298 updated_heads[p1] = True
1300 updated_heads[p1] = True
1299 if p2 in heads:
1301 if p2 in heads:
1300 updated_heads[p2] = True
1302 updated_heads[p2] = True
1301
1303
1302 # this is the set of all roots we have to push
1304 # this is the set of all roots we have to push
1303 if heads:
1305 if heads:
1304 return subset, updated_heads.keys()
1306 return subset, updated_heads.keys()
1305 else:
1307 else:
1306 return subset
1308 return subset
1307
1309
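# [Editor's illustration -- not part of the diffed source; names are hypothetical.]
# The pruning loop above, reduced to its essentials: drop everything the remote
# side is known to have (the common nodes in base plus all their ancestors),
# then report the remaining local nodes whose parents were all dropped -- those
# are the roots of what has to be pushed.
def toy_outgoing_roots(parents, base):
    remain = set(parents)
    work = list(base)
    while work:
        n = work.pop()
        if n in remain:
            remain.discard(n)
            work.extend(p for p in parents[n] if p is not None)
    return [n for n in remain
            if all(p not in remain for p in parents[n])]
# toy_outgoing_roots({'a': [None], 'b': ['a'], 'c': ['b']}, ['a']) == ['b']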
1308 def pull(self, remote, heads=None, force=False, lock=None):
1310 def pull(self, remote, heads=None, force=False, lock=None):
1309 mylock = False
1311 mylock = False
1310 if not lock:
1312 if not lock:
1311 lock = self.lock()
1313 lock = self.lock()
1312 mylock = True
1314 mylock = True
1313
1315
1314 try:
1316 try:
1315 fetch = self.findincoming(remote, force=force)
1317 fetch = self.findincoming(remote, force=force)
1316 if fetch == [nullid]:
1318 if fetch == [nullid]:
1317 self.ui.status(_("requesting all changes\n"))
1319 self.ui.status(_("requesting all changes\n"))
1318
1320
1319 if not fetch:
1321 if not fetch:
1320 self.ui.status(_("no changes found\n"))
1322 self.ui.status(_("no changes found\n"))
1321 return 0
1323 return 0
1322
1324
1323 if heads is None:
1325 if heads is None:
1324 cg = remote.changegroup(fetch, 'pull')
1326 cg = remote.changegroup(fetch, 'pull')
1325 else:
1327 else:
1326 if 'changegroupsubset' not in remote.capabilities:
1328 if 'changegroupsubset' not in remote.capabilities:
1327 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1329 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1328 cg = remote.changegroupsubset(fetch, heads, 'pull')
1330 cg = remote.changegroupsubset(fetch, heads, 'pull')
1329 return self.addchangegroup(cg, 'pull', remote.url())
1331 return self.addchangegroup(cg, 'pull', remote.url())
1330 finally:
1332 finally:
1331 if mylock:
1333 if mylock:
1332 lock.release()
1334 lock.release()
1333
1335
1334 def push(self, remote, force=False, revs=None):
1336 def push(self, remote, force=False, revs=None):
1335 # there are two ways to push to remote repo:
1337 # there are two ways to push to remote repo:
1336 #
1338 #
1337 # addchangegroup assumes local user can lock remote
1339 # addchangegroup assumes local user can lock remote
1338 # repo (local filesystem, old ssh servers).
1340 # repo (local filesystem, old ssh servers).
1339 #
1341 #
1340 # unbundle assumes local user cannot lock remote repo (new ssh
1342 # unbundle assumes local user cannot lock remote repo (new ssh
1341 # servers, http servers).
1343 # servers, http servers).
1342
1344
1343 if remote.capable('unbundle'):
1345 if remote.capable('unbundle'):
1344 return self.push_unbundle(remote, force, revs)
1346 return self.push_unbundle(remote, force, revs)
1345 return self.push_addchangegroup(remote, force, revs)
1347 return self.push_addchangegroup(remote, force, revs)
1346
1348
1347 def prepush(self, remote, force, revs):
1349 def prepush(self, remote, force, revs):
1348 base = {}
1350 base = {}
1349 remote_heads = remote.heads()
1351 remote_heads = remote.heads()
1350 inc = self.findincoming(remote, base, remote_heads, force=force)
1352 inc = self.findincoming(remote, base, remote_heads, force=force)
1351
1353
1352 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1354 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1353 if revs is not None:
1355 if revs is not None:
1354 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1356 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1355 else:
1357 else:
1356 bases, heads = update, self.changelog.heads()
1358 bases, heads = update, self.changelog.heads()
1357
1359
1358 if not bases:
1360 if not bases:
1359 self.ui.status(_("no changes found\n"))
1361 self.ui.status(_("no changes found\n"))
1360 return None, 1
1362 return None, 1
1361 elif not force:
1363 elif not force:
1362 # check if we're creating new remote heads
1364 # check if we're creating new remote heads
1363 # to be a remote head after push, node must be either
1365 # to be a remote head after push, node must be either
1364 # - unknown locally
1366 # - unknown locally
1365 # - a local outgoing head descended from update
1367 # - a local outgoing head descended from update
1366 # - a remote head that's known locally and not
1368 # - a remote head that's known locally and not
1367 # ancestral to an outgoing head
1369 # ancestral to an outgoing head
1368
1370
1369 warn = 0
1371 warn = 0
1370
1372
1371 if remote_heads == [nullid]:
1373 if remote_heads == [nullid]:
1372 warn = 0
1374 warn = 0
1373 elif not revs and len(heads) > len(remote_heads):
1375 elif not revs and len(heads) > len(remote_heads):
1374 warn = 1
1376 warn = 1
1375 else:
1377 else:
1376 newheads = list(heads)
1378 newheads = list(heads)
1377 for r in remote_heads:
1379 for r in remote_heads:
1378 if r in self.changelog.nodemap:
1380 if r in self.changelog.nodemap:
1379 desc = self.changelog.heads(r, heads)
1381 desc = self.changelog.heads(r, heads)
1380 l = [h for h in heads if h in desc]
1382 l = [h for h in heads if h in desc]
1381 if not l:
1383 if not l:
1382 newheads.append(r)
1384 newheads.append(r)
1383 else:
1385 else:
1384 newheads.append(r)
1386 newheads.append(r)
1385 if len(newheads) > len(remote_heads):
1387 if len(newheads) > len(remote_heads):
1386 warn = 1
1388 warn = 1
1387
1389
1388 if warn:
1390 if warn:
1389 self.ui.warn(_("abort: push creates new remote branches!\n"))
1391 self.ui.warn(_("abort: push creates new remote branches!\n"))
1390 self.ui.status(_("(did you forget to merge?"
1392 self.ui.status(_("(did you forget to merge?"
1391 " use push -f to force)\n"))
1393 " use push -f to force)\n"))
1392 return None, 1
1394 return None, 1
1393 elif inc:
1395 elif inc:
1394 self.ui.warn(_("note: unsynced remote changes!\n"))
1396 self.ui.warn(_("note: unsynced remote changes!\n"))
1395
1397
1396
1398
1397 if revs is None:
1399 if revs is None:
1398 cg = self.changegroup(update, 'push')
1400 cg = self.changegroup(update, 'push')
1399 else:
1401 else:
1400 cg = self.changegroupsubset(update, revs, 'push')
1402 cg = self.changegroupsubset(update, revs, 'push')
1401 return cg, remote_heads
1403 return cg, remote_heads
1402
1404
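# [Editor's illustration -- not part of the diffed source; names are hypothetical.]
# A minimal sketch of the "new remote heads" test described in the comments of
# prepush above: after the push, every pushed head is a head, and a known
# remote head stays a head unless some pushed head descends from it; remote
# heads unknown locally are kept as heads too, as the real code does.
def toy_creates_new_heads(pushed_heads, remote_heads, known, descends_from):
    # descends_from(h, r): True if pushed head h descends from r in the local DAG
    newheads = list(pushed_heads)
    for r in remote_heads:
        if not known(r) or not any(descends_from(h, r) for h in pushed_heads):
            newheads.append(r)          # r is still a head after the push
    return len(newheads) > len(remote_heads)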
1403 def push_addchangegroup(self, remote, force, revs):
1405 def push_addchangegroup(self, remote, force, revs):
1404 lock = remote.lock()
1406 lock = remote.lock()
1405
1407
1406 ret = self.prepush(remote, force, revs)
1408 ret = self.prepush(remote, force, revs)
1407 if ret[0] is not None:
1409 if ret[0] is not None:
1408 cg, remote_heads = ret
1410 cg, remote_heads = ret
1409 return remote.addchangegroup(cg, 'push', self.url())
1411 return remote.addchangegroup(cg, 'push', self.url())
1410 return ret[1]
1412 return ret[1]
1411
1413
1412 def push_unbundle(self, remote, force, revs):
1414 def push_unbundle(self, remote, force, revs):
1413 # local repo finds heads on server, finds out what revs it
1415 # local repo finds heads on server, finds out what revs it
1414 # must push. once revs transferred, if server finds it has
1416 # must push. once revs transferred, if server finds it has
1415 # different heads (someone else won commit/push race), server
1417 # different heads (someone else won commit/push race), server
1416 # aborts.
1418 # aborts.
1417
1419
1418 ret = self.prepush(remote, force, revs)
1420 ret = self.prepush(remote, force, revs)
1419 if ret[0] is not None:
1421 if ret[0] is not None:
1420 cg, remote_heads = ret
1422 cg, remote_heads = ret
1421 if force: remote_heads = ['force']
1423 if force: remote_heads = ['force']
1422 return remote.unbundle(cg, remote_heads, 'push')
1424 return remote.unbundle(cg, remote_heads, 'push')
1423 return ret[1]
1425 return ret[1]
1424
1426
1425 def changegroupinfo(self, nodes):
1427 def changegroupinfo(self, nodes):
1426 self.ui.note(_("%d changesets found\n") % len(nodes))
1428 self.ui.note(_("%d changesets found\n") % len(nodes))
1427 if self.ui.debugflag:
1429 if self.ui.debugflag:
1428 self.ui.debug(_("List of changesets:\n"))
1430 self.ui.debug(_("List of changesets:\n"))
1429 for node in nodes:
1431 for node in nodes:
1430 self.ui.debug("%s\n" % hex(node))
1432 self.ui.debug("%s\n" % hex(node))
1431
1433
1432 def changegroupsubset(self, bases, heads, source):
1434 def changegroupsubset(self, bases, heads, source):
1433 """This function generates a changegroup consisting of all the nodes
1435 """This function generates a changegroup consisting of all the nodes
1434 that are descendants of any of the bases, and ancestors of any of
1436 that are descendants of any of the bases, and ancestors of any of
1435 the heads.
1437 the heads.
1436
1438
1437 It is fairly complex as determining which filenodes and which
1439 It is fairly complex as determining which filenodes and which
1438 manifest nodes need to be included for the changeset to be complete
1440 manifest nodes need to be included for the changeset to be complete
1439 is non-trivial.
1441 is non-trivial.
1440
1442
1441 Another wrinkle is doing the reverse, figuring out which changeset in
1443 Another wrinkle is doing the reverse, figuring out which changeset in
1442 the changegroup a particular filenode or manifestnode belongs to."""
1444 the changegroup a particular filenode or manifestnode belongs to."""
1443
1445
1444 self.hook('preoutgoing', throw=True, source=source)
1446 self.hook('preoutgoing', throw=True, source=source)
1445
1447
1446 # Set up some initial variables
1448 # Set up some initial variables
1447 # Make it easy to refer to self.changelog
1449 # Make it easy to refer to self.changelog
1448 cl = self.changelog
1450 cl = self.changelog
1449 # msng is short for missing - compute the list of changesets in this
1451 # msng is short for missing - compute the list of changesets in this
1450 # changegroup.
1452 # changegroup.
1451 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1453 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1452 self.changegroupinfo(msng_cl_lst)
1454 self.changegroupinfo(msng_cl_lst)
1453 # Some bases may turn out to be superfluous, and some heads may be
1455 # Some bases may turn out to be superfluous, and some heads may be
1454 # too. nodesbetween will return the minimal set of bases and heads
1456 # too. nodesbetween will return the minimal set of bases and heads
1455 # necessary to re-create the changegroup.
1457 # necessary to re-create the changegroup.
1456
1458
1457 # Known heads are the list of heads that it is assumed the recipient
1459 # Known heads are the list of heads that it is assumed the recipient
1458 # of this changegroup will know about.
1460 # of this changegroup will know about.
1459 knownheads = {}
1461 knownheads = {}
1460 # We assume that all parents of bases are known heads.
1462 # We assume that all parents of bases are known heads.
1461 for n in bases:
1463 for n in bases:
1462 for p in cl.parents(n):
1464 for p in cl.parents(n):
1463 if p != nullid:
1465 if p != nullid:
1464 knownheads[p] = 1
1466 knownheads[p] = 1
1465 knownheads = knownheads.keys()
1467 knownheads = knownheads.keys()
1466 if knownheads:
1468 if knownheads:
1467 # Now that we know what heads are known, we can compute which
1469 # Now that we know what heads are known, we can compute which
1468 # changesets are known. The recipient must know about all
1470 # changesets are known. The recipient must know about all
1469 # changesets required to reach the known heads from the null
1471 # changesets required to reach the known heads from the null
1470 # changeset.
1472 # changeset.
1471 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1473 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1472 junk = None
1474 junk = None
1473 # Transform the list into an ersatz set.
1475 # Transform the list into an ersatz set.
1474 has_cl_set = dict.fromkeys(has_cl_set)
1476 has_cl_set = dict.fromkeys(has_cl_set)
1475 else:
1477 else:
1476 # If there were no known heads, the recipient cannot be assumed to
1478 # If there were no known heads, the recipient cannot be assumed to
1477 # know about any changesets.
1479 # know about any changesets.
1478 has_cl_set = {}
1480 has_cl_set = {}
1479
1481
1480 # Make it easy to refer to self.manifest
1482 # Make it easy to refer to self.manifest
1481 mnfst = self.manifest
1483 mnfst = self.manifest
1482 # We don't know which manifests are missing yet
1484 # We don't know which manifests are missing yet
1483 msng_mnfst_set = {}
1485 msng_mnfst_set = {}
1484 # Nor do we know which filenodes are missing.
1486 # Nor do we know which filenodes are missing.
1485 msng_filenode_set = {}
1487 msng_filenode_set = {}
1486
1488
1487 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1489 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1488 junk = None
1490 junk = None
1489
1491
1490 # A changeset always belongs to itself, so the changenode lookup
1492 # A changeset always belongs to itself, so the changenode lookup
1491 # function for a changenode is identity.
1493 # function for a changenode is identity.
1492 def identity(x):
1494 def identity(x):
1493 return x
1495 return x
1494
1496
1495 # A function generating function. Sets up an environment for the
1497 # A function generating function. Sets up an environment for the
1496 # inner function.
1498 # inner function.
1497 def cmp_by_rev_func(revlog):
1499 def cmp_by_rev_func(revlog):
1498 # Compare two nodes by their revision number in the environment's
1500 # Compare two nodes by their revision number in the environment's
1499 # revision history. Since the revision number both represents the
1501 # revision history. Since the revision number both represents the
1500 # most efficient order to read the nodes in, and represents a
1502 # most efficient order to read the nodes in, and represents a
1501 # topological sorting of the nodes, this function is often useful.
1503 # topological sorting of the nodes, this function is often useful.
1502 def cmp_by_rev(a, b):
1504 def cmp_by_rev(a, b):
1503 return cmp(revlog.rev(a), revlog.rev(b))
1505 return cmp(revlog.rev(a), revlog.rev(b))
1504 return cmp_by_rev
1506 return cmp_by_rev
1505
1507
1506 # If we determine that a particular file or manifest node must be a
1508 # If we determine that a particular file or manifest node must be a
1507 # node that the recipient of the changegroup will already have, we can
1509 # node that the recipient of the changegroup will already have, we can
1508 # also assume the recipient will have all the parents. This function
1510 # also assume the recipient will have all the parents. This function
1509 # prunes them from the set of missing nodes.
1511 # prunes them from the set of missing nodes.
1510 def prune_parents(revlog, hasset, msngset):
1512 def prune_parents(revlog, hasset, msngset):
1511 haslst = hasset.keys()
1513 haslst = hasset.keys()
1512 haslst.sort(cmp_by_rev_func(revlog))
1514 haslst.sort(cmp_by_rev_func(revlog))
1513 for node in haslst:
1515 for node in haslst:
1514 parentlst = [p for p in revlog.parents(node) if p != nullid]
1516 parentlst = [p for p in revlog.parents(node) if p != nullid]
1515 while parentlst:
1517 while parentlst:
1516 n = parentlst.pop()
1518 n = parentlst.pop()
1517 if n not in hasset:
1519 if n not in hasset:
1518 hasset[n] = 1
1520 hasset[n] = 1
1519 p = [p for p in revlog.parents(n) if p != nullid]
1521 p = [p for p in revlog.parents(n) if p != nullid]
1520 parentlst.extend(p)
1522 parentlst.extend(p)
1521 for n in hasset:
1523 for n in hasset:
1522 msngset.pop(n, None)
1524 msngset.pop(n, None)
1523
1525
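# [Editor's illustration -- not part of the diffed source; names are hypothetical.]
# prune_parents in miniature: once a node is known to be present on the
# receiving side, so are all of its ancestors, so every one of them can be
# dropped from the "missing" dict.  (The real code additionally walks the known
# nodes in revision order, which the cmp_by_rev comment earlier notes is the
# efficient order to process them in.)
def toy_prune_parents(parents, hasset, msngset):
    work = list(hasset)
    while work:
        n = work.pop()
        for p in parents[n]:
            if p is not None and p not in hasset:
                hasset[p] = 1
                work.append(p)
    for n in hasset:
        msngset.pop(n, None)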
1524 # This is a function generating function used to set up an environment
1526 # This is a function generating function used to set up an environment
1525 # for the inner function to execute in.
1527 # for the inner function to execute in.
1526 def manifest_and_file_collector(changedfileset):
1528 def manifest_and_file_collector(changedfileset):
1527 # This is an information gathering function that gathers
1529 # This is an information gathering function that gathers
1528 # information from each changeset node that goes out as part of
1530 # information from each changeset node that goes out as part of
1529 # the changegroup. The information gathered is a list of which
1531 # the changegroup. The information gathered is a list of which
1530 # manifest nodes are potentially required (the recipient may
1532 # manifest nodes are potentially required (the recipient may
1531 # already have them) and total list of all files which were
1533 # already have them) and total list of all files which were
1532 # changed in any changeset in the changegroup.
1534 # changed in any changeset in the changegroup.
1533 #
1535 #
1534 # We also remember the first changenode we saw any manifest
1536 # We also remember the first changenode we saw any manifest
1535 # referenced by so we can later determine which changenode 'owns'
1537 # referenced by so we can later determine which changenode 'owns'
1536 # the manifest.
1538 # the manifest.
1537 def collect_manifests_and_files(clnode):
1539 def collect_manifests_and_files(clnode):
1538 c = cl.read(clnode)
1540 c = cl.read(clnode)
1539 for f in c[3]:
1541 for f in c[3]:
1540 # This is to make sure we only have one instance of each
1542 # This is to make sure we only have one instance of each
1541 # filename string for each filename.
1543 # filename string for each filename.
1542 changedfileset.setdefault(f, f)
1544 changedfileset.setdefault(f, f)
1543 msng_mnfst_set.setdefault(c[0], clnode)
1545 msng_mnfst_set.setdefault(c[0], clnode)
1544 return collect_manifests_and_files
1546 return collect_manifests_and_files
1545
1547
1546 # Figure out which manifest nodes (of the ones we think might be part
1548 # Figure out which manifest nodes (of the ones we think might be part
1547 # of the changegroup) the recipient must know about and remove them
1549 # of the changegroup) the recipient must know about and remove them
1548 # from the changegroup.
1550 # from the changegroup.
1549 def prune_manifests():
1551 def prune_manifests():
1550 has_mnfst_set = {}
1552 has_mnfst_set = {}
1551 for n in msng_mnfst_set:
1553 for n in msng_mnfst_set:
1552 # If a 'missing' manifest thinks it belongs to a changenode
1554 # If a 'missing' manifest thinks it belongs to a changenode
1553 # the recipient is assumed to have, obviously the recipient
1555 # the recipient is assumed to have, obviously the recipient
1554 # must have that manifest.
1556 # must have that manifest.
1555 linknode = cl.node(mnfst.linkrev(n))
1557 linknode = cl.node(mnfst.linkrev(n))
1556 if linknode in has_cl_set:
1558 if linknode in has_cl_set:
1557 has_mnfst_set[n] = 1
1559 has_mnfst_set[n] = 1
1558 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1560 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1559
1561
1560 # Use the information collected in collect_manifests_and_files to say
1562 # Use the information collected in collect_manifests_and_files to say
1561 # which changenode any manifestnode belongs to.
1563 # which changenode any manifestnode belongs to.
1562 def lookup_manifest_link(mnfstnode):
1564 def lookup_manifest_link(mnfstnode):
1563 return msng_mnfst_set[mnfstnode]
1565 return msng_mnfst_set[mnfstnode]
1564
1566
1565 # A function generating function that sets up the initial environment
1567 # A function generating function that sets up the initial environment
1566 # the inner function.
1568 # the inner function.
1567 def filenode_collector(changedfiles):
1569 def filenode_collector(changedfiles):
1568 next_rev = [0]
1570 next_rev = [0]
1569 # This gathers information from each manifestnode included in the
1571 # This gathers information from each manifestnode included in the
1570 # changegroup about which filenodes the manifest node references
1572 # changegroup about which filenodes the manifest node references
1571 # so we can include those in the changegroup too.
1573 # so we can include those in the changegroup too.
1572 #
1574 #
1573 # It also remembers which changenode each filenode belongs to. It
1575 # It also remembers which changenode each filenode belongs to. It
1574 # does this by assuming that a filenode belongs to the changenode
1576 # does this by assuming that a filenode belongs to the changenode
1575 # the first manifest that references it belongs to.
1577 # the first manifest that references it belongs to.
1576 def collect_msng_filenodes(mnfstnode):
1578 def collect_msng_filenodes(mnfstnode):
1577 r = mnfst.rev(mnfstnode)
1579 r = mnfst.rev(mnfstnode)
1578 if r == next_rev[0]:
1580 if r == next_rev[0]:
1579 # If the last rev we looked at was the one just previous,
1581 # If the last rev we looked at was the one just previous,
1580 # we only need to see a diff.
1582 # we only need to see a diff.
1581 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1583 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1582 # For each line in the delta
1584 # For each line in the delta
1583 for dline in delta.splitlines():
1585 for dline in delta.splitlines():
1584 # get the filename and filenode for that line
1586 # get the filename and filenode for that line
1585 f, fnode = dline.split('\0')
1587 f, fnode = dline.split('\0')
1586 fnode = bin(fnode[:40])
1588 fnode = bin(fnode[:40])
1587 f = changedfiles.get(f, None)
1589 f = changedfiles.get(f, None)
1588 # And if the file is in the list of files we care
1590 # And if the file is in the list of files we care
1589 # about.
1591 # about.
1590 if f is not None:
1592 if f is not None:
1591 # Get the changenode this manifest belongs to
1593 # Get the changenode this manifest belongs to
1592 clnode = msng_mnfst_set[mnfstnode]
1594 clnode = msng_mnfst_set[mnfstnode]
1593 # Create the set of filenodes for the file if
1595 # Create the set of filenodes for the file if
1594 # there isn't one already.
1596 # there isn't one already.
1595 ndset = msng_filenode_set.setdefault(f, {})
1597 ndset = msng_filenode_set.setdefault(f, {})
1596 # And set the filenode's changelog node to the
1598 # And set the filenode's changelog node to the
1597 # manifest's if it hasn't been set already.
1599 # manifest's if it hasn't been set already.
1598 ndset.setdefault(fnode, clnode)
1600 ndset.setdefault(fnode, clnode)
1599 else:
1601 else:
1600 # Otherwise we need a full manifest.
1602 # Otherwise we need a full manifest.
1601 m = mnfst.read(mnfstnode)
1603 m = mnfst.read(mnfstnode)
1602 # For every file we care about.
1604 # For every file we care about.
1603 for f in changedfiles:
1605 for f in changedfiles:
1604 fnode = m.get(f, None)
1606 fnode = m.get(f, None)
1605 # If it's in the manifest
1607 # If it's in the manifest
1606 if fnode is not None:
1608 if fnode is not None:
1607 # See comments above.
1609 # See comments above.
1608 clnode = msng_mnfst_set[mnfstnode]
1610 clnode = msng_mnfst_set[mnfstnode]
1609 ndset = msng_filenode_set.setdefault(f, {})
1611 ndset = msng_filenode_set.setdefault(f, {})
1610 ndset.setdefault(fnode, clnode)
1612 ndset.setdefault(fnode, clnode)
1611 # Remember the revision we hope to see next.
1613 # Remember the revision we hope to see next.
1612 next_rev[0] = r + 1
1614 next_rev[0] = r + 1
1613 return collect_msng_filenodes
1615 return collect_msng_filenodes
1614
1616
1615 # We have a list of filenodes we think we need for a file; let's remove
1617 # We have a list of filenodes we think we need for a file; let's remove
1616 # all those we know the recipient must have.
1618 # all those we know the recipient must have.
1617 def prune_filenodes(f, filerevlog):
1619 def prune_filenodes(f, filerevlog):
1618 msngset = msng_filenode_set[f]
1620 msngset = msng_filenode_set[f]
1619 hasset = {}
1621 hasset = {}
1620 # If a 'missing' filenode thinks it belongs to a changenode we
1622 # If a 'missing' filenode thinks it belongs to a changenode we
1621 # assume the recipient must have, then the recipient must have
1623 # assume the recipient must have, then the recipient must have
1622 # that filenode.
1624 # that filenode.
1623 for n in msngset:
1625 for n in msngset:
1624 clnode = cl.node(filerevlog.linkrev(n))
1626 clnode = cl.node(filerevlog.linkrev(n))
1625 if clnode in has_cl_set:
1627 if clnode in has_cl_set:
1626 hasset[n] = 1
1628 hasset[n] = 1
1627 prune_parents(filerevlog, hasset, msngset)
1629 prune_parents(filerevlog, hasset, msngset)
1628
1630
1629 # A function generator function that sets up a context for the
1631 # A function generator function that sets up a context for the
1630 # inner function.
1632 # inner function.
1631 def lookup_filenode_link_func(fname):
1633 def lookup_filenode_link_func(fname):
1632 msngset = msng_filenode_set[fname]
1634 msngset = msng_filenode_set[fname]
1633 # Lookup the changenode the filenode belongs to.
1635 # Lookup the changenode the filenode belongs to.
1634 def lookup_filenode_link(fnode):
1636 def lookup_filenode_link(fnode):
1635 return msngset[fnode]
1637 return msngset[fnode]
1636 return lookup_filenode_link
1638 return lookup_filenode_link
1637
1639
1638 # Now that we have all these utility functions to help out and
1640 # Now that we have all these utility functions to help out and
1639 # logically divide up the task, generate the group.
1641 # logically divide up the task, generate the group.
1640 def gengroup():
1642 def gengroup():
1641 # The set of changed files starts empty.
1643 # The set of changed files starts empty.
1642 changedfiles = {}
1644 changedfiles = {}
1643 # Create a changenode group generator that will call our functions
1645 # Create a changenode group generator that will call our functions
1644 # back to lookup the owning changenode and collect information.
1646 # back to lookup the owning changenode and collect information.
1645 group = cl.group(msng_cl_lst, identity,
1647 group = cl.group(msng_cl_lst, identity,
1646 manifest_and_file_collector(changedfiles))
1648 manifest_and_file_collector(changedfiles))
1647 for chnk in group:
1649 for chnk in group:
1648 yield chnk
1650 yield chnk
1649
1651
1650 # The list of manifests has been collected by the generator
1652 # The list of manifests has been collected by the generator
1651 # calling our functions back.
1653 # calling our functions back.
1652 prune_manifests()
1654 prune_manifests()
1653 msng_mnfst_lst = msng_mnfst_set.keys()
1655 msng_mnfst_lst = msng_mnfst_set.keys()
1654 # Sort the manifestnodes by revision number.
1656 # Sort the manifestnodes by revision number.
1655 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1657 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1656 # Create a generator for the manifestnodes that calls our lookup
1658 # Create a generator for the manifestnodes that calls our lookup
1657 # and data collection functions back.
1659 # and data collection functions back.
1658 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1660 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1659 filenode_collector(changedfiles))
1661 filenode_collector(changedfiles))
1660 for chnk in group:
1662 for chnk in group:
1661 yield chnk
1663 yield chnk
1662
1664
1663 # These are no longer needed, dereference and toss the memory for
1665 # These are no longer needed, dereference and toss the memory for
1664 # them.
1666 # them.
1665 msng_mnfst_lst = None
1667 msng_mnfst_lst = None
1666 msng_mnfst_set.clear()
1668 msng_mnfst_set.clear()
1667
1669
1668 changedfiles = changedfiles.keys()
1670 changedfiles = changedfiles.keys()
1669 changedfiles.sort()
1671 changedfiles.sort()
1670 # Go through all our files in order sorted by name.
1672 # Go through all our files in order sorted by name.
1671 for fname in changedfiles:
1673 for fname in changedfiles:
1672 filerevlog = self.file(fname)
1674 filerevlog = self.file(fname)
1673 # Toss out the filenodes that the recipient isn't really
1675 # Toss out the filenodes that the recipient isn't really
1674 # missing.
1676 # missing.
1675 if msng_filenode_set.has_key(fname):
1677 if msng_filenode_set.has_key(fname):
1676 prune_filenodes(fname, filerevlog)
1678 prune_filenodes(fname, filerevlog)
1677 msng_filenode_lst = msng_filenode_set[fname].keys()
1679 msng_filenode_lst = msng_filenode_set[fname].keys()
1678 else:
1680 else:
1679 msng_filenode_lst = []
1681 msng_filenode_lst = []
1680 # If any filenodes are left, generate the group for them,
1682 # If any filenodes are left, generate the group for them,
1681 # otherwise don't bother.
1683 # otherwise don't bother.
1682 if len(msng_filenode_lst) > 0:
1684 if len(msng_filenode_lst) > 0:
1683 yield changegroup.genchunk(fname)
1685 yield changegroup.genchunk(fname)
1684 # Sort the filenodes by their revision #
1686 # Sort the filenodes by their revision #
1685 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1687 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1686 # Create a group generator and only pass in a changenode
1688 # Create a group generator and only pass in a changenode
1687 # lookup function as we need to collect no information
1689 # lookup function as we need to collect no information
1688 # from filenodes.
1690 # from filenodes.
1689 group = filerevlog.group(msng_filenode_lst,
1691 group = filerevlog.group(msng_filenode_lst,
1690 lookup_filenode_link_func(fname))
1692 lookup_filenode_link_func(fname))
1691 for chnk in group:
1693 for chnk in group:
1692 yield chnk
1694 yield chnk
1693 if msng_filenode_set.has_key(fname):
1695 if msng_filenode_set.has_key(fname):
1694 # Don't need this anymore, toss it to free memory.
1696 # Don't need this anymore, toss it to free memory.
1695 del msng_filenode_set[fname]
1697 del msng_filenode_set[fname]
1696 # Signal that no more groups are left.
1698 # Signal that no more groups are left.
1697 yield changegroup.closechunk()
1699 yield changegroup.closechunk()
1698
1700
1699 if msng_cl_lst:
1701 if msng_cl_lst:
1700 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1702 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1701
1703
1702 return util.chunkbuffer(gengroup())
1704 return util.chunkbuffer(gengroup())
1703
1705
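# [Editor's illustration -- not part of the diffed source; names are hypothetical.]
# collect_msng_filenodes above relies on the line format shared by manifests
# and manifest deltas: "<file name>\0<40 hex characters of the filenode>",
# possibly followed by a flag character, which is why only fnode[:40] is
# converted.  A toy parser for one such line, with binascii.unhexlify standing
# in for the revlog bin() helper:
import binascii
def toy_parse_manifest_line(dline):
    f, fnode = dline.split('\0')
    return f, binascii.unhexlify(fnode[:40])
# toy_parse_manifest_line('foo/bar.txt\0' + '1f' * 20)
#   == ('foo/bar.txt', b'\x1f' * 20)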
1704 def changegroup(self, basenodes, source):
1706 def changegroup(self, basenodes, source):
1705 """Generate a changegroup of all nodes that we have that a recipient
1707 """Generate a changegroup of all nodes that we have that a recipient
1706 doesn't.
1708 doesn't.
1707
1709
1708 This is much easier than the previous function as we can assume that
1710 This is much easier than the previous function as we can assume that
1709 the recipient has any changenode we aren't sending them."""
1711 the recipient has any changenode we aren't sending them."""
1710
1712
1711 self.hook('preoutgoing', throw=True, source=source)
1713 self.hook('preoutgoing', throw=True, source=source)
1712
1714
1713 cl = self.changelog
1715 cl = self.changelog
1714 nodes = cl.nodesbetween(basenodes, None)[0]
1716 nodes = cl.nodesbetween(basenodes, None)[0]
1715 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1717 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1716 self.changegroupinfo(nodes)
1718 self.changegroupinfo(nodes)
1717
1719
1718 def identity(x):
1720 def identity(x):
1719 return x
1721 return x
1720
1722
1721 def gennodelst(revlog):
1723 def gennodelst(revlog):
1722 for r in xrange(0, revlog.count()):
1724 for r in xrange(0, revlog.count()):
1723 n = revlog.node(r)
1725 n = revlog.node(r)
1724 if revlog.linkrev(n) in revset:
1726 if revlog.linkrev(n) in revset:
1725 yield n
1727 yield n
1726
1728
1727 def changed_file_collector(changedfileset):
1729 def changed_file_collector(changedfileset):
1728 def collect_changed_files(clnode):
1730 def collect_changed_files(clnode):
1729 c = cl.read(clnode)
1731 c = cl.read(clnode)
1730 for fname in c[3]:
1732 for fname in c[3]:
1731 changedfileset[fname] = 1
1733 changedfileset[fname] = 1
1732 return collect_changed_files
1734 return collect_changed_files
1733
1735
1734 def lookuprevlink_func(revlog):
1736 def lookuprevlink_func(revlog):
1735 def lookuprevlink(n):
1737 def lookuprevlink(n):
1736 return cl.node(revlog.linkrev(n))
1738 return cl.node(revlog.linkrev(n))
1737 return lookuprevlink
1739 return lookuprevlink
1738
1740
1739 def gengroup():
1741 def gengroup():
1740 # construct a list of all changed files
1742 # construct a list of all changed files
1741 changedfiles = {}
1743 changedfiles = {}
1742
1744
1743 for chnk in cl.group(nodes, identity,
1745 for chnk in cl.group(nodes, identity,
1744 changed_file_collector(changedfiles)):
1746 changed_file_collector(changedfiles)):
1745 yield chnk
1747 yield chnk
1746 changedfiles = changedfiles.keys()
1748 changedfiles = changedfiles.keys()
1747 changedfiles.sort()
1749 changedfiles.sort()
1748
1750
1749 mnfst = self.manifest
1751 mnfst = self.manifest
1750 nodeiter = gennodelst(mnfst)
1752 nodeiter = gennodelst(mnfst)
1751 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1753 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1752 yield chnk
1754 yield chnk
1753
1755
1754 for fname in changedfiles:
1756 for fname in changedfiles:
1755 filerevlog = self.file(fname)
1757 filerevlog = self.file(fname)
1756 nodeiter = gennodelst(filerevlog)
1758 nodeiter = gennodelst(filerevlog)
1757 nodeiter = list(nodeiter)
1759 nodeiter = list(nodeiter)
1758 if nodeiter:
1760 if nodeiter:
1759 yield changegroup.genchunk(fname)
1761 yield changegroup.genchunk(fname)
1760 lookup = lookuprevlink_func(filerevlog)
1762 lookup = lookuprevlink_func(filerevlog)
1761 for chnk in filerevlog.group(nodeiter, lookup):
1763 for chnk in filerevlog.group(nodeiter, lookup):
1762 yield chnk
1764 yield chnk
1763
1765
1764 yield changegroup.closechunk()
1766 yield changegroup.closechunk()
1765
1767
1766 if nodes:
1768 if nodes:
1767 self.hook('outgoing', node=hex(nodes[0]), source=source)
1769 self.hook('outgoing', node=hex(nodes[0]), source=source)
1768
1770
1769 return util.chunkbuffer(gengroup())
1771 return util.chunkbuffer(gengroup())
1770
1772
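# [Editor's illustration -- not part of the diffed source.]  A sketch of the
# length-prefixed framing that changegroup.genchunk/closechunk are assumed here
# to implement: each chunk is preceded by a 4-byte big-endian length that
# counts the header itself, and a zero-length header closes a group.  Treat the
# exact on-wire format as an assumption; changegroup.py is authoritative.
import struct
def toy_genchunk(data):
    return struct.pack(">l", len(data) + 4) + data
def toy_closechunk():
    return struct.pack(">l", 0)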
1771 def addchangegroup(self, source, srctype, url):
1773 def addchangegroup(self, source, srctype, url):
1772 """add changegroup to repo.
1774 """add changegroup to repo.
1773
1775
1774 return values:
1776 return values:
1775 - nothing changed or no source: 0
1777 - nothing changed or no source: 0
1776 - more heads than before: 1+added heads (2..n)
1778 - more heads than before: 1+added heads (2..n)
1777 - fewer heads than before: -1-removed heads (-2..-n)
1779 - fewer heads than before: -1-removed heads (-2..-n)
1778 - number of heads stays the same: 1
1780 - number of heads stays the same: 1
1779 """
1781 """
1780 def csmap(x):
1782 def csmap(x):
1781 self.ui.debug(_("add changeset %s\n") % short(x))
1783 self.ui.debug(_("add changeset %s\n") % short(x))
1782 return cl.count()
1784 return cl.count()
1783
1785
1784 def revmap(x):
1786 def revmap(x):
1785 return cl.rev(x)
1787 return cl.rev(x)
1786
1788
1787 if not source:
1789 if not source:
1788 return 0
1790 return 0
1789
1791
1790 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1792 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1791
1793
1792 changesets = files = revisions = 0
1794 changesets = files = revisions = 0
1793
1795
1794 tr = self.transaction()
1796 tr = self.transaction()
1795
1797
1796 # write changelog data to temp files so concurrent readers will not see
1798 # write changelog data to temp files so concurrent readers will not see
1797 # inconsistent view
1799 # inconsistent view
1798 cl = self.changelog
1800 cl = self.changelog
1799 cl.delayupdate()
1801 cl.delayupdate()
1800 oldheads = len(cl.heads())
1802 oldheads = len(cl.heads())
1801
1803
1802 # pull off the changeset group
1804 # pull off the changeset group
1803 self.ui.status(_("adding changesets\n"))
1805 self.ui.status(_("adding changesets\n"))
1804 cor = cl.count() - 1
1806 cor = cl.count() - 1
1805 chunkiter = changegroup.chunkiter(source)
1807 chunkiter = changegroup.chunkiter(source)
1806 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1808 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1807 raise util.Abort(_("received changelog group is empty"))
1809 raise util.Abort(_("received changelog group is empty"))
1808 cnr = cl.count() - 1
1810 cnr = cl.count() - 1
1809 changesets = cnr - cor
1811 changesets = cnr - cor
1810
1812
1811 # pull off the manifest group
1813 # pull off the manifest group
1812 self.ui.status(_("adding manifests\n"))
1814 self.ui.status(_("adding manifests\n"))
1813 chunkiter = changegroup.chunkiter(source)
1815 chunkiter = changegroup.chunkiter(source)
1814 # no need to check for empty manifest group here:
1816 # no need to check for empty manifest group here:
1815 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1817 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1816 # no new manifest will be created and the manifest group will
1818 # no new manifest will be created and the manifest group will
1817 # be empty during the pull
1819 # be empty during the pull
1818 self.manifest.addgroup(chunkiter, revmap, tr)
1820 self.manifest.addgroup(chunkiter, revmap, tr)
1819
1821
1820 # process the files
1822 # process the files
1821 self.ui.status(_("adding file changes\n"))
1823 self.ui.status(_("adding file changes\n"))
1822 while 1:
1824 while 1:
1823 f = changegroup.getchunk(source)
1825 f = changegroup.getchunk(source)
1824 if not f:
1826 if not f:
1825 break
1827 break
1826 self.ui.debug(_("adding %s revisions\n") % f)
1828 self.ui.debug(_("adding %s revisions\n") % f)
1827 fl = self.file(f)
1829 fl = self.file(f)
1828 o = fl.count()
1830 o = fl.count()
1829 chunkiter = changegroup.chunkiter(source)
1831 chunkiter = changegroup.chunkiter(source)
1830 if fl.addgroup(chunkiter, revmap, tr) is None:
1832 if fl.addgroup(chunkiter, revmap, tr) is None:
1831 raise util.Abort(_("received file revlog group is empty"))
1833 raise util.Abort(_("received file revlog group is empty"))
1832 revisions += fl.count() - o
1834 revisions += fl.count() - o
1833 files += 1
1835 files += 1
1834
1836
1835 # make changelog see real files again
1837 # make changelog see real files again
1836 cl.finalize(tr)
1838 cl.finalize(tr)
1837
1839
1838 newheads = len(self.changelog.heads())
1840 newheads = len(self.changelog.heads())
1839 heads = ""
1841 heads = ""
1840 if oldheads and newheads != oldheads:
1842 if oldheads and newheads != oldheads:
1841 heads = _(" (%+d heads)") % (newheads - oldheads)
1843 heads = _(" (%+d heads)") % (newheads - oldheads)
1842
1844
1843 self.ui.status(_("added %d changesets"
1845 self.ui.status(_("added %d changesets"
1844 " with %d changes to %d files%s\n")
1846 " with %d changes to %d files%s\n")
1845 % (changesets, revisions, files, heads))
1847 % (changesets, revisions, files, heads))
1846
1848
1847 if changesets > 0:
1849 if changesets > 0:
1848 self.hook('pretxnchangegroup', throw=True,
1850 self.hook('pretxnchangegroup', throw=True,
1849 node=hex(self.changelog.node(cor+1)), source=srctype,
1851 node=hex(self.changelog.node(cor+1)), source=srctype,
1850 url=url)
1852 url=url)
1851
1853
1852 tr.close()
1854 tr.close()
1853
1855
1854 if changesets > 0:
1856 if changesets > 0:
1855 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1857 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1856 source=srctype, url=url)
1858 source=srctype, url=url)
1857
1859
1858 for i in xrange(cor + 1, cnr + 1):
1860 for i in xrange(cor + 1, cnr + 1):
1859 self.hook("incoming", node=hex(self.changelog.node(i)),
1861 self.hook("incoming", node=hex(self.changelog.node(i)),
1860 source=srctype, url=url)
1862 source=srctype, url=url)
1861
1863
1862 # never return 0 here:
1864 # never return 0 here:
1863 if newheads < oldheads:
1865 if newheads < oldheads:
1864 return newheads - oldheads - 1
1866 return newheads - oldheads - 1
1865 else:
1867 else:
1866 return newheads - oldheads + 1
1868 return newheads - oldheads + 1
1867
1869
1868
1870
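# [Editor's illustration -- not part of the diffed source; hypothetical name.]
# How a caller might read the return value documented in the addchangegroup
# docstring above: 0 means nothing was added, values above 1 encode
# "added heads + 1", and negative values encode "-(removed heads) - 1".
def toy_describe_addchangegroup(ret):
    if ret == 0:
        return "no changes"
    if ret > 1:
        return "%d new head(s)" % (ret - 1)
    if ret < 0:
        return "%d head(s) removed" % (-ret - 1)
    return "changes added, head count unchanged"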
1869 def stream_in(self, remote):
1871 def stream_in(self, remote):
1870 fp = remote.stream_out()
1872 fp = remote.stream_out()
1871 l = fp.readline()
1873 l = fp.readline()
1872 try:
1874 try:
1873 resp = int(l)
1875 resp = int(l)
1874 except ValueError:
1876 except ValueError:
1875 raise util.UnexpectedOutput(
1877 raise util.UnexpectedOutput(
1876 _('Unexpected response from remote server:'), l)
1878 _('Unexpected response from remote server:'), l)
1877 if resp == 1:
1879 if resp == 1:
1878 raise util.Abort(_('operation forbidden by server'))
1880 raise util.Abort(_('operation forbidden by server'))
1879 elif resp == 2:
1881 elif resp == 2:
1880 raise util.Abort(_('locking the remote repository failed'))
1882 raise util.Abort(_('locking the remote repository failed'))
1881 elif resp != 0:
1883 elif resp != 0:
1882 raise util.Abort(_('the server sent an unknown error code'))
1884 raise util.Abort(_('the server sent an unknown error code'))
1883 self.ui.status(_('streaming all changes\n'))
1885 self.ui.status(_('streaming all changes\n'))
1884 l = fp.readline()
1886 l = fp.readline()
1885 try:
1887 try:
1886 total_files, total_bytes = map(int, l.split(' ', 1))
1888 total_files, total_bytes = map(int, l.split(' ', 1))
1887 except (ValueError, TypeError):
1889 except (ValueError, TypeError):
1888 raise util.UnexpectedOutput(
1890 raise util.UnexpectedOutput(
1889 _('Unexpected response from remote server:'), l)
1891 _('Unexpected response from remote server:'), l)
1890 self.ui.status(_('%d files to transfer, %s of data\n') %
1892 self.ui.status(_('%d files to transfer, %s of data\n') %
1891 (total_files, util.bytecount(total_bytes)))
1893 (total_files, util.bytecount(total_bytes)))
1892 start = time.time()
1894 start = time.time()
1893 for i in xrange(total_files):
1895 for i in xrange(total_files):
1894 # XXX doesn't support '\n' or '\r' in filenames
1896 # XXX doesn't support '\n' or '\r' in filenames
1895 l = fp.readline()
1897 l = fp.readline()
1896 try:
1898 try:
1897 name, size = l.split('\0', 1)
1899 name, size = l.split('\0', 1)
1898 size = int(size)
1900 size = int(size)
1899 except (ValueError, TypeError):
1901 except (ValueError, TypeError):
1900 raise util.UnexpectedOutput(
1902 raise util.UnexpectedOutput(
1901 _('Unexpected response from remote server:'), l)
1903 _('Unexpected response from remote server:'), l)
1902 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1904 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1903 ofp = self.sopener(name, 'w')
1905 ofp = self.sopener(name, 'w')
1904 for chunk in util.filechunkiter(fp, limit=size):
1906 for chunk in util.filechunkiter(fp, limit=size):
1905 ofp.write(chunk)
1907 ofp.write(chunk)
1906 ofp.close()
1908 ofp.close()
1907 elapsed = time.time() - start
1909 elapsed = time.time() - start
1908 if elapsed <= 0:
1910 if elapsed <= 0:
1909 elapsed = 0.001
1911 elapsed = 0.001
1910 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1912 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1911 (util.bytecount(total_bytes), elapsed,
1913 (util.bytecount(total_bytes), elapsed,
1912 util.bytecount(total_bytes / elapsed)))
1914 util.bytecount(total_bytes / elapsed)))
1913 self.invalidate()
1915 self.invalidate()
1914 return len(self.heads()) + 1
1916 return len(self.heads()) + 1
1915
1917
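For reference, the wire format consumed by stream_in is: a numeric status line, a '<total_files> <total_bytes>' header line, then for each file a 'name\0size' line followed by exactly size bytes of raw store data. A hedged, standalone sketch of a parser for that framing (parse_stream and its error handling are illustrative, not Mercurial code):

def parse_stream(fp):
    # fp is any file-like object positioned at the server's reply
    resp = int(fp.readline())          # 0 = ok, 1 = forbidden, 2 = lock failed
    if resp != 0:
        raise ValueError("streaming clone refused, code %d" % resp)
    total_files, total_bytes = map(int, fp.readline().split(' ', 1))
    entries = {}
    for _unused in xrange(total_files):
        name, size = fp.readline().split('\0', 1)   # size keeps its newline; int() copes
        entries[name] = fp.read(int(size))          # raw store file contents
    return entries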
1916 def clone(self, remote, heads=[], stream=False):
1918 def clone(self, remote, heads=[], stream=False):
1917 '''clone remote repository.
1919 '''clone remote repository.
1918
1920
1919 keyword arguments:
1921 keyword arguments:
1920 heads: list of revs to clone (forces use of pull)
1922 heads: list of revs to clone (forces use of pull)
1921 stream: use streaming clone if possible'''
1923 stream: use streaming clone if possible'''
1922
1924
1923 # now, all clients that can request uncompressed clones can
1925 # now, all clients that can request uncompressed clones can
1924 # read repo formats supported by all servers that can serve
1926 # read repo formats supported by all servers that can serve
1925 # them.
1927 # them.
1926
1928
1927 # if revlog format changes, client will have to check version
1929 # if revlog format changes, client will have to check version
1928 # and format flags on "stream" capability, and use
1930 # and format flags on "stream" capability, and use
1929 # uncompressed only if compatible.
1931 # uncompressed only if compatible.
1930
1932
1931 if stream and not heads and remote.capable('stream'):
1933 if stream and not heads and remote.capable('stream'):
1932 return self.stream_in(remote)
1934 return self.stream_in(remote)
1933 return self.pull(remote, heads)
1935 return self.pull(remote, heads)
1934
1936
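The streaming decision in clone() reduces to a single predicate; restated as a sketch (wants_stream is a hypothetical name, remote.capable is the peer API already used above):

def wants_stream(stream, heads, remote):
    # stream requested, no specific heads, and the server advertises 'stream'
    return bool(stream) and not heads and remote.capable('stream')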
1935 # used to avoid circular references so destructors work
1937 # used to avoid circular references so destructors work
1936 def aftertrans(files):
1938 def aftertrans(files):
1937 renamefiles = [tuple(t) for t in files]
1939 renamefiles = [tuple(t) for t in files]
1938 def a():
1940 def a():
1939 for src, dest in renamefiles:
1941 for src, dest in renamefiles:
1940 util.rename(src, dest)
1942 util.rename(src, dest)
1941 return a
1943 return a
1942
1944
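aftertrans returns a plain closure, so the caller only holds file name pairs and never a reference back to the repository object. A hedged usage sketch (the file names are hypothetical; util.rename is the Mercurial helper used above):

# queue renames to run once the transaction has closed
cleanup = aftertrans([("/tmp/repo/.hg/journal", "/tmp/repo/.hg/undo")])
# ... transaction runs ...
cleanup()     # the queued renames happen only now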
1943 def instance(ui, path, create):
1945 def instance(ui, path, create):
1944 return localrepository(ui, util.drop_scheme('file', path), create)
1946 return localrepository(ui, util.drop_scheme('file', path), create)
1945
1947
1946 def islocal(path):
1948 def islocal(path):
1947 return True
1949 return True
@@ -1,570 +1,570 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import errno, util, os, tempfile, context
10 import errno, util, os, tempfile, context
11
11
12 def filemerge(repo, fw, fo, wctx, mctx):
12 def filemerge(repo, fw, fo, wctx, mctx):
13 """perform a 3-way merge in the working directory
13 """perform a 3-way merge in the working directory
14
14
15 fw = filename in the working directory
15 fw = filename in the working directory
16 fo = filename in other parent
16 fo = filename in other parent
17 wctx, mctx = working and merge changecontexts
17 wctx, mctx = working and merge changecontexts
18 """
18 """
19
19
20 def temp(prefix, ctx):
20 def temp(prefix, ctx):
21 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
21 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
22 (fd, name) = tempfile.mkstemp(prefix=pre)
22 (fd, name) = tempfile.mkstemp(prefix=pre)
23 data = repo.wwritedata(ctx.path(), ctx.data())
23 data = repo.wwritedata(ctx.path(), ctx.data())
24 f = os.fdopen(fd, "wb")
24 f = os.fdopen(fd, "wb")
25 f.write(data)
25 f.write(data)
26 f.close()
26 f.close()
27 return name
27 return name
28
28
29 fcm = wctx.filectx(fw)
29 fcm = wctx.filectx(fw)
30 fco = mctx.filectx(fo)
30 fco = mctx.filectx(fo)
31
31
32 if not fco.cmp(fcm.data()): # files identical?
32 if not fco.cmp(fcm.data()): # files identical?
33 return None
33 return None
34
34
35 fca = fcm.ancestor(fco)
35 fca = fcm.ancestor(fco)
36 if not fca:
36 if not fca:
37 fca = repo.filectx(fw, fileid=nullrev)
37 fca = repo.filectx(fw, fileid=nullrev)
38 a = repo.wjoin(fw)
38 a = repo.wjoin(fw)
39 b = temp("base", fca)
39 b = temp("base", fca)
40 c = temp("other", fco)
40 c = temp("other", fco)
41
41
42 if fw != fo:
42 if fw != fo:
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 else:
44 else:
45 repo.ui.status(_("merging %s\n") % fw)
45 repo.ui.status(_("merging %s\n") % fw)
46
46
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48
48
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 or "hgmerge")
50 or "hgmerge")
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 environ={'HG_FILE': fw,
52 environ={'HG_FILE': fw,
53 'HG_MY_NODE': str(wctx.parents()[0]),
53 'HG_MY_NODE': str(wctx.parents()[0]),
54 'HG_OTHER_NODE': str(mctx)})
54 'HG_OTHER_NODE': str(mctx)})
55 if r:
55 if r:
56 repo.ui.warn(_("merging %s failed!\n") % fw)
56 repo.ui.warn(_("merging %s failed!\n") % fw)
57
57
58 os.unlink(b)
58 os.unlink(b)
59 os.unlink(c)
59 os.unlink(c)
60 return r
60 return r
61
61
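filemerge hands the external tool three paths (working file, base, other) and exports HG_FILE, HG_MY_NODE and HG_OTHER_NODE; a nonzero exit status marks the file unresolved. A hedged sketch of a minimal HGMERGE wrapper honouring that contract (the call to RCS merge(1) is an assumption about what is installed):

#!/usr/bin/env python
import os, subprocess, sys

local, base, other = sys.argv[1:4]          # paths passed by filemerge()
print "merging %s against node %s" % (os.environ.get("HG_FILE"),
                                      os.environ.get("HG_OTHER_NODE"))
ret = subprocess.call(["merge", local, base, other])   # assumes RCS merge(1)
sys.exit(ret and 1 or 0)                    # nonzero -> unresolved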
62 def checkunknown(wctx, mctx):
62 def checkunknown(wctx, mctx):
63 "check for collisions between unknown files and files in mctx"
63 "check for collisions between unknown files and files in mctx"
64 man = mctx.manifest()
64 man = mctx.manifest()
65 for f in wctx.unknown():
65 for f in wctx.unknown():
66 if f in man:
66 if f in man:
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 raise util.Abort(_("untracked local file '%s' differs"
68 raise util.Abort(_("untracked local file '%s' differs"
69 " from remote version") % f)
69 " from remote version") % f)
70
70
71 def checkcollision(mctx):
71 def checkcollision(mctx):
72 "check for case folding collisions in the destination context"
72 "check for case folding collisions in the destination context"
73 folded = {}
73 folded = {}
74 for fn in mctx.manifest():
74 for fn in mctx.manifest():
75 fold = fn.lower()
75 fold = fn.lower()
76 if fold in folded:
76 if fold in folded:
77 raise util.Abort(_("case-folding collision between %s and %s")
77 raise util.Abort(_("case-folding collision between %s and %s")
78 % (fn, folded[fold]))
78 % (fn, folded[fold]))
79 folded[fold] = fn
79 folded[fold] = fn
80
80
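A quick illustration of the check above: two manifest entries that differ only in case abort the update before anything is written. Sketch using a stand-in context object (fakectx is hypothetical; checkcollision and util.Abort are the ones defined and imported in this module):

class fakectx(object):
    def manifest(self):
        return {"README": "n1", "readme": "n2"}   # case-folding collision

try:
    checkcollision(fakectx())
except util.Abort, inst:
    print "refused:", inst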
81 def forgetremoved(wctx, mctx):
81 def forgetremoved(wctx, mctx):
82 """
82 """
83 Forget removed files
83 Forget removed files
84
84
85 If we're jumping between revisions (as opposed to merging), and if
85 If we're jumping between revisions (as opposed to merging), and if
86 neither the working directory nor the target rev has the file,
86 neither the working directory nor the target rev has the file,
87 then we need to remove it from the dirstate, to prevent the
87 then we need to remove it from the dirstate, to prevent the
88 dirstate from listing the file when it is no longer in the
88 dirstate from listing the file when it is no longer in the
89 manifest.
89 manifest.
90 """
90 """
91
91
92 action = []
92 action = []
93 man = mctx.manifest()
93 man = mctx.manifest()
94 for f in wctx.deleted() + wctx.removed():
94 for f in wctx.deleted() + wctx.removed():
95 if f not in man:
95 if f not in man:
96 action.append((f, "f"))
96 action.append((f, "f"))
97
97
98 return action
98 return action
99
99
100 def findcopies(repo, m1, m2, ma, limit):
100 def findcopies(repo, m1, m2, ma, limit):
101 """
101 """
102 Find moves and copies between m1 and m2 back to limit linkrev
102 Find moves and copies between m1 and m2 back to limit linkrev
103 """
103 """
104
104
105 def nonoverlap(d1, d2, d3):
105 def nonoverlap(d1, d2, d3):
106 "Return list of elements in d1 not in d2 or d3"
106 "Return list of elements in d1 not in d2 or d3"
107 l = [d for d in d1 if d not in d3 and d not in d2]
107 l = [d for d in d1 if d not in d3 and d not in d2]
108 l.sort()
108 l.sort()
109 return l
109 return l
110
110
111 def dirname(f):
111 def dirname(f):
112 s = f.rfind("/")
112 s = f.rfind("/")
113 if s == -1:
113 if s == -1:
114 return ""
114 return ""
115 return f[:s]
115 return f[:s]
116
116
117 def dirs(files):
117 def dirs(files):
118 d = {}
118 d = {}
119 for f in files:
119 for f in files:
120 f = dirname(f)
120 f = dirname(f)
121 while f not in d:
121 while f not in d:
122 d[f] = True
122 d[f] = True
123 f = dirname(f)
123 f = dirname(f)
124 return d
124 return d
125
125
126 wctx = repo.workingctx()
126 wctx = repo.workingctx()
127
127
128 def makectx(f, n):
128 def makectx(f, n):
129 if len(n) == 20:
129 if len(n) == 20:
130 return repo.filectx(f, fileid=n)
130 return repo.filectx(f, fileid=n)
131 return wctx.filectx(f)
131 return wctx.filectx(f)
132 ctx = util.cachefunc(makectx)
132 ctx = util.cachefunc(makectx)
133
133
134 def findold(fctx):
134 def findold(fctx):
135 "find files that path was copied from, back to linkrev limit"
135 "find files that path was copied from, back to linkrev limit"
136 old = {}
136 old = {}
137 seen = {}
137 seen = {}
138 orig = fctx.path()
138 orig = fctx.path()
139 visit = [fctx]
139 visit = [fctx]
140 while visit:
140 while visit:
141 fc = visit.pop()
141 fc = visit.pop()
142 s = str(fc)
142 s = str(fc)
143 if s in seen:
143 if s in seen:
144 continue
144 continue
145 seen[s] = 1
145 seen[s] = 1
146 if fc.path() != orig and fc.path() not in old:
146 if fc.path() != orig and fc.path() not in old:
147 old[fc.path()] = 1
147 old[fc.path()] = 1
148 if fc.rev() < limit:
148 if fc.rev() < limit:
149 continue
149 continue
150 visit += fc.parents()
150 visit += fc.parents()
151
151
152 old = old.keys()
152 old = old.keys()
153 old.sort()
153 old.sort()
154 return old
154 return old
155
155
156 copy = {}
156 copy = {}
157 fullcopy = {}
157 fullcopy = {}
158 diverge = {}
158 diverge = {}
159
159
160 def checkcopies(c, man, aman):
160 def checkcopies(c, man, aman):
161 '''check possible copies for filectx c'''
161 '''check possible copies for filectx c'''
162 for of in findold(c):
162 for of in findold(c):
163 fullcopy[c.path()] = of # remember for dir rename detection
163 fullcopy[c.path()] = of # remember for dir rename detection
164 if of not in man: # original file not in other manifest?
164 if of not in man: # original file not in other manifest?
165 if of in ma:
165 if of in ma:
166 diverge.setdefault(of, []).append(c.path())
166 diverge.setdefault(of, []).append(c.path())
167 continue
167 continue
168 # if the original file is unchanged on the other branch,
168 # if the original file is unchanged on the other branch,
169 # no merge needed
169 # no merge needed
170 if man[of] == aman.get(of):
170 if man[of] == aman.get(of):
171 continue
171 continue
172 c2 = ctx(of, man[of])
172 c2 = ctx(of, man[of])
173 ca = c.ancestor(c2)
173 ca = c.ancestor(c2)
174 if not ca: # unrelated?
174 if not ca: # unrelated?
175 continue
175 continue
176 # named changed on only one side?
176 # name changed on only one side?
176 # name changed on only one side?
177 if ca.path() == c.path() or ca.path() == c2.path():
178 if c == ca or c2 == ca: # no merge needed, ignore copy
178 if c == ca or c2 == ca: # no merge needed, ignore copy
179 continue
179 continue
180 copy[c.path()] = of
180 copy[c.path()] = of
181
181
182 if not repo.ui.configbool("merge", "followcopies", True):
182 if not repo.ui.configbool("merge", "followcopies", True):
183 return {}, {}
183 return {}, {}
184
184
185 # avoid silly behavior for update from empty dir
185 # avoid silly behavior for update from empty dir
186 if not m1 or not m2 or not ma:
186 if not m1 or not m2 or not ma:
187 return {}, {}
187 return {}, {}
188
188
189 u1 = nonoverlap(m1, m2, ma)
189 u1 = nonoverlap(m1, m2, ma)
190 u2 = nonoverlap(m2, m1, ma)
190 u2 = nonoverlap(m2, m1, ma)
191
191
192 for f in u1:
192 for f in u1:
193 checkcopies(ctx(f, m1[f]), m2, ma)
193 checkcopies(ctx(f, m1[f]), m2, ma)
194
194
195 for f in u2:
195 for f in u2:
196 checkcopies(ctx(f, m2[f]), m1, ma)
196 checkcopies(ctx(f, m2[f]), m1, ma)
197
197
198 d2 = {}
198 d2 = {}
199 for of, fl in diverge.items():
199 for of, fl in diverge.items():
200 for f in fl:
200 for f in fl:
201 fo = list(fl)
201 fo = list(fl)
202 fo.remove(f)
202 fo.remove(f)
203 d2[f] = (of, fo)
203 d2[f] = (of, fo)
204
204
205 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
205 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
206 return copy, diverge
206 return copy, diverge
207
207
208 # generate a directory move map
208 # generate a directory move map
209 d1, d2 = dirs(m1), dirs(m2)
209 d1, d2 = dirs(m1), dirs(m2)
210 invalid = {}
210 invalid = {}
211 dirmove = {}
211 dirmove = {}
212
212
213 # examine each file copy for a potential directory move, which is
213 # examine each file copy for a potential directory move, which is
214 # when all the files in a directory are moved to a new directory
214 # when all the files in a directory are moved to a new directory
215 for dst, src in fullcopy.items():
215 for dst, src in fullcopy.items():
216 dsrc, ddst = dirname(src), dirname(dst)
216 dsrc, ddst = dirname(src), dirname(dst)
217 if dsrc in invalid:
217 if dsrc in invalid:
218 # already seen to be uninteresting
218 # already seen to be uninteresting
219 continue
219 continue
220 elif dsrc in d1 and ddst in d1:
220 elif dsrc in d1 and ddst in d1:
221 # directory wasn't entirely moved locally
221 # directory wasn't entirely moved locally
222 invalid[dsrc] = True
222 invalid[dsrc] = True
223 elif dsrc in d2 and ddst in d2:
223 elif dsrc in d2 and ddst in d2:
224 # directory wasn't entirely moved remotely
224 # directory wasn't entirely moved remotely
225 invalid[dsrc] = True
225 invalid[dsrc] = True
226 elif dsrc in dirmove and dirmove[dsrc] != ddst:
226 elif dsrc in dirmove and dirmove[dsrc] != ddst:
227 # files from the same directory moved to two different places
227 # files from the same directory moved to two different places
228 invalid[dsrc] = True
228 invalid[dsrc] = True
229 else:
229 else:
230 # looks good so far
230 # looks good so far
231 dirmove[dsrc + "/"] = ddst + "/"
231 dirmove[dsrc + "/"] = ddst + "/"
232
232
233 for i in invalid:
233 for i in invalid:
234 if i in dirmove:
234 if i in dirmove:
235 del dirmove[i]
235 del dirmove[i]
236
236
237 del d1, d2, invalid
237 del d1, d2, invalid
238
238
239 if not dirmove:
239 if not dirmove:
240 return copy, diverge
240 return copy, diverge
241
241
242 # check unaccounted nonoverlapping files against directory moves
242 # check unaccounted nonoverlapping files against directory moves
243 for f in u1 + u2:
243 for f in u1 + u2:
244 if f not in fullcopy:
244 if f not in fullcopy:
245 for d in dirmove:
245 for d in dirmove:
246 if f.startswith(d):
246 if f.startswith(d):
247 # new file added in a directory that was moved, move it
247 # new file added in a directory that was moved, move it
248 copy[f] = dirmove[d] + f[len(d):]
248 copy[f] = dirmove[d] + f[len(d):]
249 break
249 break
250
250
251 return copy, diverge
251 return copy, diverge
252
252
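The directory-move pass above means that when one side renamed every file under a directory, a file newly added under the old name on the other side follows the rename. A small worked example of the final remapping step (paths are hypothetical):

dirmove = {"a/": "b/"}          # built above from fullcopy: a/1 -> b/1, a/2 -> b/2
f = "a/3"                       # new file, unaccounted for in fullcopy
for d in dirmove:
    if f.startswith(d):
        print "%s -> %s" % (f, dirmove[d] + f[len(d):])   # a/3 -> b/3
        break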
253 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
253 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
254 """
254 """
255 Merge p1 and p2 with ancestor pa and generate merge action list
255 Merge p1 and p2 with ancestor pa and generate merge action list
256
256
257 overwrite = whether we clobber working files
257 overwrite = whether we clobber working files
258 partial = function to filter file lists
258 partial = function to filter file lists
259 """
259 """
260
260
261 repo.ui.note(_("resolving manifests\n"))
261 repo.ui.note(_("resolving manifests\n"))
262 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
262 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
263 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
263 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
264
264
265 m1 = p1.manifest()
265 m1 = p1.manifest()
266 m2 = p2.manifest()
266 m2 = p2.manifest()
267 ma = pa.manifest()
267 ma = pa.manifest()
268 backwards = (pa == p2)
268 backwards = (pa == p2)
269 action = []
269 action = []
270 copy = {}
270 copy = {}
271 diverge = {}
271 diverge = {}
272
272
273 def fmerge(f, f2=None, fa=None):
273 def fmerge(f, f2=None, fa=None):
274 """merge flags"""
274 """merge flags"""
275 if not f2:
275 if not f2:
276 f2 = f
276 f2 = f
277 fa = f
277 fa = f
278 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
278 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
279 if ((a^b) | (a^c)) ^ a:
279 if ((a^b) | (a^c)) ^ a:
280 return 'x'
280 return 'x'
281 a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
281 a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
282 if ((a^b) | (a^c)) ^ a:
282 if ((a^b) | (a^c)) ^ a:
283 return 'l'
283 return 'l'
284 return ''
284 return ''
285
285
286 def act(msg, m, f, *args):
286 def act(msg, m, f, *args):
287 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
287 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
288 action.append((f, m) + args)
288 action.append((f, m) + args)
289
289
290 if not (backwards or overwrite):
290 if not (backwards or overwrite):
291 copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
291 copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
292
292
293 for of, fl in diverge.items():
293 for of, fl in diverge.items():
294 act("divergent renames", "dr", of, fl)
294 act("divergent renames", "dr", of, fl)
295
295
296 copied = dict.fromkeys(copy.values())
296 copied = dict.fromkeys(copy.values())
297
297
298 # Compare manifests
298 # Compare manifests
299 for f, n in m1.iteritems():
299 for f, n in m1.iteritems():
300 if partial and not partial(f):
300 if partial and not partial(f):
301 continue
301 continue
302 if f in m2:
302 if f in m2:
303 # are files different?
303 # are files different?
304 if n != m2[f]:
304 if n != m2[f]:
305 a = ma.get(f, nullid)
305 a = ma.get(f, nullid)
306 # are both different from the ancestor?
306 # are both different from the ancestor?
307 if not overwrite and n != a and m2[f] != a:
307 if not overwrite and n != a and m2[f] != a:
308 act("versions differ", "m", f, f, f, fmerge(f), False)
308 act("versions differ", "m", f, f, f, fmerge(f), False)
309 # are we clobbering?
309 # are we clobbering?
310 # is remote's version newer?
310 # is remote's version newer?
311 # or are we going back in time and clean?
311 # or are we going back in time and clean?
312 elif overwrite or m2[f] != a or (backwards and not n[20:]):
312 elif overwrite or m2[f] != a or (backwards and not n[20:]):
313 act("remote is newer", "g", f, m2.flags(f))
313 act("remote is newer", "g", f, m2.flags(f))
314 # local is newer, not overwrite, check mode bits
314 # local is newer, not overwrite, check mode bits
315 elif fmerge(f) != m1.flags(f):
315 elif fmerge(f) != m1.flags(f):
316 act("update permissions", "e", f, m2.flags(f))
316 act("update permissions", "e", f, m2.flags(f))
317 # contents same, check mode bits
317 # contents same, check mode bits
318 elif m1.flags(f) != m2.flags(f):
318 elif m1.flags(f) != m2.flags(f):
319 if overwrite or fmerge(f) != m1.flags(f):
319 if overwrite or fmerge(f) != m1.flags(f):
320 act("update permissions", "e", f, m2.flags(f))
320 act("update permissions", "e", f, m2.flags(f))
321 elif f in copied:
321 elif f in copied:
322 continue
322 continue
323 elif f in copy:
323 elif f in copy:
324 f2 = copy[f]
324 f2 = copy[f]
325 if f2 not in m2: # directory rename
325 if f2 not in m2: # directory rename
326 act("remote renamed directory to " + f2, "d",
326 act("remote renamed directory to " + f2, "d",
327 f, None, f2, m1.flags(f))
327 f, None, f2, m1.flags(f))
328 elif f2 in m1: # case 2 A,B/B/B
328 elif f2 in m1: # case 2 A,B/B/B
329 act("local copied to " + f2, "m",
329 act("local copied to " + f2, "m",
330 f, f2, f, fmerge(f, f2, f2), False)
330 f, f2, f, fmerge(f, f2, f2), False)
331 else: # case 4,21 A/B/B
331 else: # case 4,21 A/B/B
332 act("local moved to " + f2, "m",
332 act("local moved to " + f2, "m",
333 f, f2, f, fmerge(f, f2, f2), False)
333 f, f2, f, fmerge(f, f2, f2), False)
334 elif f in ma:
334 elif f in ma:
335 if n != ma[f] and not overwrite:
335 if n != ma[f] and not overwrite:
336 if repo.ui.prompt(
336 if repo.ui.prompt(
337 (_(" local changed %s which remote deleted\n") % f) +
337 (_(" local changed %s which remote deleted\n") % f) +
338 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
338 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
339 act("prompt delete", "r", f)
339 act("prompt delete", "r", f)
340 else:
340 else:
341 act("other deleted", "r", f)
341 act("other deleted", "r", f)
342 else:
342 else:
343 # file is created on branch or in working directory
343 # file is created on branch or in working directory
344 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
344 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
345 act("remote deleted", "r", f)
345 act("remote deleted", "r", f)
346
346
347 for f, n in m2.iteritems():
347 for f, n in m2.iteritems():
348 if partial and not partial(f):
348 if partial and not partial(f):
349 continue
349 continue
350 if f in m1:
350 if f in m1:
351 continue
351 continue
352 if f in copied:
352 if f in copied:
353 continue
353 continue
354 if f in copy:
354 if f in copy:
355 f2 = copy[f]
355 f2 = copy[f]
356 if f2 not in m1: # directory rename
356 if f2 not in m1: # directory rename
357 act("local renamed directory to " + f2, "d",
357 act("local renamed directory to " + f2, "d",
358 None, f, f2, m2.flags(f))
358 None, f, f2, m2.flags(f))
359 elif f2 in m2: # rename case 1, A/A,B/A
359 elif f2 in m2: # rename case 1, A/A,B/A
360 act("remote copied to " + f, "m",
360 act("remote copied to " + f, "m",
361 f2, f, f, fmerge(f2, f, f2), False)
361 f2, f, f, fmerge(f2, f, f2), False)
362 else: # case 3,20 A/B/A
362 else: # case 3,20 A/B/A
363 act("remote moved to " + f, "m",
363 act("remote moved to " + f, "m",
364 f2, f, f, fmerge(f2, f, f2), True)
364 f2, f, f, fmerge(f2, f, f2), True)
365 elif f in ma:
365 elif f in ma:
366 if overwrite or backwards:
366 if overwrite or backwards:
367 act("recreating", "g", f, m2.flags(f))
367 act("recreating", "g", f, m2.flags(f))
368 elif n != ma[f]:
368 elif n != ma[f]:
369 if repo.ui.prompt(
369 if repo.ui.prompt(
370 (_("remote changed %s which local deleted\n") % f) +
370 (_("remote changed %s which local deleted\n") % f) +
371 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
371 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
372 act("prompt recreating", "g", f, m2.flags(f))
372 act("prompt recreating", "g", f, m2.flags(f))
373 else:
373 else:
374 act("remote created", "g", f, m2.flags(f))
374 act("remote created", "g", f, m2.flags(f))
375
375
376 return action
376 return action
377
377
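manifestmerge returns a list of (file, code, *args) tuples; the codes are interpreted by applyupdates and recordupdates below. Summarised as data (the descriptions paraphrase this file, the dict itself is illustrative):

ACTION_CODES = {
    "g":  "get: fetch the file from the other context (args: flags)",
    "m":  "merge: three-way file merge (args: f2, fd, flags, move)",
    "r":  "remove: delete from the working directory",
    "d":  "directory rename: write fd from f or f2 (args: f2, fd, flags)",
    "dr": "divergent renames: warn only (args: list of new names)",
    "e":  "exec: update permission bits only (args: flags)",
    "f":  "forget: drop from the dirstate (emitted by forgetremoved)",
}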
378 def applyupdates(repo, action, wctx, mctx):
378 def applyupdates(repo, action, wctx, mctx):
379 "apply the merge action list to the working directory"
379 "apply the merge action list to the working directory"
380
380
381 updated, merged, removed, unresolved = 0, 0, 0, 0
381 updated, merged, removed, unresolved = 0, 0, 0, 0
382 action.sort()
382 action.sort()
383 for a in action:
383 for a in action:
384 f, m = a[:2]
384 f, m = a[:2]
385 if f and f[0] == "/":
385 if f and f[0] == "/":
386 continue
386 continue
387 if m == "r": # remove
387 if m == "r": # remove
388 repo.ui.note(_("removing %s\n") % f)
388 repo.ui.note(_("removing %s\n") % f)
389 util.audit_path(f)
389 util.audit_path(f)
390 try:
390 try:
391 util.unlink(repo.wjoin(f))
391 util.unlink(repo.wjoin(f))
392 except OSError, inst:
392 except OSError, inst:
393 if inst.errno != errno.ENOENT:
393 if inst.errno != errno.ENOENT:
394 repo.ui.warn(_("update failed to remove %s: %s!\n") %
394 repo.ui.warn(_("update failed to remove %s: %s!\n") %
395 (f, inst.strerror))
395 (f, inst.strerror))
396 removed += 1
396 removed += 1
397 elif m == "m": # merge
397 elif m == "m": # merge
398 f2, fd, flags, move = a[2:]
398 f2, fd, flags, move = a[2:]
399 r = filemerge(repo, f, f2, wctx, mctx)
399 r = filemerge(repo, f, f2, wctx, mctx)
400 if r > 0:
400 if r > 0:
401 unresolved += 1
401 unresolved += 1
402 else:
402 else:
403 if r is None:
403 if r is None:
404 updated += 1
404 updated += 1
405 else:
405 else:
406 merged += 1
406 merged += 1
407 if f != fd:
407 if f != fd:
408 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
408 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
409 repo.wwrite(fd, repo.wread(f), flags)
409 repo.wwrite(fd, repo.wread(f), flags)
410 if move:
410 if move:
411 repo.ui.debug(_("removing %s\n") % f)
411 repo.ui.debug(_("removing %s\n") % f)
412 os.unlink(repo.wjoin(f))
412 os.unlink(repo.wjoin(f))
413 util.set_exec(repo.wjoin(fd), "x" in flags)
413 util.set_exec(repo.wjoin(fd), "x" in flags)
414 elif m == "g": # get
414 elif m == "g": # get
415 flags = a[2]
415 flags = a[2]
416 repo.ui.note(_("getting %s\n") % f)
416 repo.ui.note(_("getting %s\n") % f)
417 t = mctx.filectx(f).data()
417 t = mctx.filectx(f).data()
418 repo.wwrite(f, t, flags)
418 repo.wwrite(f, t, flags)
419 updated += 1
419 updated += 1
420 elif m == "d": # directory rename
420 elif m == "d": # directory rename
421 f2, fd, flags = a[2:]
421 f2, fd, flags = a[2:]
422 if f:
422 if f:
423 repo.ui.note(_("moving %s to %s\n") % (f, fd))
423 repo.ui.note(_("moving %s to %s\n") % (f, fd))
424 t = wctx.filectx(f).data()
424 t = wctx.filectx(f).data()
425 repo.wwrite(fd, t, flags)
425 repo.wwrite(fd, t, flags)
426 util.unlink(repo.wjoin(f))
426 util.unlink(repo.wjoin(f))
427 if f2:
427 if f2:
428 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
428 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
429 t = mctx.filectx(f2).data()
429 t = mctx.filectx(f2).data()
430 repo.wwrite(fd, t, flags)
430 repo.wwrite(fd, t, flags)
431 updated += 1
431 updated += 1
432 elif m == "dr": # divergent renames
432 elif m == "dr": # divergent renames
433 fl = a[2]
433 fl = a[2]
434 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
434 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
435 for nf in fl:
435 for nf in fl:
436 repo.ui.warn(" %s\n" % nf)
436 repo.ui.warn(" %s\n" % nf)
437 elif m == "e": # exec
437 elif m == "e": # exec
438 flags = a[2]
438 flags = a[2]
439 util.set_exec(repo.wjoin(f), flags)
439 util.set_exec(repo.wjoin(f), flags)
440
440
441 return updated, merged, removed, unresolved
441 return updated, merged, removed, unresolved
442
442
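applyupdates returns the (updated, merged, removed, unresolved) counters that callers turn into the usual update summary; a sketch of that formatting (the exact wording lives in the command layer and is assumed here):

def summarize(stats):
    updated, merged, removed, unresolved = stats
    return ("%d files updated, %d files merged, "
            "%d files removed, %d files unresolved"
            % (updated, merged, removed, unresolved))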
443 def recordupdates(repo, action, branchmerge):
443 def recordupdates(repo, action, branchmerge):
444 "record merge actions to the dirstate"
444 "record merge actions to the dirstate"
445
445
446 for a in action:
446 for a in action:
447 f, m = a[:2]
447 f, m = a[:2]
448 if m == "r": # remove
448 if m == "r": # remove
449 if branchmerge:
449 if branchmerge:
450 repo.dirstate.update([f], 'r')
450 repo.dirstate.remove(f)
451 else:
451 else:
452 repo.dirstate.forget([f])
452 repo.dirstate.forget(f)
453 elif m == "f": # forget
453 elif m == "f": # forget
454 repo.dirstate.forget([f])
454 repo.dirstate.forget(f)
455 elif m == "g": # get
455 elif m == "g": # get
456 if branchmerge:
456 if branchmerge:
457 repo.dirstate.update([f], 'n', st_mtime=-1)
457 repo.dirstate.normaldirty(f)
458 else:
458 else:
459 repo.dirstate.update([f], 'n')
459 repo.dirstate.normal(f)
460 elif m == "m": # merge
460 elif m == "m": # merge
461 f2, fd, flag, move = a[2:]
461 f2, fd, flag, move = a[2:]
462 if branchmerge:
462 if branchmerge:
463 # We've done a branch merge, mark this file as merged
463 # We've done a branch merge, mark this file as merged
464 # so that we properly record the merger later
464 # so that we properly record the merger later
465 repo.dirstate.update([fd], 'm')
465 repo.dirstate.merge(fd)
466 if f != f2: # copy/rename
466 if f != f2: # copy/rename
467 if move:
467 if move:
468 repo.dirstate.update([f], 'r')
468 repo.dirstate.remove(f)
469 if f != fd:
469 if f != fd:
470 repo.dirstate.copy(f, fd)
470 repo.dirstate.copy(f, fd)
471 else:
471 else:
472 repo.dirstate.copy(f2, fd)
472 repo.dirstate.copy(f2, fd)
473 else:
473 else:
474 # We've update-merged a locally modified file, so
474 # We've update-merged a locally modified file, so
475 # we set the dirstate to emulate a normal checkout
475 # we set the dirstate to emulate a normal checkout
476 # of that file some time in the past. Thus our
476 # of that file some time in the past. Thus our
477 # merge will appear as a normal local file
477 # merge will appear as a normal local file
478 # modification.
478 # modification.
479 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
479 repo.dirstate.normaldirty(fd)
480 if move:
480 if move:
481 repo.dirstate.forget([f])
481 repo.dirstate.forget(f)
482 elif m == "d": # directory rename
482 elif m == "d": # directory rename
483 f2, fd, flag = a[2:]
483 f2, fd, flag = a[2:]
484 if not f2 and f not in repo.dirstate:
484 if not f2 and f not in repo.dirstate:
485 # untracked file moved
485 # untracked file moved
486 continue
486 continue
487 if branchmerge:
487 if branchmerge:
488 repo.dirstate.update([fd], 'a')
488 repo.dirstate.add(fd)
489 if f:
489 if f:
490 repo.dirstate.update([f], 'r')
490 repo.dirstate.remove(f)
491 repo.dirstate.copy(f, fd)
491 repo.dirstate.copy(f, fd)
492 if f2:
492 if f2:
493 repo.dirstate.copy(f2, fd)
493 repo.dirstate.copy(f2, fd)
494 else:
494 else:
495 repo.dirstate.update([fd], 'n')
495 repo.dirstate.normal(fd)
496 if f:
496 if f:
497 repo.dirstate.forget([f])
497 repo.dirstate.forget(f)
498
498
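recordupdates is where this changeset's dirstate API change is most visible: the old update(files, state, ...) and forget(list) calls become per-file methods. The substitutions made in this hunk, collected as data for reference:

DIRSTATE_CALLS = [
    ("dirstate.update([f], 'a')",                          "dirstate.add(f)"),
    ("dirstate.update([f], 'r')",                          "dirstate.remove(f)"),
    ("dirstate.update([f], 'm')",                          "dirstate.merge(f)"),
    ("dirstate.update([f], 'n')",                          "dirstate.normal(f)"),
    ("dirstate.update([f], 'n', st_mtime=-1)",             "dirstate.normaldirty(f)"),
    ("dirstate.update([f], 'n', st_size=-1, st_mtime=-1)", "dirstate.normaldirty(f)"),
    ("dirstate.forget([f])",                               "dirstate.forget(f)"),
]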
499 def update(repo, node, branchmerge, force, partial, wlock):
499 def update(repo, node, branchmerge, force, partial, wlock):
500 """
500 """
501 Perform a merge between the working directory and the given node
501 Perform a merge between the working directory and the given node
502
502
503 branchmerge = whether to merge between branches
503 branchmerge = whether to merge between branches
504 force = whether to force branch merging or file overwriting
504 force = whether to force branch merging or file overwriting
505 partial = a function to filter file lists (dirstate not updated)
505 partial = a function to filter file lists (dirstate not updated)
506 wlock = working dir lock, if already held
506 wlock = working dir lock, if already held
507 """
507 """
508
508
509 if not wlock:
509 if not wlock:
510 wlock = repo.wlock()
510 wlock = repo.wlock()
511
511
512 wc = repo.workingctx()
512 wc = repo.workingctx()
513 if node is None:
513 if node is None:
514 # tip of current branch
514 # tip of current branch
515 try:
515 try:
516 node = repo.branchtags()[wc.branch()]
516 node = repo.branchtags()[wc.branch()]
517 except KeyError:
517 except KeyError:
518 raise util.Abort(_("branch %s not found") % wc.branch())
518 raise util.Abort(_("branch %s not found") % wc.branch())
519 overwrite = force and not branchmerge
519 overwrite = force and not branchmerge
520 forcemerge = force and branchmerge
520 forcemerge = force and branchmerge
521 pl = wc.parents()
521 pl = wc.parents()
522 p1, p2 = pl[0], repo.changectx(node)
522 p1, p2 = pl[0], repo.changectx(node)
523 pa = p1.ancestor(p2)
523 pa = p1.ancestor(p2)
524 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
524 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
525 fastforward = False
525 fastforward = False
526
526
527 ### check phase
527 ### check phase
528 if not overwrite and len(pl) > 1:
528 if not overwrite and len(pl) > 1:
529 raise util.Abort(_("outstanding uncommitted merges"))
529 raise util.Abort(_("outstanding uncommitted merges"))
530 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
530 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
531 if branchmerge:
531 if branchmerge:
532 if p1.branch() != p2.branch() and pa != p2:
532 if p1.branch() != p2.branch() and pa != p2:
533 fastforward = True
533 fastforward = True
534 else:
534 else:
535 raise util.Abort(_("there is nothing to merge, just use "
535 raise util.Abort(_("there is nothing to merge, just use "
536 "'hg update' or look at 'hg heads'"))
536 "'hg update' or look at 'hg heads'"))
537 elif not (overwrite or branchmerge):
537 elif not (overwrite or branchmerge):
538 raise util.Abort(_("update spans branches, use 'hg merge' "
538 raise util.Abort(_("update spans branches, use 'hg merge' "
539 "or 'hg update -C' to lose changes"))
539 "or 'hg update -C' to lose changes"))
540 if branchmerge and not forcemerge:
540 if branchmerge and not forcemerge:
541 if wc.files():
541 if wc.files():
542 raise util.Abort(_("outstanding uncommitted changes"))
542 raise util.Abort(_("outstanding uncommitted changes"))
543
543
544 ### calculate phase
544 ### calculate phase
545 action = []
545 action = []
546 if not force:
546 if not force:
547 checkunknown(wc, p2)
547 checkunknown(wc, p2)
548 if not util.checkfolding(repo.path):
548 if not util.checkfolding(repo.path):
549 checkcollision(p2)
549 checkcollision(p2)
550 if not branchmerge:
550 if not branchmerge:
551 action += forgetremoved(wc, p2)
551 action += forgetremoved(wc, p2)
552 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
552 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
553
553
554 ### apply phase
554 ### apply phase
555 if not branchmerge: # just jump to the new rev
555 if not branchmerge: # just jump to the new rev
556 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
556 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
557 if not partial:
557 if not partial:
558 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
558 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
559
559
560 stats = applyupdates(repo, action, wc, p2)
560 stats = applyupdates(repo, action, wc, p2)
561
561
562 if not partial:
562 if not partial:
563 recordupdates(repo, action, branchmerge)
563 recordupdates(repo, action, branchmerge)
564 repo.dirstate.setparents(fp1, fp2)
564 repo.dirstate.setparents(fp1, fp2)
565 if not branchmerge and not fastforward:
565 if not branchmerge and not fastforward:
566 repo.dirstate.setbranch(p2.branch())
566 repo.dirstate.setbranch(p2.branch())
567 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
567 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
568
568
569 return stats
569 return stats
570
570
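Taken together, update() runs a check phase, builds the action list (checkunknown, checkcollision, forgetremoved, manifestmerge), applies it, then records the result and moves the dirstate parents. A hedged usage sketch (repo and node are placeholders for a localrepository and a changeset id):

# plain 'hg update'-style call: no branch merge, no force, whole tree
stats = update(repo, node, branchmerge=False, force=False,
               partial=None, wlock=None)
updated, merged, removed, unresolved = stats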