Make repo locks recursive, eliminate all passing of lock/wlock
Matt Mackall
r4917:126f527b default
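The summary above describes the whole pattern of this diff: repository locks become recursive (reentrant), so functions stop accepting and forwarding lock/wlock arguments and instead acquire the locks they need themselves, releasing them with "del lock, wlock" in a finally block. The sketch below is only an illustration of that calling convention, built from toy stand-ins (_ToyRepo and _ToyLock are assumptions invented for this example, not Mercurial's real repository or lock classes).

import threading

class _ToyLock(object):
    '''Illustrative stand-in for a repository lock, not Mercurial's real class.'''
    def __init__(self, rlock):
        self._rlock = rlock
        self._rlock.acquire()

    def __del__(self):
        # The diff releases locks with "del lock, wlock" in finally blocks;
        # releasing on deletion mimics that behaviour for this sketch.
        self._rlock.release()

class _ToyRepo(object):
    def __init__(self):
        # threading.RLock is reentrant, i.e. "recursive": the same thread may
        # acquire it again without deadlocking.
        self._wl = threading.RLock()
        self._sl = threading.RLock()

    def wlock(self):
        return _ToyLock(self._wl)

    def lock(self):
        return _ToyLock(self._sl)

def _commit_like(repo):
    # New calling style: no wlock=/lock= parameters, just re-acquire locally.
    # Safe even though the caller already holds the lock, because it is recursive.
    wlock = repo.wlock()
    try:
        return "nested lock acquisition worked"
    finally:
        del wlock

def _fetch_like(repo):
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        return _commit_like(repo)
    finally:
        del lock, wlock   # same shape as the finally blocks added in this diff

print(_fetch_like(_ToyRepo()))

The same before/after shape repeats throughout both files below: callers such as fetch() and the mq queue methods now wrap their work in try/finally around locally acquired locks, and calls to repo.commit(), repo.status(), hg.clean() and hg.merge() no longer pass lock= or wlock= keyword arguments.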
@@ -1,98 +1,98 @@
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial.node import *
9 from mercurial.node import *
10 from mercurial import commands, cmdutil, hg, node, util
10 from mercurial import commands, cmdutil, hg, node, util
11
11
12 def fetch(ui, repo, source='default', **opts):
12 def fetch(ui, repo, source='default', **opts):
13 '''Pull changes from a remote repository, merge new changes if needed.
13 '''Pull changes from a remote repository, merge new changes if needed.
14
14
15 This finds all changes from the repository at the specified path
15 This finds all changes from the repository at the specified path
16 or URL and adds them to the local repository.
16 or URL and adds them to the local repository.
17
17
18 If the pulled changes add a new head, the head is automatically
18 If the pulled changes add a new head, the head is automatically
19 merged, and the result of the merge is committed. Otherwise, the
19 merged, and the result of the merge is committed. Otherwise, the
20 working directory is updated.'''
20 working directory is updated.'''
21
21
22 def postincoming(other, modheads, lock, wlock):
22 def postincoming(other, modheads):
23 if modheads == 0:
23 if modheads == 0:
24 return 0
24 return 0
25 if modheads == 1:
25 if modheads == 1:
26 return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
26 return hg.clean(repo, repo.changelog.tip())
27 newheads = repo.heads(parent)
27 newheads = repo.heads(parent)
28 newchildren = [n for n in repo.heads(parent) if n != parent]
28 newchildren = [n for n in repo.heads(parent) if n != parent]
29 newparent = parent
29 newparent = parent
30 if newchildren:
30 if newchildren:
31 newparent = newchildren[0]
31 newparent = newchildren[0]
32 hg.clean(repo, newparent, wlock=wlock)
32 hg.clean(repo, newparent)
33 newheads = [n for n in repo.heads() if n != newparent]
33 newheads = [n for n in repo.heads() if n != newparent]
34 err = False
34 err = False
35 if newheads:
35 if newheads:
36 ui.status(_('merging with new head %d:%s\n') %
36 ui.status(_('merging with new head %d:%s\n') %
37 (repo.changelog.rev(newheads[0]), short(newheads[0])))
37 (repo.changelog.rev(newheads[0]), short(newheads[0])))
38 err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
38 err = hg.merge(repo, newheads[0], remind=False)
39 if not err and len(newheads) > 1:
39 if not err and len(newheads) > 1:
40 ui.status(_('not merging with %d other new heads '
40 ui.status(_('not merging with %d other new heads '
41 '(use "hg heads" and "hg merge" to merge them)') %
41 '(use "hg heads" and "hg merge" to merge them)') %
42 (len(newheads) - 1))
42 (len(newheads) - 1))
43 if not err:
43 if not err:
44 mod, add, rem = repo.status(wlock=wlock)[:3]
44 mod, add, rem = repo.status()[:3]
45 message = (cmdutil.logmessage(opts) or
45 message = (cmdutil.logmessage(opts) or
46 (_('Automated merge with %s') % other.url()))
46 (_('Automated merge with %s') % other.url()))
47 n = repo.commit(mod + add + rem, message,
47 n = repo.commit(mod + add + rem, message,
48 opts['user'], opts['date'], lock=lock, wlock=wlock,
48 opts['user'], opts['date'],
49 force_editor=opts.get('force_editor'))
49 force_editor=opts.get('force_editor'))
50 ui.status(_('new changeset %d:%s merges remote changes '
50 ui.status(_('new changeset %d:%s merges remote changes '
51 'with local\n') % (repo.changelog.rev(n),
51 'with local\n') % (repo.changelog.rev(n),
52 short(n)))
52 short(n)))
53 def pull(lock, wlock):
53 def pull():
54 cmdutil.setremoteconfig(ui, opts)
54 cmdutil.setremoteconfig(ui, opts)
55
55
56 other = hg.repository(ui, ui.expandpath(source))
56 other = hg.repository(ui, ui.expandpath(source))
57 ui.status(_('pulling from %s\n') % ui.expandpath(source))
57 ui.status(_('pulling from %s\n') % ui.expandpath(source))
58 revs = None
58 revs = None
59 if opts['rev'] and not other.local():
59 if opts['rev'] and not other.local():
60 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
60 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
61 elif opts['rev']:
61 elif opts['rev']:
62 revs = [other.lookup(rev) for rev in opts['rev']]
62 revs = [other.lookup(rev) for rev in opts['rev']]
63 modheads = repo.pull(other, heads=revs, lock=lock)
63 modheads = repo.pull(other, heads=revs)
64 return postincoming(other, modheads, lock, wlock)
64 return postincoming(other, modheads)
65
65
66 parent, p2 = repo.dirstate.parents()
66 parent, p2 = repo.dirstate.parents()
67 if parent != repo.changelog.tip():
67 if parent != repo.changelog.tip():
68 raise util.Abort(_('working dir not at tip '
68 raise util.Abort(_('working dir not at tip '
69 '(use "hg update" to check out tip)'))
69 '(use "hg update" to check out tip)'))
70 if p2 != nullid:
70 if p2 != nullid:
71 raise util.Abort(_('outstanding uncommitted merge'))
71 raise util.Abort(_('outstanding uncommitted merge'))
72 wlock = lock = None
72 wlock = lock = None
73 try:
73 try:
74 wlock = repo.wlock()
74 wlock = repo.wlock()
75 lock = repo.lock()
75 lock = repo.lock()
76 mod, add, rem = repo.status(wlock=wlock)[:3]
76 mod, add, rem = repo.status()[:3]
77 if mod or add or rem:
77 if mod or add or rem:
78 raise util.Abort(_('outstanding uncommitted changes'))
78 raise util.Abort(_('outstanding uncommitted changes'))
79 if len(repo.heads()) > 1:
79 if len(repo.heads()) > 1:
80 raise util.Abort(_('multiple heads in this repository '
80 raise util.Abort(_('multiple heads in this repository '
81 '(use "hg heads" and "hg merge" to merge)'))
81 '(use "hg heads" and "hg merge" to merge)'))
82 return pull(lock, wlock)
82 return pull()
83 finally:
83 finally:
84 del lock, wlock
84 del lock, wlock
85
85
86 cmdtable = {
86 cmdtable = {
87 'fetch':
87 'fetch':
88 (fetch,
88 (fetch,
89 [('e', 'ssh', '', _('specify ssh command to use')),
89 [('e', 'ssh', '', _('specify ssh command to use')),
90 ('m', 'message', '', _('use <text> as commit message')),
90 ('m', 'message', '', _('use <text> as commit message')),
91 ('l', 'logfile', '', _('read the commit message from <file>')),
91 ('l', 'logfile', '', _('read the commit message from <file>')),
92 ('d', 'date', '', _('record datecode as commit date')),
92 ('d', 'date', '', _('record datecode as commit date')),
93 ('u', 'user', '', _('record user as commiter')),
93 ('u', 'user', '', _('record user as commiter')),
94 ('r', 'rev', [], _('a specific revision you would like to pull')),
94 ('r', 'rev', [], _('a specific revision you would like to pull')),
95 ('f', 'force-editor', None, _('edit commit message')),
95 ('f', 'force-editor', None, _('edit commit message')),
96 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
96 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
97 _('hg fetch [SOURCE]')),
97 _('hg fetch [SOURCE]')),
98 }
98 }
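As the docstring at the top of this file explains, fetch pulls from a remote repository and then either updates the working directory or, when the pull added a new head, merges it and commits the result. A rough sketch of that post-pull decision, with toy callables standing in for the real update and merge steps (after_pull, update and merge_and_commit are names invented for this illustration, not part of the extension):

def after_pull(modheads, update, merge_and_commit):
    '''modheads: number of heads added or changed by the pull, as returned by repo.pull().'''
    if modheads == 0:
        return 'no changes'
    if modheads == 1:
        update()             # corresponds to hg.clean(repo, repo.changelog.tip())
        return 'updated'
    merge_and_commit()       # corresponds to hg.merge() followed by repo.commit()
    return 'merged'

actions = []
print(after_pull(2, lambda: actions.append('update'),
                 lambda: actions.append('merge')))   # prints: merged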
@@ -1,2262 +1,2249 @@
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 from mercurial import repair
34 from mercurial import repair
35 import os, sys, re, errno
35 import os, sys, re, errno
36
36
37 commands.norepo += " qclone qversion"
37 commands.norepo += " qclone qversion"
38
38
39 # Patch names looks like unix-file names.
39 # Patch names looks like unix-file names.
40 # They must be joinable with queue directory and result in the patch path.
40 # They must be joinable with queue directory and result in the patch path.
41 normname = util.normpath
41 normname = util.normpath
42
42
43 class statusentry:
43 class statusentry:
44 def __init__(self, rev, name=None):
44 def __init__(self, rev, name=None):
45 if not name:
45 if not name:
46 fields = rev.split(':', 1)
46 fields = rev.split(':', 1)
47 if len(fields) == 2:
47 if len(fields) == 2:
48 self.rev, self.name = fields
48 self.rev, self.name = fields
49 else:
49 else:
50 self.rev, self.name = None, None
50 self.rev, self.name = None, None
51 else:
51 else:
52 self.rev, self.name = rev, name
52 self.rev, self.name = rev, name
53
53
54 def __str__(self):
54 def __str__(self):
55 return self.rev + ':' + self.name
55 return self.rev + ':' + self.name
56
56
57 class queue:
57 class queue:
58 def __init__(self, ui, path, patchdir=None):
58 def __init__(self, ui, path, patchdir=None):
59 self.basepath = path
59 self.basepath = path
60 self.path = patchdir or os.path.join(path, "patches")
60 self.path = patchdir or os.path.join(path, "patches")
61 self.opener = util.opener(self.path)
61 self.opener = util.opener(self.path)
62 self.ui = ui
62 self.ui = ui
63 self.applied = []
63 self.applied = []
64 self.full_series = []
64 self.full_series = []
65 self.applied_dirty = 0
65 self.applied_dirty = 0
66 self.series_dirty = 0
66 self.series_dirty = 0
67 self.series_path = "series"
67 self.series_path = "series"
68 self.status_path = "status"
68 self.status_path = "status"
69 self.guards_path = "guards"
69 self.guards_path = "guards"
70 self.active_guards = None
70 self.active_guards = None
71 self.guards_dirty = False
71 self.guards_dirty = False
72 self._diffopts = None
72 self._diffopts = None
73
73
74 if os.path.exists(self.join(self.series_path)):
74 if os.path.exists(self.join(self.series_path)):
75 self.full_series = self.opener(self.series_path).read().splitlines()
75 self.full_series = self.opener(self.series_path).read().splitlines()
76 self.parse_series()
76 self.parse_series()
77
77
78 if os.path.exists(self.join(self.status_path)):
78 if os.path.exists(self.join(self.status_path)):
79 lines = self.opener(self.status_path).read().splitlines()
79 lines = self.opener(self.status_path).read().splitlines()
80 self.applied = [statusentry(l) for l in lines]
80 self.applied = [statusentry(l) for l in lines]
81
81
82 def diffopts(self):
82 def diffopts(self):
83 if self._diffopts is None:
83 if self._diffopts is None:
84 self._diffopts = patch.diffopts(self.ui)
84 self._diffopts = patch.diffopts(self.ui)
85 return self._diffopts
85 return self._diffopts
86
86
87 def join(self, *p):
87 def join(self, *p):
88 return os.path.join(self.path, *p)
88 return os.path.join(self.path, *p)
89
89
90 def find_series(self, patch):
90 def find_series(self, patch):
91 pre = re.compile("(\s*)([^#]+)")
91 pre = re.compile("(\s*)([^#]+)")
92 index = 0
92 index = 0
93 for l in self.full_series:
93 for l in self.full_series:
94 m = pre.match(l)
94 m = pre.match(l)
95 if m:
95 if m:
96 s = m.group(2)
96 s = m.group(2)
97 s = s.rstrip()
97 s = s.rstrip()
98 if s == patch:
98 if s == patch:
99 return index
99 return index
100 index += 1
100 index += 1
101 return None
101 return None
102
102
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104
104
105 def parse_series(self):
105 def parse_series(self):
106 self.series = []
106 self.series = []
107 self.series_guards = []
107 self.series_guards = []
108 for l in self.full_series:
108 for l in self.full_series:
109 h = l.find('#')
109 h = l.find('#')
110 if h == -1:
110 if h == -1:
111 patch = l
111 patch = l
112 comment = ''
112 comment = ''
113 elif h == 0:
113 elif h == 0:
114 continue
114 continue
115 else:
115 else:
116 patch = l[:h]
116 patch = l[:h]
117 comment = l[h:]
117 comment = l[h:]
118 patch = patch.strip()
118 patch = patch.strip()
119 if patch:
119 if patch:
120 if patch in self.series:
120 if patch in self.series:
121 raise util.Abort(_('%s appears more than once in %s') %
121 raise util.Abort(_('%s appears more than once in %s') %
122 (patch, self.join(self.series_path)))
122 (patch, self.join(self.series_path)))
123 self.series.append(patch)
123 self.series.append(patch)
124 self.series_guards.append(self.guard_re.findall(comment))
124 self.series_guards.append(self.guard_re.findall(comment))
125
125
126 def check_guard(self, guard):
126 def check_guard(self, guard):
127 bad_chars = '# \t\r\n\f'
127 bad_chars = '# \t\r\n\f'
128 first = guard[0]
128 first = guard[0]
129 for c in '-+':
129 for c in '-+':
130 if first == c:
130 if first == c:
131 return (_('guard %r starts with invalid character: %r') %
131 return (_('guard %r starts with invalid character: %r') %
132 (guard, c))
132 (guard, c))
133 for c in bad_chars:
133 for c in bad_chars:
134 if c in guard:
134 if c in guard:
135 return _('invalid character in guard %r: %r') % (guard, c)
135 return _('invalid character in guard %r: %r') % (guard, c)
136
136
137 def set_active(self, guards):
137 def set_active(self, guards):
138 for guard in guards:
138 for guard in guards:
139 bad = self.check_guard(guard)
139 bad = self.check_guard(guard)
140 if bad:
140 if bad:
141 raise util.Abort(bad)
141 raise util.Abort(bad)
142 guards = dict.fromkeys(guards).keys()
142 guards = dict.fromkeys(guards).keys()
143 guards.sort()
143 guards.sort()
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 self.active_guards = guards
145 self.active_guards = guards
146 self.guards_dirty = True
146 self.guards_dirty = True
147
147
148 def active(self):
148 def active(self):
149 if self.active_guards is None:
149 if self.active_guards is None:
150 self.active_guards = []
150 self.active_guards = []
151 try:
151 try:
152 guards = self.opener(self.guards_path).read().split()
152 guards = self.opener(self.guards_path).read().split()
153 except IOError, err:
153 except IOError, err:
154 if err.errno != errno.ENOENT: raise
154 if err.errno != errno.ENOENT: raise
155 guards = []
155 guards = []
156 for i, guard in enumerate(guards):
156 for i, guard in enumerate(guards):
157 bad = self.check_guard(guard)
157 bad = self.check_guard(guard)
158 if bad:
158 if bad:
159 self.ui.warn('%s:%d: %s\n' %
159 self.ui.warn('%s:%d: %s\n' %
160 (self.join(self.guards_path), i + 1, bad))
160 (self.join(self.guards_path), i + 1, bad))
161 else:
161 else:
162 self.active_guards.append(guard)
162 self.active_guards.append(guard)
163 return self.active_guards
163 return self.active_guards
164
164
165 def set_guards(self, idx, guards):
165 def set_guards(self, idx, guards):
166 for g in guards:
166 for g in guards:
167 if len(g) < 2:
167 if len(g) < 2:
168 raise util.Abort(_('guard %r too short') % g)
168 raise util.Abort(_('guard %r too short') % g)
169 if g[0] not in '-+':
169 if g[0] not in '-+':
170 raise util.Abort(_('guard %r starts with invalid char') % g)
170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 bad = self.check_guard(g[1:])
171 bad = self.check_guard(g[1:])
172 if bad:
172 if bad:
173 raise util.Abort(bad)
173 raise util.Abort(bad)
174 drop = self.guard_re.sub('', self.full_series[idx])
174 drop = self.guard_re.sub('', self.full_series[idx])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 self.parse_series()
176 self.parse_series()
177 self.series_dirty = True
177 self.series_dirty = True
178
178
179 def pushable(self, idx):
179 def pushable(self, idx):
180 if isinstance(idx, str):
180 if isinstance(idx, str):
181 idx = self.series.index(idx)
181 idx = self.series.index(idx)
182 patchguards = self.series_guards[idx]
182 patchguards = self.series_guards[idx]
183 if not patchguards:
183 if not patchguards:
184 return True, None
184 return True, None
185 default = False
185 default = False
186 guards = self.active()
186 guards = self.active()
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 if exactneg:
188 if exactneg:
189 return False, exactneg[0]
189 return False, exactneg[0]
190 pos = [g for g in patchguards if g[0] == '+']
190 pos = [g for g in patchguards if g[0] == '+']
191 exactpos = [g for g in pos if g[1:] in guards]
191 exactpos = [g for g in pos if g[1:] in guards]
192 if pos:
192 if pos:
193 if exactpos:
193 if exactpos:
194 return True, exactpos[0]
194 return True, exactpos[0]
195 return False, pos
195 return False, pos
196 return True, ''
196 return True, ''
197
197
198 def explain_pushable(self, idx, all_patches=False):
198 def explain_pushable(self, idx, all_patches=False):
199 write = all_patches and self.ui.write or self.ui.warn
199 write = all_patches and self.ui.write or self.ui.warn
200 if all_patches or self.ui.verbose:
200 if all_patches or self.ui.verbose:
201 if isinstance(idx, str):
201 if isinstance(idx, str):
202 idx = self.series.index(idx)
202 idx = self.series.index(idx)
203 pushable, why = self.pushable(idx)
203 pushable, why = self.pushable(idx)
204 if all_patches and pushable:
204 if all_patches and pushable:
205 if why is None:
205 if why is None:
206 write(_('allowing %s - no guards in effect\n') %
206 write(_('allowing %s - no guards in effect\n') %
207 self.series[idx])
207 self.series[idx])
208 else:
208 else:
209 if not why:
209 if not why:
210 write(_('allowing %s - no matching negative guards\n') %
210 write(_('allowing %s - no matching negative guards\n') %
211 self.series[idx])
211 self.series[idx])
212 else:
212 else:
213 write(_('allowing %s - guarded by %r\n') %
213 write(_('allowing %s - guarded by %r\n') %
214 (self.series[idx], why))
214 (self.series[idx], why))
215 if not pushable:
215 if not pushable:
216 if why:
216 if why:
217 write(_('skipping %s - guarded by %r\n') %
217 write(_('skipping %s - guarded by %r\n') %
218 (self.series[idx], why))
218 (self.series[idx], why))
219 else:
219 else:
220 write(_('skipping %s - no matching guards\n') %
220 write(_('skipping %s - no matching guards\n') %
221 self.series[idx])
221 self.series[idx])
222
222
223 def save_dirty(self):
223 def save_dirty(self):
224 def write_list(items, path):
224 def write_list(items, path):
225 fp = self.opener(path, 'w')
225 fp = self.opener(path, 'w')
226 for i in items:
226 for i in items:
227 print >> fp, i
227 print >> fp, i
228 fp.close()
228 fp.close()
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232
232
233 def readheaders(self, patch):
233 def readheaders(self, patch):
234 def eatdiff(lines):
234 def eatdiff(lines):
235 while lines:
235 while lines:
236 l = lines[-1]
236 l = lines[-1]
237 if (l.startswith("diff -") or
237 if (l.startswith("diff -") or
238 l.startswith("Index:") or
238 l.startswith("Index:") or
239 l.startswith("===========")):
239 l.startswith("===========")):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243 def eatempty(lines):
243 def eatempty(lines):
244 while lines:
244 while lines:
245 l = lines[-1]
245 l = lines[-1]
246 if re.match('\s*$', l):
246 if re.match('\s*$', l):
247 del lines[-1]
247 del lines[-1]
248 else:
248 else:
249 break
249 break
250
250
251 pf = self.join(patch)
251 pf = self.join(patch)
252 message = []
252 message = []
253 comments = []
253 comments = []
254 user = None
254 user = None
255 date = None
255 date = None
256 format = None
256 format = None
257 subject = None
257 subject = None
258 diffstart = 0
258 diffstart = 0
259
259
260 for line in file(pf):
260 for line in file(pf):
261 line = line.rstrip()
261 line = line.rstrip()
262 if line.startswith('diff --git'):
262 if line.startswith('diff --git'):
263 diffstart = 2
263 diffstart = 2
264 break
264 break
265 if diffstart:
265 if diffstart:
266 if line.startswith('+++ '):
266 if line.startswith('+++ '):
267 diffstart = 2
267 diffstart = 2
268 break
268 break
269 if line.startswith("--- "):
269 if line.startswith("--- "):
270 diffstart = 1
270 diffstart = 1
271 continue
271 continue
272 elif format == "hgpatch":
272 elif format == "hgpatch":
273 # parse values when importing the result of an hg export
273 # parse values when importing the result of an hg export
274 if line.startswith("# User "):
274 if line.startswith("# User "):
275 user = line[7:]
275 user = line[7:]
276 elif line.startswith("# Date "):
276 elif line.startswith("# Date "):
277 date = line[7:]
277 date = line[7:]
278 elif not line.startswith("# ") and line:
278 elif not line.startswith("# ") and line:
279 message.append(line)
279 message.append(line)
280 format = None
280 format = None
281 elif line == '# HG changeset patch':
281 elif line == '# HG changeset patch':
282 format = "hgpatch"
282 format = "hgpatch"
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 line.startswith("subject: "))):
284 line.startswith("subject: "))):
285 subject = line[9:]
285 subject = line[9:]
286 format = "tag"
286 format = "tag"
287 elif (format != "tagdone" and (line.startswith("From: ") or
287 elif (format != "tagdone" and (line.startswith("From: ") or
288 line.startswith("from: "))):
288 line.startswith("from: "))):
289 user = line[6:]
289 user = line[6:]
290 format = "tag"
290 format = "tag"
291 elif format == "tag" and line == "":
291 elif format == "tag" and line == "":
292 # when looking for tags (subject: from: etc) they
292 # when looking for tags (subject: from: etc) they
293 # end once you find a blank line in the source
293 # end once you find a blank line in the source
294 format = "tagdone"
294 format = "tagdone"
295 elif message or line:
295 elif message or line:
296 message.append(line)
296 message.append(line)
297 comments.append(line)
297 comments.append(line)
298
298
299 eatdiff(message)
299 eatdiff(message)
300 eatdiff(comments)
300 eatdiff(comments)
301 eatempty(message)
301 eatempty(message)
302 eatempty(comments)
302 eatempty(comments)
303
303
304 # make sure message isn't empty
304 # make sure message isn't empty
305 if format and format.startswith("tag") and subject:
305 if format and format.startswith("tag") and subject:
306 message.insert(0, "")
306 message.insert(0, "")
307 message.insert(0, subject)
307 message.insert(0, subject)
308 return (message, comments, user, date, diffstart > 1)
308 return (message, comments, user, date, diffstart > 1)
309
309
310 def removeundo(self, repo):
310 def removeundo(self, repo):
311 undo = repo.sjoin('undo')
311 undo = repo.sjoin('undo')
312 if not os.path.exists(undo):
312 if not os.path.exists(undo):
313 return
313 return
314 try:
314 try:
315 os.unlink(undo)
315 os.unlink(undo)
316 except OSError, inst:
316 except OSError, inst:
317 self.ui.warn('error removing undo: %s\n' % str(inst))
317 self.ui.warn('error removing undo: %s\n' % str(inst))
318
318
319 def printdiff(self, repo, node1, node2=None, files=None,
319 def printdiff(self, repo, node1, node2=None, files=None,
320 fp=None, changes=None, opts={}):
320 fp=None, changes=None, opts={}):
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322
322
323 patch.diff(repo, node1, node2, fns, match=matchfn,
323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 fp=fp, changes=changes, opts=self.diffopts())
324 fp=fp, changes=changes, opts=self.diffopts())
325
325
326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
326 def mergeone(self, repo, mergeq, head, patch, rev):
327 # first try just applying the patch
327 # first try just applying the patch
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 strict=True, merge=rev, wlock=wlock)
329 strict=True, merge=rev)
330
330
331 if err == 0:
331 if err == 0:
332 return (err, n)
332 return (err, n)
333
333
334 if n is None:
334 if n is None:
335 raise util.Abort(_("apply failed for patch %s") % patch)
335 raise util.Abort(_("apply failed for patch %s") % patch)
336
336
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338
338
339 # apply failed, strip away that rev and merge.
339 # apply failed, strip away that rev and merge.
340 hg.clean(repo, head, wlock=wlock)
340 hg.clean(repo, head)
341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
341 self.strip(repo, n, update=False, backup='strip')
342
342
343 ctx = repo.changectx(rev)
343 ctx = repo.changectx(rev)
344 ret = hg.merge(repo, rev, wlock=wlock)
344 ret = hg.merge(repo, rev)
345 if ret:
345 if ret:
346 raise util.Abort(_("update returned %d") % ret)
346 raise util.Abort(_("update returned %d") % ret)
347 n = repo.commit(None, ctx.description(), ctx.user(),
347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
348 force=1, wlock=wlock)
349 if n == None:
348 if n == None:
350 raise util.Abort(_("repo commit failed"))
349 raise util.Abort(_("repo commit failed"))
351 try:
350 try:
352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 except:
352 except:
354 raise util.Abort(_("unable to read %s") % patch)
353 raise util.Abort(_("unable to read %s") % patch)
355
354
356 patchf = self.opener(patch, "w")
355 patchf = self.opener(patch, "w")
357 if comments:
356 if comments:
358 comments = "\n".join(comments) + '\n\n'
357 comments = "\n".join(comments) + '\n\n'
359 patchf.write(comments)
358 patchf.write(comments)
360 self.printdiff(repo, head, n, fp=patchf)
359 self.printdiff(repo, head, n, fp=patchf)
361 patchf.close()
360 patchf.close()
362 self.removeundo(repo)
361 self.removeundo(repo)
363 return (0, n)
362 return (0, n)
364
363
365 def qparents(self, repo, rev=None):
364 def qparents(self, repo, rev=None):
366 if rev is None:
365 if rev is None:
367 (p1, p2) = repo.dirstate.parents()
366 (p1, p2) = repo.dirstate.parents()
368 if p2 == revlog.nullid:
367 if p2 == revlog.nullid:
369 return p1
368 return p1
370 if len(self.applied) == 0:
369 if len(self.applied) == 0:
371 return None
370 return None
372 return revlog.bin(self.applied[-1].rev)
371 return revlog.bin(self.applied[-1].rev)
373 pp = repo.changelog.parents(rev)
372 pp = repo.changelog.parents(rev)
374 if pp[1] != revlog.nullid:
373 if pp[1] != revlog.nullid:
375 arevs = [ x.rev for x in self.applied ]
374 arevs = [ x.rev for x in self.applied ]
376 p0 = revlog.hex(pp[0])
375 p0 = revlog.hex(pp[0])
377 p1 = revlog.hex(pp[1])
376 p1 = revlog.hex(pp[1])
378 if p0 in arevs:
377 if p0 in arevs:
379 return pp[0]
378 return pp[0]
380 if p1 in arevs:
379 if p1 in arevs:
381 return pp[1]
380 return pp[1]
382 return pp[0]
381 return pp[0]
383
382
384 def mergepatch(self, repo, mergeq, series, wlock):
383 def mergepatch(self, repo, mergeq, series):
385 if len(self.applied) == 0:
384 if len(self.applied) == 0:
386 # each of the patches merged in will have two parents. This
385 # each of the patches merged in will have two parents. This
387 # can confuse the qrefresh, qdiff, and strip code because it
386 # can confuse the qrefresh, qdiff, and strip code because it
388 # needs to know which parent is actually in the patch queue.
387 # needs to know which parent is actually in the patch queue.
389 # so, we insert a merge marker with only one parent. This way
388 # so, we insert a merge marker with only one parent. This way
390 # the first patch in the queue is never a merge patch
389 # the first patch in the queue is never a merge patch
391 #
390 #
392 pname = ".hg.patches.merge.marker"
391 pname = ".hg.patches.merge.marker"
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
394 wlock=wlock)
395 self.removeundo(repo)
393 self.removeundo(repo)
396 self.applied.append(statusentry(revlog.hex(n), pname))
394 self.applied.append(statusentry(revlog.hex(n), pname))
397 self.applied_dirty = 1
395 self.applied_dirty = 1
398
396
399 head = self.qparents(repo)
397 head = self.qparents(repo)
400
398
401 for patch in series:
399 for patch in series:
402 patch = mergeq.lookup(patch, strict=True)
400 patch = mergeq.lookup(patch, strict=True)
403 if not patch:
401 if not patch:
404 self.ui.warn("patch %s does not exist\n" % patch)
402 self.ui.warn("patch %s does not exist\n" % patch)
405 return (1, None)
403 return (1, None)
406 pushable, reason = self.pushable(patch)
404 pushable, reason = self.pushable(patch)
407 if not pushable:
405 if not pushable:
408 self.explain_pushable(patch, all_patches=True)
406 self.explain_pushable(patch, all_patches=True)
409 continue
407 continue
410 info = mergeq.isapplied(patch)
408 info = mergeq.isapplied(patch)
411 if not info:
409 if not info:
412 self.ui.warn("patch %s is not applied\n" % patch)
410 self.ui.warn("patch %s is not applied\n" % patch)
413 return (1, None)
411 return (1, None)
414 rev = revlog.bin(info[1])
412 rev = revlog.bin(info[1])
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
416 if head:
414 if head:
417 self.applied.append(statusentry(revlog.hex(head), patch))
415 self.applied.append(statusentry(revlog.hex(head), patch))
418 self.applied_dirty = 1
416 self.applied_dirty = 1
419 if err:
417 if err:
420 return (err, head)
418 return (err, head)
421 self.save_dirty()
419 self.save_dirty()
422 return (0, head)
420 return (0, head)
423
421
424 def patch(self, repo, patchfile):
422 def patch(self, repo, patchfile):
425 '''Apply patchfile to the working directory.
423 '''Apply patchfile to the working directory.
426 patchfile: file name of patch'''
424 patchfile: file name of patch'''
427 files = {}
425 files = {}
428 try:
426 try:
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 files=files)
428 files=files)
431 except Exception, inst:
429 except Exception, inst:
432 self.ui.note(str(inst) + '\n')
430 self.ui.note(str(inst) + '\n')
433 if not self.ui.verbose:
431 if not self.ui.verbose:
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
432 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 return (False, files, False)
433 return (False, files, False)
436
434
437 return (True, files, fuzz)
435 return (True, files, fuzz)
438
436
439 def apply(self, repo, series, list=False, update_status=True,
437 def apply(self, repo, series, list=False, update_status=True,
440 strict=False, patchdir=None, merge=None, wlock=None,
438 strict=False, patchdir=None, merge=None, all_files={}):
441 all_files={}):
439 wlock = lock = tr = None
442 lock = tr = None
443 try:
440 try:
444 if not wlock:
445 wlock = repo.wlock()
441 wlock = repo.wlock()
446 lock = repo.lock()
442 lock = repo.lock()
447 tr = repo.transaction()
443 tr = repo.transaction()
448 try:
444 try:
449 ret = self._apply(tr, repo, series, list, update_status,
445 ret = self._apply(tr, repo, series, list, update_status,
450 strict, patchdir, merge, wlock,
446 strict, patchdir, merge, all_files=all_files)
451 lock=lock, all_files=all_files)
452 tr.close()
447 tr.close()
453 self.save_dirty()
448 self.save_dirty()
454 return ret
449 return ret
455 except:
450 except:
456 try:
451 try:
457 tr.abort()
452 tr.abort()
458 finally:
453 finally:
459 repo.invalidate()
454 repo.invalidate()
460 repo.dirstate.invalidate()
455 repo.dirstate.invalidate()
461 raise
456 raise
462 finally:
457 finally:
463 del lock, wlock, tr
458 del lock, wlock, tr
464
459
465 def _apply(self, tr, repo, series, list=False, update_status=True,
460 def _apply(self, tr, repo, series, list=False, update_status=True,
466 strict=False, patchdir=None, merge=None, wlock=None,
461 strict=False, patchdir=None, merge=None, all_files={}):
467 lock=None, all_files={}):
468 # TODO unify with commands.py
462 # TODO unify with commands.py
469 if not patchdir:
463 if not patchdir:
470 patchdir = self.path
464 patchdir = self.path
471 err = 0
465 err = 0
472 n = None
466 n = None
473 for patchname in series:
467 for patchname in series:
474 pushable, reason = self.pushable(patchname)
468 pushable, reason = self.pushable(patchname)
475 if not pushable:
469 if not pushable:
476 self.explain_pushable(patchname, all_patches=True)
470 self.explain_pushable(patchname, all_patches=True)
477 continue
471 continue
478 self.ui.warn("applying %s\n" % patchname)
472 self.ui.warn("applying %s\n" % patchname)
479 pf = os.path.join(patchdir, patchname)
473 pf = os.path.join(patchdir, patchname)
480
474
481 try:
475 try:
482 message, comments, user, date, patchfound = self.readheaders(patchname)
476 message, comments, user, date, patchfound = self.readheaders(patchname)
483 except:
477 except:
484 self.ui.warn("Unable to read %s\n" % patchname)
478 self.ui.warn("Unable to read %s\n" % patchname)
485 err = 1
479 err = 1
486 break
480 break
487
481
488 if not message:
482 if not message:
489 message = "imported patch %s\n" % patchname
483 message = "imported patch %s\n" % patchname
490 else:
484 else:
491 if list:
485 if list:
492 message.append("\nimported patch %s" % patchname)
486 message.append("\nimported patch %s" % patchname)
493 message = '\n'.join(message)
487 message = '\n'.join(message)
494
488
495 (patcherr, files, fuzz) = self.patch(repo, pf)
489 (patcherr, files, fuzz) = self.patch(repo, pf)
496 all_files.update(files)
490 all_files.update(files)
497 patcherr = not patcherr
491 patcherr = not patcherr
498
492
499 if merge and files:
493 if merge and files:
500 # Mark as removed/merged and update dirstate parent info
494 # Mark as removed/merged and update dirstate parent info
501 removed = []
495 removed = []
502 merged = []
496 merged = []
503 for f in files:
497 for f in files:
504 if os.path.exists(repo.wjoin(f)):
498 if os.path.exists(repo.wjoin(f)):
505 merged.append(f)
499 merged.append(f)
506 else:
500 else:
507 removed.append(f)
501 removed.append(f)
508 for f in removed:
502 for f in removed:
509 repo.dirstate.remove(f)
503 repo.dirstate.remove(f)
510 for f in merged:
504 for f in merged:
511 repo.dirstate.merge(f)
505 repo.dirstate.merge(f)
512 p1, p2 = repo.dirstate.parents()
506 p1, p2 = repo.dirstate.parents()
513 repo.dirstate.setparents(p1, merge)
507 repo.dirstate.setparents(p1, merge)
514 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
508 files = patch.updatedir(self.ui, repo, files)
515 n = repo.commit(files, message, user, date, force=1, lock=lock,
509 n = repo.commit(files, message, user, date, force=1)
516 wlock=wlock)
517
510
518 if n == None:
511 if n == None:
519 raise util.Abort(_("repo commit failed"))
512 raise util.Abort(_("repo commit failed"))
520
513
521 if update_status:
514 if update_status:
522 self.applied.append(statusentry(revlog.hex(n), patchname))
515 self.applied.append(statusentry(revlog.hex(n), patchname))
523
516
524 if patcherr:
517 if patcherr:
525 if not patchfound:
518 if not patchfound:
526 self.ui.warn("patch %s is empty\n" % patchname)
519 self.ui.warn("patch %s is empty\n" % patchname)
527 err = 0
520 err = 0
528 else:
521 else:
529 self.ui.warn("patch failed, rejects left in working dir\n")
522 self.ui.warn("patch failed, rejects left in working dir\n")
530 err = 1
523 err = 1
531 break
524 break
532
525
533 if fuzz and strict:
526 if fuzz and strict:
534 self.ui.warn("fuzz found when applying patch, stopping\n")
527 self.ui.warn("fuzz found when applying patch, stopping\n")
535 err = 1
528 err = 1
536 break
529 break
537 self.removeundo(repo)
530 self.removeundo(repo)
538 return (err, n)
531 return (err, n)
539
532
540 def delete(self, repo, patches, opts):
533 def delete(self, repo, patches, opts):
541 if not patches and not opts.get('rev'):
534 if not patches and not opts.get('rev'):
542 raise util.Abort(_('qdelete requires at least one revision or '
535 raise util.Abort(_('qdelete requires at least one revision or '
543 'patch name'))
536 'patch name'))
544
537
545 realpatches = []
538 realpatches = []
546 for patch in patches:
539 for patch in patches:
547 patch = self.lookup(patch, strict=True)
540 patch = self.lookup(patch, strict=True)
548 info = self.isapplied(patch)
541 info = self.isapplied(patch)
549 if info:
542 if info:
550 raise util.Abort(_("cannot delete applied patch %s") % patch)
543 raise util.Abort(_("cannot delete applied patch %s") % patch)
551 if patch not in self.series:
544 if patch not in self.series:
552 raise util.Abort(_("patch %s not in series file") % patch)
545 raise util.Abort(_("patch %s not in series file") % patch)
553 realpatches.append(patch)
546 realpatches.append(patch)
554
547
555 appliedbase = 0
548 appliedbase = 0
556 if opts.get('rev'):
549 if opts.get('rev'):
557 if not self.applied:
550 if not self.applied:
558 raise util.Abort(_('no patches applied'))
551 raise util.Abort(_('no patches applied'))
559 revs = cmdutil.revrange(repo, opts['rev'])
552 revs = cmdutil.revrange(repo, opts['rev'])
560 if len(revs) > 1 and revs[0] > revs[1]:
553 if len(revs) > 1 and revs[0] > revs[1]:
561 revs.reverse()
554 revs.reverse()
562 for rev in revs:
555 for rev in revs:
563 if appliedbase >= len(self.applied):
556 if appliedbase >= len(self.applied):
564 raise util.Abort(_("revision %d is not managed") % rev)
557 raise util.Abort(_("revision %d is not managed") % rev)
565
558
566 base = revlog.bin(self.applied[appliedbase].rev)
559 base = revlog.bin(self.applied[appliedbase].rev)
567 node = repo.changelog.node(rev)
560 node = repo.changelog.node(rev)
568 if node != base:
561 if node != base:
569 raise util.Abort(_("cannot delete revision %d above "
562 raise util.Abort(_("cannot delete revision %d above "
570 "applied patches") % rev)
563 "applied patches") % rev)
571 realpatches.append(self.applied[appliedbase].name)
564 realpatches.append(self.applied[appliedbase].name)
572 appliedbase += 1
565 appliedbase += 1
573
566
574 if not opts.get('keep'):
567 if not opts.get('keep'):
575 r = self.qrepo()
568 r = self.qrepo()
576 if r:
569 if r:
577 r.remove(realpatches, True)
570 r.remove(realpatches, True)
578 else:
571 else:
579 for p in realpatches:
572 for p in realpatches:
580 os.unlink(self.join(p))
573 os.unlink(self.join(p))
581
574
582 if appliedbase:
575 if appliedbase:
583 del self.applied[:appliedbase]
576 del self.applied[:appliedbase]
584 self.applied_dirty = 1
577 self.applied_dirty = 1
585 indices = [self.find_series(p) for p in realpatches]
578 indices = [self.find_series(p) for p in realpatches]
586 indices.sort()
579 indices.sort()
587 for i in indices[-1::-1]:
580 for i in indices[-1::-1]:
588 del self.full_series[i]
581 del self.full_series[i]
589 self.parse_series()
582 self.parse_series()
590 self.series_dirty = 1
583 self.series_dirty = 1
591
584
592 def check_toppatch(self, repo):
585 def check_toppatch(self, repo):
593 if len(self.applied) > 0:
586 if len(self.applied) > 0:
594 top = revlog.bin(self.applied[-1].rev)
587 top = revlog.bin(self.applied[-1].rev)
595 pp = repo.dirstate.parents()
588 pp = repo.dirstate.parents()
596 if top not in pp:
589 if top not in pp:
597 raise util.Abort(_("queue top not at same revision as working directory"))
590 raise util.Abort(_("queue top not at same revision as working directory"))
598 return top
591 return top
599 return None
592 return None
600 def check_localchanges(self, repo, force=False, refresh=True):
593 def check_localchanges(self, repo, force=False, refresh=True):
601 m, a, r, d = repo.status()[:4]
594 m, a, r, d = repo.status()[:4]
602 if m or a or r or d:
595 if m or a or r or d:
603 if not force:
596 if not force:
604 if refresh:
597 if refresh:
605 raise util.Abort(_("local changes found, refresh first"))
598 raise util.Abort(_("local changes found, refresh first"))
606 else:
599 else:
607 raise util.Abort(_("local changes found"))
600 raise util.Abort(_("local changes found"))
608 return m, a, r, d
601 return m, a, r, d
609
602
610 def new(self, repo, patch, *pats, **opts):
603 def new(self, repo, patch, *pats, **opts):
611 msg = opts.get('msg')
604 msg = opts.get('msg')
612 force = opts.get('force')
605 force = opts.get('force')
613 if os.path.exists(self.join(patch)):
606 if os.path.exists(self.join(patch)):
614 raise util.Abort(_('patch "%s" already exists') % patch)
607 raise util.Abort(_('patch "%s" already exists') % patch)
615 if opts.get('include') or opts.get('exclude') or pats:
608 if opts.get('include') or opts.get('exclude') or pats:
616 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
609 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
617 m, a, r, d = repo.status(files=fns, match=match)[:4]
610 m, a, r, d = repo.status(files=fns, match=match)[:4]
618 else:
611 else:
619 m, a, r, d = self.check_localchanges(repo, force)
612 m, a, r, d = self.check_localchanges(repo, force)
620 commitfiles = m + a + r
613 commitfiles = m + a + r
621 self.check_toppatch(repo)
614 self.check_toppatch(repo)
622 wlock = repo.wlock()
615 wlock = repo.wlock()
623 try:
616 try:
624 insert = self.full_series_end()
617 insert = self.full_series_end()
625 if msg:
618 if msg:
626 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
619 n = repo.commit(commitfiles, msg, force=True)
627 else:
620 else:
628 n = repo.commit(commitfiles,
621 n = repo.commit(commitfiles, "[mq]: %s" % patch, force=True)
629 "[mq]: %s" % patch, force=True, wlock=wlock)
630 if n == None:
622 if n == None:
631 raise util.Abort(_("repo commit failed"))
623 raise util.Abort(_("repo commit failed"))
632 self.full_series[insert:insert] = [patch]
624 self.full_series[insert:insert] = [patch]
633 self.applied.append(statusentry(revlog.hex(n), patch))
625 self.applied.append(statusentry(revlog.hex(n), patch))
634 self.parse_series()
626 self.parse_series()
635 self.series_dirty = 1
627 self.series_dirty = 1
636 self.applied_dirty = 1
628 self.applied_dirty = 1
637 p = self.opener(patch, "w")
629 p = self.opener(patch, "w")
638 if msg:
630 if msg:
639 msg = msg + "\n"
631 msg = msg + "\n"
640 p.write(msg)
632 p.write(msg)
641 p.close()
633 p.close()
642 wlock = None
634 wlock = None
643 r = self.qrepo()
635 r = self.qrepo()
644 if r: r.add([patch])
636 if r: r.add([patch])
645 if commitfiles:
637 if commitfiles:
646 self.refresh(repo, short=True)
638 self.refresh(repo, short=True)
647 self.removeundo(repo)
639 self.removeundo(repo)
648 finally:
640 finally:
649 del wlock
641 del wlock
650
642
651 def strip(self, repo, rev, update=True, backup="all", wlock=None):
643 def strip(self, repo, rev, update=True, backup="all"):
652 lock = None
644 wlock = lock = None
653 try:
645 try:
654 if not wlock:
655 wlock = repo.wlock()
646 wlock = repo.wlock()
656 lock = repo.lock()
647 lock = repo.lock()
657
648
658 if update:
649 if update:
659 self.check_localchanges(repo, refresh=False)
650 self.check_localchanges(repo, refresh=False)
660 urev = self.qparents(repo, rev)
651 urev = self.qparents(repo, rev)
661 hg.clean(repo, urev, wlock=wlock)
652 hg.clean(repo, urev)
662 repo.dirstate.write()
653 repo.dirstate.write()
663
654
664 self.removeundo(repo)
655 self.removeundo(repo)
665 repair.strip(self.ui, repo, rev, backup)
656 repair.strip(self.ui, repo, rev, backup)
666 finally:
657 finally:
667 del lock, wlock
658 del lock, wlock
668
659
669 def isapplied(self, patch):
660 def isapplied(self, patch):
670 """returns (index, rev, patch)"""
661 """returns (index, rev, patch)"""
671 for i in xrange(len(self.applied)):
662 for i in xrange(len(self.applied)):
672 a = self.applied[i]
663 a = self.applied[i]
673 if a.name == patch:
664 if a.name == patch:
674 return (i, a.rev, a.name)
665 return (i, a.rev, a.name)
675 return None
666 return None
676
667
677 # if the exact patch name does not exist, we try a few
668 # if the exact patch name does not exist, we try a few
678 # variations. If strict is passed, we try only #1
669 # variations. If strict is passed, we try only #1
679 #
670 #
680 # 1) a number to indicate an offset in the series file
671 # 1) a number to indicate an offset in the series file
681 # 2) a unique substring of the patch name was given
672 # 2) a unique substring of the patch name was given
682 # 3) patchname[-+]num to indicate an offset in the series file
673 # 3) patchname[-+]num to indicate an offset in the series file
683 def lookup(self, patch, strict=False):
674 def lookup(self, patch, strict=False):
684 patch = patch and str(patch)
675 patch = patch and str(patch)
685
676
686 def partial_name(s):
677 def partial_name(s):
687 if s in self.series:
678 if s in self.series:
688 return s
679 return s
689 matches = [x for x in self.series if s in x]
680 matches = [x for x in self.series if s in x]
690 if len(matches) > 1:
681 if len(matches) > 1:
691 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
682 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
692 for m in matches:
683 for m in matches:
693 self.ui.warn(' %s\n' % m)
684 self.ui.warn(' %s\n' % m)
694 return None
685 return None
695 if matches:
686 if matches:
696 return matches[0]
687 return matches[0]
697 if len(self.series) > 0 and len(self.applied) > 0:
688 if len(self.series) > 0 and len(self.applied) > 0:
698 if s == 'qtip':
689 if s == 'qtip':
699 return self.series[self.series_end(True)-1]
690 return self.series[self.series_end(True)-1]
700 if s == 'qbase':
691 if s == 'qbase':
701 return self.series[0]
692 return self.series[0]
702 return None
693 return None
703 if patch == None:
694 if patch == None:
704 return None
695 return None
705
696
706 # we don't want to return a partial match until we make
697 # we don't want to return a partial match until we make
707 # sure the file name passed in does not exist (checked below)
698 # sure the file name passed in does not exist (checked below)
708 res = partial_name(patch)
699 res = partial_name(patch)
709 if res and res == patch:
700 if res and res == patch:
710 return res
701 return res
711
702
712 if not os.path.isfile(self.join(patch)):
703 if not os.path.isfile(self.join(patch)):
713 try:
704 try:
714 sno = int(patch)
705 sno = int(patch)
715 except(ValueError, OverflowError):
706 except(ValueError, OverflowError):
716 pass
707 pass
717 else:
708 else:
718 if sno < len(self.series):
709 if sno < len(self.series):
719 return self.series[sno]
710 return self.series[sno]
720 if not strict:
711 if not strict:
721 # return any partial match made above
712 # return any partial match made above
722 if res:
713 if res:
723 return res
714 return res
724 minus = patch.rfind('-')
715 minus = patch.rfind('-')
725 if minus >= 0:
716 if minus >= 0:
726 res = partial_name(patch[:minus])
717 res = partial_name(patch[:minus])
727 if res:
718 if res:
728 i = self.series.index(res)
719 i = self.series.index(res)
729 try:
720 try:
730 off = int(patch[minus+1:] or 1)
721 off = int(patch[minus+1:] or 1)
731 except(ValueError, OverflowError):
722 except(ValueError, OverflowError):
732 pass
723 pass
733 else:
724 else:
734 if i - off >= 0:
725 if i - off >= 0:
735 return self.series[i - off]
726 return self.series[i - off]
736 plus = patch.rfind('+')
727 plus = patch.rfind('+')
737 if plus >= 0:
728 if plus >= 0:
738 res = partial_name(patch[:plus])
729 res = partial_name(patch[:plus])
739 if res:
730 if res:
740 i = self.series.index(res)
731 i = self.series.index(res)
741 try:
732 try:
742 off = int(patch[plus+1:] or 1)
733 off = int(patch[plus+1:] or 1)
743 except(ValueError, OverflowError):
734 except(ValueError, OverflowError):
744 pass
735 pass
745 else:
736 else:
746 if i + off < len(self.series):
737 if i + off < len(self.series):
747 return self.series[i + off]
738 return self.series[i + off]
748 raise util.Abort(_("patch %s not in series") % patch)
739 raise util.Abort(_("patch %s not in series") % patch)
749
740
750 def push(self, repo, patch=None, force=False, list=False,
741 def push(self, repo, patch=None, force=False, list=False,
751 mergeq=None, wlock=None):
742 mergeq=None):
752 if not wlock:
753 wlock = repo.wlock()
743 wlock = repo.wlock()
754 try:
744 try:
755 patch = self.lookup(patch)
745 patch = self.lookup(patch)
756 # Suppose our series file is: A B C and the current 'top'
746 # Suppose our series file is: A B C and the current 'top'
757 # patch is B. qpush C should be performed (moving forward)
747 # patch is B. qpush C should be performed (moving forward)
758 # qpush B is a NOP (no change) qpush A is an error (can't
748 # qpush B is a NOP (no change) qpush A is an error (can't
759 # go backwards with qpush)
749 # go backwards with qpush)
760 if patch:
750 if patch:
761 info = self.isapplied(patch)
751 info = self.isapplied(patch)
762 if info:
752 if info:
763 if info[0] < len(self.applied) - 1:
753 if info[0] < len(self.applied) - 1:
764 raise util.Abort(
754 raise util.Abort(
765 _("cannot push to a previous patch: %s") % patch)
755 _("cannot push to a previous patch: %s") % patch)
766 if info[0] < len(self.series) - 1:
756 if info[0] < len(self.series) - 1:
767 self.ui.warn(
757 self.ui.warn(
768 _('qpush: %s is already at the top\n') % patch)
758 _('qpush: %s is already at the top\n') % patch)
769 else:
759 else:
770 self.ui.warn(_('all patches are currently applied\n'))
760 self.ui.warn(_('all patches are currently applied\n'))
771 return
761 return
772
762
773 # Following the above example, starting at 'top' of B:
763 # Following the above example, starting at 'top' of B:
774 # qpush should be performed (pushes C), but a subsequent
764 # qpush should be performed (pushes C), but a subsequent
775 # qpush without an argument is an error (nothing to
765 # qpush without an argument is an error (nothing to
776 # apply). This allows a loop of "...while hg qpush..." to
766 # apply). This allows a loop of "...while hg qpush..." to
777 # work as it detects an error when done
767 # work as it detects an error when done
778 if self.series_end() == len(self.series):
768 if self.series_end() == len(self.series):
779 self.ui.warn(_('patch series already fully applied\n'))
769 self.ui.warn(_('patch series already fully applied\n'))
780 return 1
770 return 1
781 if not force:
771 if not force:
782 self.check_localchanges(repo)
772 self.check_localchanges(repo)
783
773
784 self.applied_dirty = 1;
774 self.applied_dirty = 1;
785 start = self.series_end()
775 start = self.series_end()
786 if start > 0:
776 if start > 0:
787 self.check_toppatch(repo)
777 self.check_toppatch(repo)
788 if not patch:
778 if not patch:
789 patch = self.series[start]
779 patch = self.series[start]
790 end = start + 1
780 end = start + 1
791 else:
781 else:
792 end = self.series.index(patch, start) + 1
782 end = self.series.index(patch, start) + 1
793 s = self.series[start:end]
783 s = self.series[start:end]
794 all_files = {}
784 all_files = {}
795 try:
785 try:
796 if mergeq:
786 if mergeq:
797 ret = self.mergepatch(repo, mergeq, s, wlock)
787 ret = self.mergepatch(repo, mergeq, s)
798 else:
788 else:
799 ret = self.apply(repo, s, list, wlock=wlock,
789 ret = self.apply(repo, s, list, all_files=all_files)
800 all_files=all_files)
801 except:
790 except:
802 self.ui.warn(_('cleaning up working directory...'))
791 self.ui.warn(_('cleaning up working directory...'))
803 node = repo.dirstate.parents()[0]
792 node = repo.dirstate.parents()[0]
804 hg.revert(repo, node, None, wlock)
793 hg.revert(repo, node, None)
805 unknown = repo.status(wlock=wlock)[4]
794 unknown = repo.status()[4]
806 # only remove unknown files that we know we touched or
795 # only remove unknown files that we know we touched or
807 # created while patching
796 # created while patching
808 for f in unknown:
797 for f in unknown:
809 if f in all_files:
798 if f in all_files:
810 util.unlink(repo.wjoin(f))
799 util.unlink(repo.wjoin(f))
811 self.ui.warn(_('done\n'))
800 self.ui.warn(_('done\n'))
812 raise
801 raise
813 top = self.applied[-1].name
802 top = self.applied[-1].name
814 if ret[0]:
803 if ret[0]:
815 self.ui.write(
804 self.ui.write(
816 "Errors during apply, please fix and refresh %s\n" % top)
805 "Errors during apply, please fix and refresh %s\n" % top)
817 else:
806 else:
818 self.ui.write("Now at: %s\n" % top)
807 self.ui.write("Now at: %s\n" % top)
819 return ret[0]
808 return ret[0]
820 finally:
809 finally:
821 del wlock
810 del wlock
822
811
823 def pop(self, repo, patch=None, force=False, update=True, all=False,
812 def pop(self, repo, patch=None, force=False, update=True, all=False):
824 wlock=None):
825 def getfile(f, rev):
813 def getfile(f, rev):
826 t = repo.file(f).read(rev)
814 t = repo.file(f).read(rev)
827 repo.wfile(f, "w").write(t)
815 repo.wfile(f, "w").write(t)
828
816
829 if not wlock:
830 wlock = repo.wlock()
817 wlock = repo.wlock()
831 try:
818 try:
832 if patch:
819 if patch:
833 # index, rev, patch
820 # index, rev, patch
834 info = self.isapplied(patch)
821 info = self.isapplied(patch)
835 if not info:
822 if not info:
836 patch = self.lookup(patch)
823 patch = self.lookup(patch)
837 info = self.isapplied(patch)
824 info = self.isapplied(patch)
838 if not info:
825 if not info:
839 raise util.Abort(_("patch %s is not applied") % patch)
826 raise util.Abort(_("patch %s is not applied") % patch)
840
827
841 if len(self.applied) == 0:
828 if len(self.applied) == 0:
842 # Allow qpop -a to work repeatedly,
829 # Allow qpop -a to work repeatedly,
843 # but not qpop without an argument
830 # but not qpop without an argument
844 self.ui.warn(_("no patches applied\n"))
831 self.ui.warn(_("no patches applied\n"))
845 return not all
832 return not all
846
833
847 if not update:
834 if not update:
848 parents = repo.dirstate.parents()
835 parents = repo.dirstate.parents()
849 rr = [ revlog.bin(x.rev) for x in self.applied ]
836 rr = [ revlog.bin(x.rev) for x in self.applied ]
850 for p in parents:
837 for p in parents:
851 if p in rr:
838 if p in rr:
852 self.ui.warn("qpop: forcing dirstate update\n")
839 self.ui.warn("qpop: forcing dirstate update\n")
853 update = True
840 update = True
854
841
855 if not force and update:
842 if not force and update:
856 self.check_localchanges(repo)
843 self.check_localchanges(repo)
857
844
858 self.applied_dirty = 1;
845 self.applied_dirty = 1;
859 end = len(self.applied)
846 end = len(self.applied)
860 if not patch:
847 if not patch:
861 if all:
848 if all:
862 popi = 0
849 popi = 0
863 else:
850 else:
864 popi = len(self.applied) - 1
851 popi = len(self.applied) - 1
865 else:
852 else:
866 popi = info[0] + 1
853 popi = info[0] + 1
867 if popi >= end:
854 if popi >= end:
868 self.ui.warn("qpop: %s is already at the top\n" % patch)
855 self.ui.warn("qpop: %s is already at the top\n" % patch)
869 return
856 return
870 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
857 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
871
858
872 start = info[0]
859 start = info[0]
873 rev = revlog.bin(info[1])
860 rev = revlog.bin(info[1])
874
861
875 # we know there are no local changes, so we can make a simplified
862 # we know there are no local changes, so we can make a simplified
876 # form of hg.update.
863 # form of hg.update.
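# Concretely: files modified or removed relative to the new parent qp are
# restored from qp's manifest, files added by the popped patches are
# unlinked, and the dirstate parents are reset to qp before stripping.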
877 if update:
864 if update:
878 top = self.check_toppatch(repo)
865 top = self.check_toppatch(repo)
879 qp = self.qparents(repo, rev)
866 qp = self.qparents(repo, rev)
880 changes = repo.changelog.read(qp)
867 changes = repo.changelog.read(qp)
881 mmap = repo.manifest.read(changes[0])
868 mmap = repo.manifest.read(changes[0])
882 m, a, r, d, u = repo.status(qp, top)[:5]
869 m, a, r, d, u = repo.status(qp, top)[:5]
883 if d:
870 if d:
884 raise util.Abort("deletions found between repo revs")
871 raise util.Abort("deletions found between repo revs")
885 for f in m:
872 for f in m:
886 getfile(f, mmap[f])
873 getfile(f, mmap[f])
887 for f in r:
874 for f in r:
888 getfile(f, mmap[f])
875 getfile(f, mmap[f])
889 util.set_exec(repo.wjoin(f), mmap.execf(f))
876 util.set_exec(repo.wjoin(f), mmap.execf(f))
890 for f in m + r:
877 for f in m + r:
891 repo.dirstate.normal(f)
878 repo.dirstate.normal(f)
892 for f in a:
879 for f in a:
893 try:
880 try:
894 os.unlink(repo.wjoin(f))
881 os.unlink(repo.wjoin(f))
895 except OSError, e:
882 except OSError, e:
896 if e.errno != errno.ENOENT:
883 if e.errno != errno.ENOENT:
897 raise
884 raise
898 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
885 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
899 except: pass
886 except: pass
900 repo.dirstate.forget(f)
887 repo.dirstate.forget(f)
901 repo.dirstate.setparents(qp, revlog.nullid)
888 repo.dirstate.setparents(qp, revlog.nullid)
902 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
889 self.strip(repo, rev, update=False, backup='strip')
903 del self.applied[start:end]
890 del self.applied[start:end]
904 if len(self.applied):
891 if len(self.applied):
905 self.ui.write("Now at: %s\n" % self.applied[-1].name)
892 self.ui.write("Now at: %s\n" % self.applied[-1].name)
906 else:
893 else:
907 self.ui.write("Patch queue now empty\n")
894 self.ui.write("Patch queue now empty\n")
908 finally:
895 finally:
909 del wlock
896 del wlock
910
897
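# Typical command-level usage of this method (it backs the qpop command):
#   hg qpop           # pop the topmost applied patch
#   hg qpop -a        # pop every applied patch (safe to repeat)
#   hg qpop <patch>   # pop patches until <patch> is the topmost applied one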
911 def diff(self, repo, pats, opts):
898 def diff(self, repo, pats, opts):
912 top = self.check_toppatch(repo)
899 top = self.check_toppatch(repo)
913 if not top:
900 if not top:
914 self.ui.write("No patches applied\n")
901 self.ui.write("No patches applied\n")
915 return
902 return
916 qp = self.qparents(repo, top)
903 qp = self.qparents(repo, top)
917 if opts.get('git'):
904 if opts.get('git'):
918 self.diffopts().git = True
905 self.diffopts().git = True
919 self.printdiff(repo, qp, files=pats, opts=opts)
906 self.printdiff(repo, qp, files=pats, opts=opts)
920
907
921 def refresh(self, repo, pats=None, **opts):
908 def refresh(self, repo, pats=None, **opts):
922 if len(self.applied) == 0:
909 if len(self.applied) == 0:
923 self.ui.write("No patches applied\n")
910 self.ui.write("No patches applied\n")
924 return 1
911 return 1
925 wlock = repo.wlock()
912 wlock = repo.wlock()
926 try:
913 try:
927 self.check_toppatch(repo)
914 self.check_toppatch(repo)
928 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
915 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
929 top = revlog.bin(top)
916 top = revlog.bin(top)
930 cparents = repo.changelog.parents(top)
917 cparents = repo.changelog.parents(top)
931 patchparent = self.qparents(repo, top)
918 patchparent = self.qparents(repo, top)
932 message, comments, user, date, patchfound = self.readheaders(patchfn)
919 message, comments, user, date, patchfound = self.readheaders(patchfn)
933
920
934 patchf = self.opener(patchfn, 'r+')
921 patchf = self.opener(patchfn, 'r+')
935
922
936 # if the patch was a git patch, refresh it as a git patch
923 # if the patch was a git patch, refresh it as a git patch
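# (the open patch file is rewound and truncated a few lines below so the
# refreshed patch can be rewritten from scratch into the same file)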
937 for line in patchf:
924 for line in patchf:
938 if line.startswith('diff --git'):
925 if line.startswith('diff --git'):
939 self.diffopts().git = True
926 self.diffopts().git = True
940 break
927 break
941 patchf.seek(0)
928 patchf.seek(0)
942 patchf.truncate()
929 patchf.truncate()
943
930
944 msg = opts.get('msg', '').rstrip()
931 msg = opts.get('msg', '').rstrip()
945 if msg:
932 if msg:
946 if comments:
933 if comments:
947 # Remove existing message.
934 # Remove existing message.
948 ci = 0
935 ci = 0
949 subj = None
936 subj = None
950 for mi in xrange(len(message)):
937 for mi in xrange(len(message)):
951 if comments[ci].lower().startswith('subject: '):
938 if comments[ci].lower().startswith('subject: '):
952 subj = comments[ci][9:]
939 subj = comments[ci][9:]
953 while message[mi] != comments[ci] and message[mi] != subj:
940 while message[mi] != comments[ci] and message[mi] != subj:
954 ci += 1
941 ci += 1
955 del comments[ci]
942 del comments[ci]
956 comments.append(msg)
943 comments.append(msg)
957 if comments:
944 if comments:
958 comments = "\n".join(comments) + '\n\n'
945 comments = "\n".join(comments) + '\n\n'
959 patchf.write(comments)
946 patchf.write(comments)
960
947
961 if opts.get('git'):
948 if opts.get('git'):
962 self.diffopts().git = True
949 self.diffopts().git = True
963 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
950 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
964 tip = repo.changelog.tip()
951 tip = repo.changelog.tip()
965 if top == tip:
952 if top == tip:
966 # if the top of our patch queue is also the tip, there is an
953 # if the top of our patch queue is also the tip, there is an
967 # optimization here. We update the dirstate in place and strip
954 # optimization here. We update the dirstate in place and strip
968 # off the tip commit. Then just commit the current directory
955 # off the tip commit. Then just commit the current directory
969 # tree. We can also send repo.commit the list of files
956 # tree. We can also send repo.commit the list of files
970 # changed to speed up the diff
957 # changed to speed up the diff
971 #
958 #
972 # in short mode, we only diff the files included in the
959 # in short mode, we only diff the files included in the
973 # patch already
960 # patch already
974 #
961 #
975 # this should really read:
962 # this should really read:
976 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
963 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
977 # but we do it backwards to take advantage of manifest/chlog
964 # but we do it backwards to take advantage of manifest/chlog
978 # caching against the next repo.status call
965 # caching against the next repo.status call
979 #
966 #
980 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
967 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
981 changes = repo.changelog.read(tip)
968 changes = repo.changelog.read(tip)
982 man = repo.manifest.read(changes[0])
969 man = repo.manifest.read(changes[0])
983 aaa = aa[:]
970 aaa = aa[:]
984 if opts.get('short'):
971 if opts.get('short'):
985 filelist = mm + aa + dd
972 filelist = mm + aa + dd
986 match = dict.fromkeys(filelist).__contains__
973 match = dict.fromkeys(filelist).__contains__
987 else:
974 else:
988 filelist = None
975 filelist = None
989 match = util.always
976 match = util.always
990 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
977 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
991
978
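# Two status sets get combined here: mm/aa/dd describe what the top patch
# already contains (patchparent -> tip), while m/a/r/d/u are the pending
# working-directory changes. The loops below reclassify files so that each
# one ends up in exactly one column of the refreshed patch.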
992 # we might end up with files that were added between
979 # we might end up with files that were added between
993 # tip and the dirstate parent, but then changed in the
980 # tip and the dirstate parent, but then changed in the
994 # local dirstate. in this case, we want them to only
981 # local dirstate. in this case, we want them to only
995 # show up in the added section
982 # show up in the added section
996 for x in m:
983 for x in m:
997 if x not in aa:
984 if x not in aa:
998 mm.append(x)
985 mm.append(x)
999 # we might end up with files added by the local dirstate that
986 # we might end up with files added by the local dirstate that
1000 # were deleted by the patch. In this case, they should only
987 # were deleted by the patch. In this case, they should only
1001 # show up in the changed section.
988 # show up in the changed section.
1002 for x in a:
989 for x in a:
1003 if x in dd:
990 if x in dd:
1004 del dd[dd.index(x)]
991 del dd[dd.index(x)]
1005 mm.append(x)
992 mm.append(x)
1006 else:
993 else:
1007 aa.append(x)
994 aa.append(x)
1008 # make sure any files deleted in the local dirstate
995 # make sure any files deleted in the local dirstate
1009 # are not in the add or change column of the patch
996 # are not in the add or change column of the patch
1010 forget = []
997 forget = []
1011 for x in d + r:
998 for x in d + r:
1012 if x in aa:
999 if x in aa:
1013 del aa[aa.index(x)]
1000 del aa[aa.index(x)]
1014 forget.append(x)
1001 forget.append(x)
1015 continue
1002 continue
1016 elif x in mm:
1003 elif x in mm:
1017 del mm[mm.index(x)]
1004 del mm[mm.index(x)]
1018 dd.append(x)
1005 dd.append(x)
1019
1006
1020 m = util.unique(mm)
1007 m = util.unique(mm)
1021 r = util.unique(dd)
1008 r = util.unique(dd)
1022 a = util.unique(aa)
1009 a = util.unique(aa)
1023 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1010 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1024 filelist = util.unique(c[0] + c[1] + c[2])
1011 filelist = util.unique(c[0] + c[1] + c[2])
1025 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1012 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1026 fp=patchf, changes=c, opts=self.diffopts())
1013 fp=patchf, changes=c, opts=self.diffopts())
1027 patchf.close()
1014 patchf.close()
1028
1015
1029 repo.dirstate.setparents(*cparents)
1016 repo.dirstate.setparents(*cparents)
1030 copies = {}
1017 copies = {}
1031 for dst in a:
1018 for dst in a:
1032 src = repo.dirstate.copied(dst)
1019 src = repo.dirstate.copied(dst)
1033 if src is None:
1020 if src is None:
1034 continue
1021 continue
1035 copies.setdefault(src, []).append(dst)
1022 copies.setdefault(src, []).append(dst)
1036 repo.dirstate.add(dst)
1023 repo.dirstate.add(dst)
1037 # remember the copies between patchparent and tip
1024 # remember the copies between patchparent and tip
1038 # this may be slow, so don't do it if we're not tracking copies
1025 # this may be slow, so don't do it if we're not tracking copies
1039 if self.diffopts().git:
1026 if self.diffopts().git:
1040 for dst in aaa:
1027 for dst in aaa:
1041 f = repo.file(dst)
1028 f = repo.file(dst)
1042 src = f.renamed(man[dst])
1029 src = f.renamed(man[dst])
1043 if src:
1030 if src:
1044 copies[src[0]] = copies.get(dst, [])
1031 copies[src[0]] = copies.get(dst, [])
1045 if dst in a:
1032 if dst in a:
1046 copies[src[0]].append(dst)
1033 copies[src[0]].append(dst)
1047 # we can't copy a file created by the patch itself
1034 # we can't copy a file created by the patch itself
1048 if dst in copies:
1035 if dst in copies:
1049 del copies[dst]
1036 del copies[dst]
1050 for src, dsts in copies.iteritems():
1037 for src, dsts in copies.iteritems():
1051 for dst in dsts:
1038 for dst in dsts:
1052 repo.dirstate.copy(src, dst)
1039 repo.dirstate.copy(src, dst)
1053 for f in r:
1040 for f in r:
1054 repo.dirstate.remove(f)
1041 repo.dirstate.remove(f)
1055 # if the patch excludes a modified file, mark that
1042 # if the patch excludes a modified file, mark that
1056 # file with mtime=0 so status can see it.
1043 # file with mtime=0 so status can see it.
1057 mm = []
1044 mm = []
1058 for i in xrange(len(m)-1, -1, -1):
1045 for i in xrange(len(m)-1, -1, -1):
1059 if not matchfn(m[i]):
1046 if not matchfn(m[i]):
1060 mm.append(m[i])
1047 mm.append(m[i])
1061 del m[i]
1048 del m[i]
1062 for f in m:
1049 for f in m:
1063 repo.dirstate.normal(f)
1050 repo.dirstate.normal(f)
1064 for f in mm:
1051 for f in mm:
1065 repo.dirstate.normaldirty(f)
1052 repo.dirstate.normaldirty(f)
1066 for f in forget:
1053 for f in forget:
1067 repo.dirstate.forget(f)
1054 repo.dirstate.forget(f)
1068
1055
1069 if not msg:
1056 if not msg:
1070 if not message:
1057 if not message:
1071 message = "[mq]: %s\n" % patchfn
1058 message = "[mq]: %s\n" % patchfn
1072 else:
1059 else:
1073 message = "\n".join(message)
1060 message = "\n".join(message)
1074 else:
1061 else:
1075 message = msg
1062 message = msg
1076
1063
1077 self.strip(repo, top, update=False,
1064 self.strip(repo, top, update=False,
1078 backup='strip', wlock=wlock)
1065 backup='strip')
1079 n = repo.commit(filelist, message, changes[1], match=matchfn,
1066 n = repo.commit(filelist, message, changes[1], match=matchfn,
1080 force=1, wlock=wlock)
1067 force=1)
1081 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1068 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1082 self.applied_dirty = 1
1069 self.applied_dirty = 1
1083 self.removeundo(repo)
1070 self.removeundo(repo)
1084 else:
1071 else:
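# When the queue top is not the repository tip there is no in-place
# shortcut: write the full diff against patchparent, drop files added since
# the patch, then pop and force-push so the rewritten patch is reapplied.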
1085 self.printdiff(repo, patchparent, fp=patchf)
1072 self.printdiff(repo, patchparent, fp=patchf)
1086 patchf.close()
1073 patchf.close()
1087 added = repo.status()[1]
1074 added = repo.status()[1]
1088 for a in added:
1075 for a in added:
1089 f = repo.wjoin(a)
1076 f = repo.wjoin(a)
1090 try:
1077 try:
1091 os.unlink(f)
1078 os.unlink(f)
1092 except OSError, e:
1079 except OSError, e:
1093 if e.errno != errno.ENOENT:
1080 if e.errno != errno.ENOENT:
1094 raise
1081 raise
1095 try: os.removedirs(os.path.dirname(f))
1082 try: os.removedirs(os.path.dirname(f))
1096 except: pass
1083 except: pass
1097 # forget the file copies in the dirstate
1084 # forget the file copies in the dirstate
1098 # push should re-add the files later on
1085 # push should re-add the files later on
1099 repo.dirstate.forget(a)
1086 repo.dirstate.forget(a)
1100 self.pop(repo, force=True, wlock=wlock)
1087 self.pop(repo, force=True)
1101 self.push(repo, force=True, wlock=wlock)
1088 self.push(repo, force=True)
1102 finally:
1089 finally:
1103 del wlock
1090 del wlock
1104
1091
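# Command-level behaviour (see the qrefresh wrapper further down): with no
# arguments the whole working-directory delta is folded into the top patch;
# with file patterns only the matching changes are folded in and the rest
# stays in the working directory.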
1105 def init(self, repo, create=False):
1092 def init(self, repo, create=False):
1106 if not create and os.path.isdir(self.path):
1093 if not create and os.path.isdir(self.path):
1107 raise util.Abort(_("patch queue directory already exists"))
1094 raise util.Abort(_("patch queue directory already exists"))
1108 try:
1095 try:
1109 os.mkdir(self.path)
1096 os.mkdir(self.path)
1110 except OSError, inst:
1097 except OSError, inst:
1111 if inst.errno != errno.EEXIST or not create:
1098 if inst.errno != errno.EEXIST or not create:
1112 raise
1099 raise
1113 if create:
1100 if create:
1114 return self.qrepo(create=True)
1101 return self.qrepo(create=True)
1115
1102
1116 def unapplied(self, repo, patch=None):
1103 def unapplied(self, repo, patch=None):
1117 if patch and patch not in self.series:
1104 if patch and patch not in self.series:
1118 raise util.Abort(_("patch %s is not in series file") % patch)
1105 raise util.Abort(_("patch %s is not in series file") % patch)
1119 if not patch:
1106 if not patch:
1120 start = self.series_end()
1107 start = self.series_end()
1121 else:
1108 else:
1122 start = self.series.index(patch) + 1
1109 start = self.series.index(patch) + 1
1123 unapplied = []
1110 unapplied = []
1124 for i in xrange(start, len(self.series)):
1111 for i in xrange(start, len(self.series)):
1125 pushable, reason = self.pushable(i)
1112 pushable, reason = self.pushable(i)
1126 if pushable:
1113 if pushable:
1127 unapplied.append((i, self.series[i]))
1114 unapplied.append((i, self.series[i]))
1128 self.explain_pushable(i)
1115 self.explain_pushable(i)
1129 return unapplied
1116 return unapplied
1130
1117
1131 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1118 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1132 summary=False):
1119 summary=False):
1133 def displayname(patchname):
1120 def displayname(patchname):
1134 if summary:
1121 if summary:
1135 msg = self.readheaders(patchname)[0]
1122 msg = self.readheaders(patchname)[0]
1136 msg = msg and ': ' + msg[0] or ': '
1123 msg = msg and ': ' + msg[0] or ': '
1137 else:
1124 else:
1138 msg = ''
1125 msg = ''
1139 return '%s%s' % (patchname, msg)
1126 return '%s%s' % (patchname, msg)
1140
1127
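# Status letters used below: 'A' = applied, 'U' = unapplied but pushable,
# 'G' = guarded. In the "missing" mode, files present in the patch directory
# but not listed in the series are printed (with a 'D' prefix when verbose).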
1141 applied = dict.fromkeys([p.name for p in self.applied])
1128 applied = dict.fromkeys([p.name for p in self.applied])
1142 if length is None:
1129 if length is None:
1143 length = len(self.series) - start
1130 length = len(self.series) - start
1144 if not missing:
1131 if not missing:
1145 for i in xrange(start, start+length):
1132 for i in xrange(start, start+length):
1146 patch = self.series[i]
1133 patch = self.series[i]
1147 if patch in applied:
1134 if patch in applied:
1148 stat = 'A'
1135 stat = 'A'
1149 elif self.pushable(i)[0]:
1136 elif self.pushable(i)[0]:
1150 stat = 'U'
1137 stat = 'U'
1151 else:
1138 else:
1152 stat = 'G'
1139 stat = 'G'
1153 pfx = ''
1140 pfx = ''
1154 if self.ui.verbose:
1141 if self.ui.verbose:
1155 pfx = '%d %s ' % (i, stat)
1142 pfx = '%d %s ' % (i, stat)
1156 elif status and status != stat:
1143 elif status and status != stat:
1157 continue
1144 continue
1158 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1145 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1159 else:
1146 else:
1160 msng_list = []
1147 msng_list = []
1161 for root, dirs, files in os.walk(self.path):
1148 for root, dirs, files in os.walk(self.path):
1162 d = root[len(self.path) + 1:]
1149 d = root[len(self.path) + 1:]
1163 for f in files:
1150 for f in files:
1164 fl = os.path.join(d, f)
1151 fl = os.path.join(d, f)
1165 if (fl not in self.series and
1152 if (fl not in self.series and
1166 fl not in (self.status_path, self.series_path,
1153 fl not in (self.status_path, self.series_path,
1167 self.guards_path)
1154 self.guards_path)
1168 and not fl.startswith('.')):
1155 and not fl.startswith('.')):
1169 msng_list.append(fl)
1156 msng_list.append(fl)
1170 msng_list.sort()
1157 msng_list.sort()
1171 for x in msng_list:
1158 for x in msng_list:
1172 pfx = self.ui.verbose and ('D ') or ''
1159 pfx = self.ui.verbose and ('D ') or ''
1173 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1160 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1174
1161
1175 def issaveline(self, l):
1162 def issaveline(self, l):
1176 if l.name == '.hg.patches.save.line':
1163 if l.name == '.hg.patches.save.line':
1177 return True
1164 return True
1178
1165
1179 def qrepo(self, create=False):
1166 def qrepo(self, create=False):
1180 if create or os.path.isdir(self.join(".hg")):
1167 if create or os.path.isdir(self.join(".hg")):
1181 return hg.repository(self.ui, path=self.path, create=create)
1168 return hg.repository(self.ui, path=self.path, create=create)
1182
1169
1183 def restore(self, repo, rev, delete=None, qupdate=None):
1170 def restore(self, repo, rev, delete=None, qupdate=None):
1184 c = repo.changelog.read(rev)
1171 c = repo.changelog.read(rev)
1185 desc = c[4].strip()
1172 desc = c[4].strip()
1186 lines = desc.splitlines()
1173 lines = desc.splitlines()
1187 i = 0
1174 i = 0
1188 datastart = None
1175 datastart = None
1189 series = []
1176 series = []
1190 applied = []
1177 applied = []
1191 qpp = None
1178 qpp = None
1192 for i in xrange(0, len(lines)):
1179 for i in xrange(0, len(lines)):
1193 if lines[i] == 'Patch Data:':
1180 if lines[i] == 'Patch Data:':
1194 datastart = i + 1
1181 datastart = i + 1
1195 elif lines[i].startswith('Dirstate:'):
1182 elif lines[i].startswith('Dirstate:'):
1196 l = lines[i].rstrip()
1183 l = lines[i].rstrip()
1197 l = l[10:].split(' ')
1184 l = l[10:].split(' ')
1198 qpp = [ hg.bin(x) for x in l ]
1185 qpp = [ hg.bin(x) for x in l ]
1199 elif datastart != None:
1186 elif datastart != None:
1200 l = lines[i].rstrip()
1187 l = lines[i].rstrip()
1201 se = statusentry(l)
1188 se = statusentry(l)
1202 file_ = se.name
1189 file_ = se.name
1203 if se.rev:
1190 if se.rev:
1204 applied.append(se)
1191 applied.append(se)
1205 else:
1192 else:
1206 series.append(file_)
1193 series.append(file_)
1207 if datastart == None:
1194 if datastart == None:
1208 self.ui.warn("No saved patch data found\n")
1195 self.ui.warn("No saved patch data found\n")
1209 return 1
1196 return 1
1210 self.ui.warn("restoring status: %s\n" % lines[0])
1197 self.ui.warn("restoring status: %s\n" % lines[0])
1211 self.full_series = series
1198 self.full_series = series
1212 self.applied = applied
1199 self.applied = applied
1213 self.parse_series()
1200 self.parse_series()
1214 self.series_dirty = 1
1201 self.series_dirty = 1
1215 self.applied_dirty = 1
1202 self.applied_dirty = 1
1216 heads = repo.changelog.heads()
1203 heads = repo.changelog.heads()
1217 if delete:
1204 if delete:
1218 if rev not in heads:
1205 if rev not in heads:
1219 self.ui.warn("save entry has children, leaving it alone\n")
1206 self.ui.warn("save entry has children, leaving it alone\n")
1220 else:
1207 else:
1221 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1208 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1222 pp = repo.dirstate.parents()
1209 pp = repo.dirstate.parents()
1223 if rev in pp:
1210 if rev in pp:
1224 update = True
1211 update = True
1225 else:
1212 else:
1226 update = False
1213 update = False
1227 self.strip(repo, rev, update=update, backup='strip')
1214 self.strip(repo, rev, update=update, backup='strip')
1228 if qpp:
1215 if qpp:
1229 self.ui.warn("saved queue repository parents: %s %s\n" %
1216 self.ui.warn("saved queue repository parents: %s %s\n" %
1230 (hg.short(qpp[0]), hg.short(qpp[1])))
1217 (hg.short(qpp[0]), hg.short(qpp[1])))
1231 if qupdate:
1218 if qupdate:
1232 self.ui.warn("queue directory updating\n")
1219 self.ui.warn("queue directory updating\n")
1233 r = self.qrepo()
1220 r = self.qrepo()
1234 if not r:
1221 if not r:
1235 self.ui.warn("Unable to load queue repository\n")
1222 self.ui.warn("Unable to load queue repository\n")
1236 return 1
1223 return 1
1237 hg.clean(r, qpp[0])
1224 hg.clean(r, qpp[0])
1238
1225
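# restore() is the inverse of save() below: it re-reads the series/applied
# state that save() embedded in a changeset description ('Patch Data:' and
# optional 'Dirstate:' lines) and, with qupdate, also moves the nested queue
# repository back to the recorded parents.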
1239 def save(self, repo, msg=None):
1226 def save(self, repo, msg=None):
1240 if len(self.applied) == 0:
1227 if len(self.applied) == 0:
1241 self.ui.warn("save: no patches applied, exiting\n")
1228 self.ui.warn("save: no patches applied, exiting\n")
1242 return 1
1229 return 1
1243 if self.issaveline(self.applied[-1]):
1230 if self.issaveline(self.applied[-1]):
1244 self.ui.warn("status is already saved\n")
1231 self.ui.warn("status is already saved\n")
1245 return 1
1232 return 1
1246
1233
1247 ar = [ ':' + x for x in self.full_series ]
1234 ar = [ ':' + x for x in self.full_series ]
1248 if not msg:
1235 if not msg:
1249 msg = "hg patches saved state"
1236 msg = "hg patches saved state"
1250 else:
1237 else:
1251 msg = "hg patches: " + msg.rstrip('\r\n')
1238 msg = "hg patches: " + msg.rstrip('\r\n')
1252 r = self.qrepo()
1239 r = self.qrepo()
1253 if r:
1240 if r:
1254 pp = r.dirstate.parents()
1241 pp = r.dirstate.parents()
1255 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1242 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1256 msg += "\n\nPatch Data:\n"
1243 msg += "\n\nPatch Data:\n"
1257 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1244 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1258 "\n".join(ar) + '\n' or "")
1245 "\n".join(ar) + '\n' or "")
1259 n = repo.commit(None, text, user=None, force=1)
1246 n = repo.commit(None, text, user=None, force=1)
1260 if not n:
1247 if not n:
1261 self.ui.warn("repo commit failed\n")
1248 self.ui.warn("repo commit failed\n")
1262 return 1
1249 return 1
1263 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1250 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1264 self.applied_dirty = 1
1251 self.applied_dirty = 1
1265 self.removeundo(repo)
1252 self.removeundo(repo)
1266
1253
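# save() snapshots the queue by committing a changeset whose description
# embeds the applied/series data, then appends the '.hg.patches.save.line'
# sentinel entry so issaveline() can recognise the snapshot later.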
1267 def full_series_end(self):
1254 def full_series_end(self):
1268 if len(self.applied) > 0:
1255 if len(self.applied) > 0:
1269 p = self.applied[-1].name
1256 p = self.applied[-1].name
1270 end = self.find_series(p)
1257 end = self.find_series(p)
1271 if end == None:
1258 if end == None:
1272 return len(self.full_series)
1259 return len(self.full_series)
1273 return end + 1
1260 return end + 1
1274 return 0
1261 return 0
1275
1262
1276 def series_end(self, all_patches=False):
1263 def series_end(self, all_patches=False):
1277 """If all_patches is False, return the index of the next pushable patch
1264 """If all_patches is False, return the index of the next pushable patch
1278 in the series, or the series length. If all_patches is True, return the
1265 in the series, or the series length. If all_patches is True, return the
1279 index of the first patch past the last applied one.
1266 index of the first patch past the last applied one.
1280 """
1267 """
1281 end = 0
1268 end = 0
1282 def next(start):
1269 def next(start):
1283 if all_patches:
1270 if all_patches:
1284 return start
1271 return start
1285 i = start
1272 i = start
1286 while i < len(self.series):
1273 while i < len(self.series):
1287 p, reason = self.pushable(i)
1274 p, reason = self.pushable(i)
1288 if p:
1275 if p:
1289 break
1276 break
1290 self.explain_pushable(i)
1277 self.explain_pushable(i)
1291 i += 1
1278 i += 1
1292 return i
1279 return i
1293 if len(self.applied) > 0:
1280 if len(self.applied) > 0:
1294 p = self.applied[-1].name
1281 p = self.applied[-1].name
1295 try:
1282 try:
1296 end = self.series.index(p)
1283 end = self.series.index(p)
1297 except ValueError:
1284 except ValueError:
1298 return 0
1285 return 0
1299 return next(end + 1)
1286 return next(end + 1)
1300 return next(end)
1287 return next(end)
1301
1288
1302 def appliedname(self, index):
1289 def appliedname(self, index):
1303 pname = self.applied[index].name
1290 pname = self.applied[index].name
1304 if not self.ui.verbose:
1291 if not self.ui.verbose:
1305 p = pname
1292 p = pname
1306 else:
1293 else:
1307 p = str(self.series.index(pname)) + " " + pname
1294 p = str(self.series.index(pname)) + " " + pname
1308 return p
1295 return p
1309
1296
1310 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1297 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1311 force=None, git=False):
1298 force=None, git=False):
1312 def checkseries(patchname):
1299 def checkseries(patchname):
1313 if patchname in self.series:
1300 if patchname in self.series:
1314 raise util.Abort(_('patch %s is already in the series file')
1301 raise util.Abort(_('patch %s is already in the series file')
1315 % patchname)
1302 % patchname)
1316 def checkfile(patchname):
1303 def checkfile(patchname):
1317 if not force and os.path.exists(self.join(patchname)):
1304 if not force and os.path.exists(self.join(patchname)):
1318 raise util.Abort(_('patch "%s" already exists')
1305 raise util.Abort(_('patch "%s" already exists')
1319 % patchname)
1306 % patchname)
1320
1307
1321 if rev:
1308 if rev:
1322 if files:
1309 if files:
1323 raise util.Abort(_('option "-r" not valid when importing '
1310 raise util.Abort(_('option "-r" not valid when importing '
1324 'files'))
1311 'files'))
1325 rev = cmdutil.revrange(repo, rev)
1312 rev = cmdutil.revrange(repo, rev)
1326 rev.sort(lambda x, y: cmp(y, x))
1313 rev.sort(lambda x, y: cmp(y, x))
1327 if (len(files) > 1 or len(rev) > 1) and patchname:
1314 if (len(files) > 1 or len(rev) > 1) and patchname:
1328 raise util.Abort(_('option "-n" not valid when importing multiple '
1315 raise util.Abort(_('option "-n" not valid when importing multiple '
1329 'patches'))
1316 'patches'))
1330 i = 0
1317 i = 0
1331 added = []
1318 added = []
1332 if rev:
1319 if rev:
1333 # If mq patches are applied, we can only import revisions
1320 # If mq patches are applied, we can only import revisions
1334 # that form a linear path to qbase.
1321 # that form a linear path to qbase.
1335 # Otherwise, they should form a linear path to a head.
1322 # Otherwise, they should form a linear path to a head.
1336 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1323 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1337 if len(heads) > 1:
1324 if len(heads) > 1:
1338 raise util.Abort(_('revision %d is the root of more than one '
1325 raise util.Abort(_('revision %d is the root of more than one '
1339 'branch') % rev[-1])
1326 'branch') % rev[-1])
1340 if self.applied:
1327 if self.applied:
1341 base = revlog.hex(repo.changelog.node(rev[0]))
1328 base = revlog.hex(repo.changelog.node(rev[0]))
1342 if base in [n.rev for n in self.applied]:
1329 if base in [n.rev for n in self.applied]:
1343 raise util.Abort(_('revision %d is already managed')
1330 raise util.Abort(_('revision %d is already managed')
1344 % rev[0])
1331 % rev[0])
1345 if heads != [revlog.bin(self.applied[-1].rev)]:
1332 if heads != [revlog.bin(self.applied[-1].rev)]:
1346 raise util.Abort(_('revision %d is not the parent of '
1333 raise util.Abort(_('revision %d is not the parent of '
1347 'the queue') % rev[0])
1334 'the queue') % rev[0])
1348 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1335 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1349 lastparent = repo.changelog.parentrevs(base)[0]
1336 lastparent = repo.changelog.parentrevs(base)[0]
1350 else:
1337 else:
1351 if heads != [repo.changelog.node(rev[0])]:
1338 if heads != [repo.changelog.node(rev[0])]:
1352 raise util.Abort(_('revision %d has unmanaged children')
1339 raise util.Abort(_('revision %d has unmanaged children')
1353 % rev[0])
1340 % rev[0])
1354 lastparent = None
1341 lastparent = None
1355
1342
1356 if git:
1343 if git:
1357 self.diffopts().git = True
1344 self.diffopts().git = True
1358
1345
1359 for r in rev:
1346 for r in rev:
1360 p1, p2 = repo.changelog.parentrevs(r)
1347 p1, p2 = repo.changelog.parentrevs(r)
1361 n = repo.changelog.node(r)
1348 n = repo.changelog.node(r)
1362 if p2 != revlog.nullrev:
1349 if p2 != revlog.nullrev:
1363 raise util.Abort(_('cannot import merge revision %d') % r)
1350 raise util.Abort(_('cannot import merge revision %d') % r)
1364 if lastparent and lastparent != r:
1351 if lastparent and lastparent != r:
1365 raise util.Abort(_('revision %d is not the parent of %d')
1352 raise util.Abort(_('revision %d is not the parent of %d')
1366 % (r, lastparent))
1353 % (r, lastparent))
1367 lastparent = p1
1354 lastparent = p1
1368
1355
1369 if not patchname:
1356 if not patchname:
1370 patchname = normname('%d.diff' % r)
1357 patchname = normname('%d.diff' % r)
1371 checkseries(patchname)
1358 checkseries(patchname)
1372 checkfile(patchname)
1359 checkfile(patchname)
1373 self.full_series.insert(0, patchname)
1360 self.full_series.insert(0, patchname)
1374
1361
1375 patchf = self.opener(patchname, "w")
1362 patchf = self.opener(patchname, "w")
1376 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1363 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1377 patchf.close()
1364 patchf.close()
1378
1365
1379 se = statusentry(revlog.hex(n), patchname)
1366 se = statusentry(revlog.hex(n), patchname)
1380 self.applied.insert(0, se)
1367 self.applied.insert(0, se)
1381
1368
1382 added.append(patchname)
1369 added.append(patchname)
1383 patchname = None
1370 patchname = None
1384 self.parse_series()
1371 self.parse_series()
1385 self.applied_dirty = 1
1372 self.applied_dirty = 1
1386
1373
1387 for filename in files:
1374 for filename in files:
1388 if existing:
1375 if existing:
1389 if filename == '-':
1376 if filename == '-':
1390 raise util.Abort(_('-e is incompatible with import from -'))
1377 raise util.Abort(_('-e is incompatible with import from -'))
1391 if not patchname:
1378 if not patchname:
1392 patchname = normname(filename)
1379 patchname = normname(filename)
1393 if not os.path.isfile(self.join(patchname)):
1380 if not os.path.isfile(self.join(patchname)):
1394 raise util.Abort(_("patch %s does not exist") % patchname)
1381 raise util.Abort(_("patch %s does not exist") % patchname)
1395 else:
1382 else:
1396 try:
1383 try:
1397 if filename == '-':
1384 if filename == '-':
1398 if not patchname:
1385 if not patchname:
1399 raise util.Abort(_('need --name to import a patch from -'))
1386 raise util.Abort(_('need --name to import a patch from -'))
1400 text = sys.stdin.read()
1387 text = sys.stdin.read()
1401 else:
1388 else:
1402 text = file(filename).read()
1389 text = file(filename).read()
1403 except IOError:
1390 except IOError:
1404 raise util.Abort(_("unable to read %s") % patchname)
1391 raise util.Abort(_("unable to read %s") % patchname)
1405 if not patchname:
1392 if not patchname:
1406 patchname = normname(os.path.basename(filename))
1393 patchname = normname(os.path.basename(filename))
1407 checkfile(patchname)
1394 checkfile(patchname)
1408 patchf = self.opener(patchname, "w")
1395 patchf = self.opener(patchname, "w")
1409 patchf.write(text)
1396 patchf.write(text)
1410 checkseries(patchname)
1397 checkseries(patchname)
1411 index = self.full_series_end() + i
1398 index = self.full_series_end() + i
1412 self.full_series[index:index] = [patchname]
1399 self.full_series[index:index] = [patchname]
1413 self.parse_series()
1400 self.parse_series()
1414 self.ui.warn("adding %s to series file\n" % patchname)
1401 self.ui.warn("adding %s to series file\n" % patchname)
1415 i += 1
1402 i += 1
1416 added.append(patchname)
1403 added.append(patchname)
1417 patchname = None
1404 patchname = None
1418 self.series_dirty = 1
1405 self.series_dirty = 1
1419 qrepo = self.qrepo()
1406 qrepo = self.qrepo()
1420 if qrepo:
1407 if qrepo:
1421 qrepo.add(added)
1408 qrepo.add(added)
1422
1409
1423 def delete(ui, repo, *patches, **opts):
1410 def delete(ui, repo, *patches, **opts):
1424 """remove patches from queue
1411 """remove patches from queue
1425
1412
1426 The patches must not be applied, unless they are arguments to
1413 The patches must not be applied, unless they are arguments to
1427 the --rev parameter. At least one patch or revision is required.
1414 the --rev parameter. At least one patch or revision is required.
1428
1415
1429 With --rev, mq will stop managing the named revisions (converting
1416 With --rev, mq will stop managing the named revisions (converting
1430 them to regular mercurial changesets). The patches must be applied
1417 them to regular mercurial changesets). The patches must be applied
1431 and at the base of the stack. This option is useful when the patches
1418 and at the base of the stack. This option is useful when the patches
1432 have been applied upstream.
1419 have been applied upstream.
1433
1420
1434 With --keep, the patch files are preserved in the patch directory."""
1421 With --keep, the patch files are preserved in the patch directory."""
1435 q = repo.mq
1422 q = repo.mq
1436 q.delete(repo, patches, opts)
1423 q.delete(repo, patches, opts)
1437 q.save_dirty()
1424 q.save_dirty()
1438 return 0
1425 return 0
1439
1426
1440 def applied(ui, repo, patch=None, **opts):
1427 def applied(ui, repo, patch=None, **opts):
1441 """print the patches already applied"""
1428 """print the patches already applied"""
1442 q = repo.mq
1429 q = repo.mq
1443 if patch:
1430 if patch:
1444 if patch not in q.series:
1431 if patch not in q.series:
1445 raise util.Abort(_("patch %s is not in series file") % patch)
1432 raise util.Abort(_("patch %s is not in series file") % patch)
1446 end = q.series.index(patch) + 1
1433 end = q.series.index(patch) + 1
1447 else:
1434 else:
1448 end = q.series_end(True)
1435 end = q.series_end(True)
1449 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1436 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1450
1437
1451 def unapplied(ui, repo, patch=None, **opts):
1438 def unapplied(ui, repo, patch=None, **opts):
1452 """print the patches not yet applied"""
1439 """print the patches not yet applied"""
1453 q = repo.mq
1440 q = repo.mq
1454 if patch:
1441 if patch:
1455 if patch not in q.series:
1442 if patch not in q.series:
1456 raise util.Abort(_("patch %s is not in series file") % patch)
1443 raise util.Abort(_("patch %s is not in series file") % patch)
1457 start = q.series.index(patch) + 1
1444 start = q.series.index(patch) + 1
1458 else:
1445 else:
1459 start = q.series_end(True)
1446 start = q.series_end(True)
1460 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1447 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1461
1448
1462 def qimport(ui, repo, *filename, **opts):
1449 def qimport(ui, repo, *filename, **opts):
1463 """import a patch
1450 """import a patch
1464
1451
1465 The patch will have the same name as its source file unless you
1452 The patch will have the same name as its source file unless you
1466 give it a new one with --name.
1453 give it a new one with --name.
1467
1454
1468 You can register an existing patch inside the patch directory
1455 You can register an existing patch inside the patch directory
1469 with the --existing flag.
1456 with the --existing flag.
1470
1457
1471 With --force, an existing patch of the same name will be overwritten.
1458 With --force, an existing patch of the same name will be overwritten.
1472
1459
1473 An existing changeset may be placed under mq control with --rev
1460 An existing changeset may be placed under mq control with --rev
1474 (e.g. qimport --rev tip -n patch will place tip under mq control).
1461 (e.g. qimport --rev tip -n patch will place tip under mq control).
1475 With --git, patches imported with --rev will use the git diff
1462 With --git, patches imported with --rev will use the git diff
1476 format.
1463 format.
1477 """
1464 """
1478 q = repo.mq
1465 q = repo.mq
1479 q.qimport(repo, filename, patchname=opts['name'],
1466 q.qimport(repo, filename, patchname=opts['name'],
1480 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1467 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1481 git=opts['git'])
1468 git=opts['git'])
1482 q.save_dirty()
1469 q.save_dirty()
1483 return 0
1470 return 0
1484
1471
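# Usage sketches for the options described above (file names are only
# illustrative):
#   hg qimport ../fix.patch            # add an external patch to the series
#   hg qimport -e already-there.patch  # register a patch already in the queue dir
#   hg qimport -r tip -n tip.diff      # place the tip changeset under mq control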
1485 def init(ui, repo, **opts):
1472 def init(ui, repo, **opts):
1486 """init a new queue repository
1473 """init a new queue repository
1487
1474
1488 The queue repository is unversioned by default. If -c is
1475 The queue repository is unversioned by default. If -c is
1489 specified, qinit will create a separate nested repository
1476 specified, qinit will create a separate nested repository
1490 for patches (qinit -c may also be run later to convert
1477 for patches (qinit -c may also be run later to convert
1491 an unversioned patch repository into a versioned one).
1478 an unversioned patch repository into a versioned one).
1492 You can use qcommit to commit changes to this queue repository."""
1479 You can use qcommit to commit changes to this queue repository."""
1493 q = repo.mq
1480 q = repo.mq
1494 r = q.init(repo, create=opts['create_repo'])
1481 r = q.init(repo, create=opts['create_repo'])
1495 q.save_dirty()
1482 q.save_dirty()
1496 if r:
1483 if r:
1497 if not os.path.exists(r.wjoin('.hgignore')):
1484 if not os.path.exists(r.wjoin('.hgignore')):
1498 fp = r.wopener('.hgignore', 'w')
1485 fp = r.wopener('.hgignore', 'w')
1499 fp.write('syntax: glob\n')
1486 fp.write('syntax: glob\n')
1500 fp.write('status\n')
1487 fp.write('status\n')
1501 fp.write('guards\n')
1488 fp.write('guards\n')
1502 fp.close()
1489 fp.close()
1503 if not os.path.exists(r.wjoin('series')):
1490 if not os.path.exists(r.wjoin('series')):
1504 r.wopener('series', 'w').close()
1491 r.wopener('series', 'w').close()
1505 r.add(['.hgignore', 'series'])
1492 r.add(['.hgignore', 'series'])
1506 commands.add(ui, r)
1493 commands.add(ui, r)
1507 return 0
1494 return 0
1508
1495
1509 def clone(ui, source, dest=None, **opts):
1496 def clone(ui, source, dest=None, **opts):
1510 '''clone main and patch repository at same time
1497 '''clone main and patch repository at same time
1511
1498
1512 If source is local, destination will have no patches applied. If
1499 If source is local, destination will have no patches applied. If
1513 source is remote, this command cannot check whether patches are
1500 source is remote, this command cannot check whether patches are
1514 applied there, so it cannot guarantee that no patches are
1501 applied there, so it cannot guarantee that no patches are
1515 applied in the destination. If you clone a remote repository,
1502 applied in the destination. If you clone a remote repository,
1516 make sure it has no patches applied before cloning.
1503 make sure it has no patches applied before cloning.
1517
1504
1518 Source patch repository is looked for in <src>/.hg/patches by
1505 Source patch repository is looked for in <src>/.hg/patches by
1519 default. Use -p <url> to change.
1506 default. Use -p <url> to change.
1520
1507
1521 The patch directory must be a nested mercurial repository, as
1508 The patch directory must be a nested mercurial repository, as
1522 would be created by qinit -c.
1509 would be created by qinit -c.
1523 '''
1510 '''
1524 cmdutil.setremoteconfig(ui, opts)
1511 cmdutil.setremoteconfig(ui, opts)
1525 if dest is None:
1512 if dest is None:
1526 dest = hg.defaultdest(source)
1513 dest = hg.defaultdest(source)
1527 sr = hg.repository(ui, ui.expandpath(source))
1514 sr = hg.repository(ui, ui.expandpath(source))
1528 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1515 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1529 try:
1516 try:
1530 pr = hg.repository(ui, patchdir)
1517 pr = hg.repository(ui, patchdir)
1531 except hg.RepoError:
1518 except hg.RepoError:
1532 raise util.Abort(_('versioned patch repository not found'
1519 raise util.Abort(_('versioned patch repository not found'
1533 ' (see qinit -c)'))
1520 ' (see qinit -c)'))
1534 qbase, destrev = None, None
1521 qbase, destrev = None, None
1535 if sr.local():
1522 if sr.local():
1536 if sr.mq.applied:
1523 if sr.mq.applied:
1537 qbase = revlog.bin(sr.mq.applied[0].rev)
1524 qbase = revlog.bin(sr.mq.applied[0].rev)
1538 if not hg.islocal(dest):
1525 if not hg.islocal(dest):
1539 heads = dict.fromkeys(sr.heads())
1526 heads = dict.fromkeys(sr.heads())
1540 for h in sr.heads(qbase):
1527 for h in sr.heads(qbase):
1541 del heads[h]
1528 del heads[h]
1542 destrev = heads.keys()
1529 destrev = heads.keys()
1543 destrev.append(sr.changelog.parents(qbase)[0])
1530 destrev.append(sr.changelog.parents(qbase)[0])
1544 ui.note(_('cloning main repo\n'))
1531 ui.note(_('cloning main repo\n'))
1545 sr, dr = hg.clone(ui, sr.url(), dest,
1532 sr, dr = hg.clone(ui, sr.url(), dest,
1546 pull=opts['pull'],
1533 pull=opts['pull'],
1547 rev=destrev,
1534 rev=destrev,
1548 update=False,
1535 update=False,
1549 stream=opts['uncompressed'])
1536 stream=opts['uncompressed'])
1550 ui.note(_('cloning patch repo\n'))
1537 ui.note(_('cloning patch repo\n'))
1551 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1538 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1552 dr.url() + '/.hg/patches',
1539 dr.url() + '/.hg/patches',
1553 pull=opts['pull'],
1540 pull=opts['pull'],
1554 update=not opts['noupdate'],
1541 update=not opts['noupdate'],
1555 stream=opts['uncompressed'])
1542 stream=opts['uncompressed'])
1556 if dr.local():
1543 if dr.local():
1557 if qbase:
1544 if qbase:
1558 ui.note(_('stripping applied patches from destination repo\n'))
1545 ui.note(_('stripping applied patches from destination repo\n'))
1559 dr.mq.strip(dr, qbase, update=False, backup=None)
1546 dr.mq.strip(dr, qbase, update=False, backup=None)
1560 if not opts['noupdate']:
1547 if not opts['noupdate']:
1561 ui.note(_('updating destination repo\n'))
1548 ui.note(_('updating destination repo\n'))
1562 hg.update(dr, dr.changelog.tip())
1549 hg.update(dr, dr.changelog.tip())
1563
1550
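# A minimal usage sketch (this function is exposed in mq as the qclone
# command; paths are only illustrative):
#   hg qclone src dst          # clone the main repo and its .hg/patches queue
#   hg qclone -p URL src dst   # take the patch repo from a different location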
1564 def commit(ui, repo, *pats, **opts):
1551 def commit(ui, repo, *pats, **opts):
1565 """commit changes in the queue repository"""
1552 """commit changes in the queue repository"""
1566 q = repo.mq
1553 q = repo.mq
1567 r = q.qrepo()
1554 r = q.qrepo()
1568 if not r: raise util.Abort('no queue repository')
1555 if not r: raise util.Abort('no queue repository')
1569 commands.commit(r.ui, r, *pats, **opts)
1556 commands.commit(r.ui, r, *pats, **opts)
1570
1557
1571 def series(ui, repo, **opts):
1558 def series(ui, repo, **opts):
1572 """print the entire series file"""
1559 """print the entire series file"""
1573 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1560 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1574 return 0
1561 return 0
1575
1562
1576 def top(ui, repo, **opts):
1563 def top(ui, repo, **opts):
1577 """print the name of the current patch"""
1564 """print the name of the current patch"""
1578 q = repo.mq
1565 q = repo.mq
1579 t = q.applied and q.series_end(True) or 0
1566 t = q.applied and q.series_end(True) or 0
1580 if t:
1567 if t:
1581 return q.qseries(repo, start=t-1, length=1, status='A',
1568 return q.qseries(repo, start=t-1, length=1, status='A',
1582 summary=opts.get('summary'))
1569 summary=opts.get('summary'))
1583 else:
1570 else:
1584 ui.write("No patches applied\n")
1571 ui.write("No patches applied\n")
1585 return 1
1572 return 1
1586
1573
1587 def next(ui, repo, **opts):
1574 def next(ui, repo, **opts):
1588 """print the name of the next patch"""
1575 """print the name of the next patch"""
1589 q = repo.mq
1576 q = repo.mq
1590 end = q.series_end()
1577 end = q.series_end()
1591 if end == len(q.series):
1578 if end == len(q.series):
1592 ui.write("All patches applied\n")
1579 ui.write("All patches applied\n")
1593 return 1
1580 return 1
1594 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1581 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1595
1582
1596 def prev(ui, repo, **opts):
1583 def prev(ui, repo, **opts):
1597 """print the name of the previous patch"""
1584 """print the name of the previous patch"""
1598 q = repo.mq
1585 q = repo.mq
1599 l = len(q.applied)
1586 l = len(q.applied)
1600 if l == 1:
1587 if l == 1:
1601 ui.write("Only one patch applied\n")
1588 ui.write("Only one patch applied\n")
1602 return 1
1589 return 1
1603 if not l:
1590 if not l:
1604 ui.write("No patches applied\n")
1591 ui.write("No patches applied\n")
1605 return 1
1592 return 1
1606 return q.qseries(repo, start=l-2, length=1, status='A',
1593 return q.qseries(repo, start=l-2, length=1, status='A',
1607 summary=opts.get('summary'))
1594 summary=opts.get('summary'))
1608
1595
1609 def new(ui, repo, patch, *args, **opts):
1596 def new(ui, repo, patch, *args, **opts):
1610 """create a new patch
1597 """create a new patch
1611
1598
1612 qnew creates a new patch on top of the currently-applied patch
1599 qnew creates a new patch on top of the currently-applied patch
1613 (if any). It will refuse to run if there are any outstanding
1600 (if any). It will refuse to run if there are any outstanding
1614 changes unless -f is specified, in which case the patch will
1601 changes unless -f is specified, in which case the patch will
1615 be initialised with them. You may also use -I, -X, and/or a list of
1602 be initialised with them. You may also use -I, -X, and/or a list of
1616 files after the patch name to add only changes to matching files
1603 files after the patch name to add only changes to matching files
1617 to the new patch, leaving the rest as uncommitted modifications.
1604 to the new patch, leaving the rest as uncommitted modifications.
1618
1605
1619 -e, -m or -l set the patch header as well as the commit message.
1606 -e, -m or -l set the patch header as well as the commit message.
1620 If none is specified, the patch header is empty and the
1607 If none is specified, the patch header is empty and the
1621 commit message is '[mq]: PATCH'"""
1608 commit message is '[mq]: PATCH'"""
1622 q = repo.mq
1609 q = repo.mq
1623 message = cmdutil.logmessage(opts)
1610 message = cmdutil.logmessage(opts)
1624 if opts['edit']:
1611 if opts['edit']:
1625 message = ui.edit(message, ui.username())
1612 message = ui.edit(message, ui.username())
1626 opts['msg'] = message
1613 opts['msg'] = message
1627 q.new(repo, patch, *args, **opts)
1614 q.new(repo, patch, *args, **opts)
1628 q.save_dirty()
1615 q.save_dirty()
1629 return 0
1616 return 0
1630
1617
1631 def refresh(ui, repo, *pats, **opts):
1618 def refresh(ui, repo, *pats, **opts):
1632 """update the current patch
1619 """update the current patch
1633
1620
1634 If any file patterns are provided, the refreshed patch will contain only
1621 If any file patterns are provided, the refreshed patch will contain only
1635 the modifications that match those patterns; the remaining modifications
1622 the modifications that match those patterns; the remaining modifications
1636 will remain in the working directory.
1623 will remain in the working directory.
1637
1624
1638 hg add/remove/copy/rename work as usual, though you might want to use
1625 hg add/remove/copy/rename work as usual, though you might want to use
1639 git-style patches (--git or [diff] git=1) to track copies and renames.
1626 git-style patches (--git or [diff] git=1) to track copies and renames.
1640 """
1627 """
1641 q = repo.mq
1628 q = repo.mq
1642 message = cmdutil.logmessage(opts)
1629 message = cmdutil.logmessage(opts)
1643 if opts['edit']:
1630 if opts['edit']:
1644 if message:
1631 if message:
1645 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1632 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1646 patch = q.applied[-1].name
1633 patch = q.applied[-1].name
1647 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1634 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1648 message = ui.edit('\n'.join(message), user or ui.username())
1635 message = ui.edit('\n'.join(message), user or ui.username())
1649 ret = q.refresh(repo, pats, msg=message, **opts)
1636 ret = q.refresh(repo, pats, msg=message, **opts)
1650 q.save_dirty()
1637 q.save_dirty()
1651 return ret
1638 return ret
1652
1639
1653 def diff(ui, repo, *pats, **opts):
1640 def diff(ui, repo, *pats, **opts):
1654 """diff of the current patch"""
1641 """diff of the current patch"""
1655 repo.mq.diff(repo, pats, opts)
1642 repo.mq.diff(repo, pats, opts)
1656 return 0
1643 return 0
1657
1644
1658 def fold(ui, repo, *files, **opts):
1645 def fold(ui, repo, *files, **opts):
1659 """fold the named patches into the current patch
1646 """fold the named patches into the current patch
1660
1647
1661 Patches must not yet be applied. Each patch will be successively
1648 Patches must not yet be applied. Each patch will be successively
1662 applied to the current patch in the order given. If all the
1649 applied to the current patch in the order given. If all the
1663 patches apply successfully, the current patch will be refreshed
1650 patches apply successfully, the current patch will be refreshed
1664 with the new cumulative patch, and the folded patches will
1651 with the new cumulative patch, and the folded patches will
1665 be deleted. With -k/--keep, the folded patch files will not
1652 be deleted. With -k/--keep, the folded patch files will not
1666 be removed afterwards.
1653 be removed afterwards.
1667
1654
1668 The header for each folded patch will be concatenated with
1655 The header for each folded patch will be concatenated with
1669 the current patch header, separated by a line of '* * *'."""
1656 the current patch header, separated by a line of '* * *'."""
1670
1657
1671 q = repo.mq
1658 q = repo.mq
1672
1659
1673 if not files:
1660 if not files:
1674 raise util.Abort(_('qfold requires at least one patch name'))
1661 raise util.Abort(_('qfold requires at least one patch name'))
1675 if not q.check_toppatch(repo):
1662 if not q.check_toppatch(repo):
1676 raise util.Abort(_('No patches applied'))
1663 raise util.Abort(_('No patches applied'))
1677
1664
1678 message = cmdutil.logmessage(opts)
1665 message = cmdutil.logmessage(opts)
1679 if opts['edit']:
1666 if opts['edit']:
1680 if message:
1667 if message:
1681 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1668 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1682
1669
1683 parent = q.lookup('qtip')
1670 parent = q.lookup('qtip')
1684 patches = []
1671 patches = []
1685 messages = []
1672 messages = []
1686 for f in files:
1673 for f in files:
1687 p = q.lookup(f)
1674 p = q.lookup(f)
1688 if p in patches or p == parent:
1675 if p in patches or p == parent:
1689 ui.warn(_('Skipping already folded patch %s') % p)
1676 ui.warn(_('Skipping already folded patch %s') % p)
1690 if q.isapplied(p):
1677 if q.isapplied(p):
1691 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1678 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1692 patches.append(p)
1679 patches.append(p)
1693
1680
1694 for p in patches:
1681 for p in patches:
1695 if not message:
1682 if not message:
1696 messages.append(q.readheaders(p)[0])
1683 messages.append(q.readheaders(p)[0])
1697 pf = q.join(p)
1684 pf = q.join(p)
1698 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1685 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1699 if not patchsuccess:
1686 if not patchsuccess:
1700 raise util.Abort(_('Error folding patch %s') % p)
1687 raise util.Abort(_('Error folding patch %s') % p)
1701 patch.updatedir(ui, repo, files)
1688 patch.updatedir(ui, repo, files)
1702
1689
1703 if not message:
1690 if not message:
1704 message, comments, user = q.readheaders(parent)[0:3]
1691 message, comments, user = q.readheaders(parent)[0:3]
1705 for msg in messages:
1692 for msg in messages:
1706 message.append('* * *')
1693 message.append('* * *')
1707 message.extend(msg)
1694 message.extend(msg)
1708 message = '\n'.join(message)
1695 message = '\n'.join(message)
1709
1696
1710 if opts['edit']:
1697 if opts['edit']:
1711 message = ui.edit(message, user or ui.username())
1698 message = ui.edit(message, user or ui.username())
1712
1699
1713 q.refresh(repo, msg=message)
1700 q.refresh(repo, msg=message)
1714 q.delete(repo, patches, opts)
1701 q.delete(repo, patches, opts)
1715 q.save_dirty()
1702 q.save_dirty()
1716
1703
1717 def goto(ui, repo, patch, **opts):
1704 def goto(ui, repo, patch, **opts):
1718 '''push or pop patches until named patch is at top of stack'''
1705 '''push or pop patches until named patch is at top of stack'''
1719 q = repo.mq
1706 q = repo.mq
1720 patch = q.lookup(patch)
1707 patch = q.lookup(patch)
1721 if q.isapplied(patch):
1708 if q.isapplied(patch):
1722 ret = q.pop(repo, patch, force=opts['force'])
1709 ret = q.pop(repo, patch, force=opts['force'])
1723 else:
1710 else:
1724 ret = q.push(repo, patch, force=opts['force'])
1711 ret = q.push(repo, patch, force=opts['force'])
1725 q.save_dirty()
1712 q.save_dirty()
1726 return ret
1713 return ret
1727
1714
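# qgoto simply dispatches: if the target patch is already applied it pops
# back down until that patch is on top, otherwise it pushes forward to it,
# e.g. 'hg qgoto some.patch' (patch name illustrative).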
1728 def guard(ui, repo, *args, **opts):
1715 def guard(ui, repo, *args, **opts):
1729 '''set or print guards for a patch
1716 '''set or print guards for a patch
1730
1717
1731 Guards control whether a patch can be pushed. A patch with no
1718 Guards control whether a patch can be pushed. A patch with no
1732 guards is always pushed. A patch with a positive guard ("+foo") is
1719 guards is always pushed. A patch with a positive guard ("+foo") is
1733 pushed only if the qselect command has activated it. A patch with
1720 pushed only if the qselect command has activated it. A patch with
1734 a negative guard ("-foo") is never pushed if the qselect command
1721 a negative guard ("-foo") is never pushed if the qselect command
1735 has activated it.
1722 has activated it.
1736
1723
1737 With no arguments, print the currently active guards.
1724 With no arguments, print the currently active guards.
1738 With arguments, set guards for the named patch.
1725 With arguments, set guards for the named patch.
1739
1726
1740 To set a negative guard "-foo" on topmost patch ("--" is needed so
1727 To set a negative guard "-foo" on topmost patch ("--" is needed so
1741 hg will not interpret "-foo" as an option):
1728 hg will not interpret "-foo" as an option):
1742 hg qguard -- -foo
1729 hg qguard -- -foo
1743
1730
1744 To set guards on another patch:
1731 To set guards on another patch:
1745 hg qguard other.patch +2.6.17 -stable
1732 hg qguard other.patch +2.6.17 -stable
1746 '''
1733 '''
1747 def status(idx):
1734 def status(idx):
1748 guards = q.series_guards[idx] or ['unguarded']
1735 guards = q.series_guards[idx] or ['unguarded']
1749 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1736 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1750 q = repo.mq
1737 q = repo.mq
1751 patch = None
1738 patch = None
1752 args = list(args)
1739 args = list(args)
1753 if opts['list']:
1740 if opts['list']:
1754 if args or opts['none']:
1741 if args or opts['none']:
1755 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1742 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1756 for i in xrange(len(q.series)):
1743 for i in xrange(len(q.series)):
1757 status(i)
1744 status(i)
1758 return
1745 return
1759 if not args or args[0][0:1] in '-+':
1746 if not args or args[0][0:1] in '-+':
1760 if not q.applied:
1747 if not q.applied:
1761 raise util.Abort(_('no patches applied'))
1748 raise util.Abort(_('no patches applied'))
1762 patch = q.applied[-1].name
1749 patch = q.applied[-1].name
1763 if patch is None and args[0][0:1] not in '-+':
1750 if patch is None and args[0][0:1] not in '-+':
1764 patch = args.pop(0)
1751 patch = args.pop(0)
1765 if patch is None:
1752 if patch is None:
1766 raise util.Abort(_('no patch to work with'))
1753 raise util.Abort(_('no patch to work with'))
1767 if args or opts['none']:
1754 if args or opts['none']:
1768 idx = q.find_series(patch)
1755 idx = q.find_series(patch)
1769 if idx is None:
1756 if idx is None:
1770 raise util.Abort(_('no patch named %s') % patch)
1757 raise util.Abort(_('no patch named %s') % patch)
1771 q.set_guards(idx, args)
1758 q.set_guards(idx, args)
1772 q.save_dirty()
1759 q.save_dirty()
1773 else:
1760 else:
1774 status(q.series.index(q.lookup(patch)))
1761 status(q.series.index(q.lookup(patch)))
1775
1762
1776 def header(ui, repo, patch=None):
1763 def header(ui, repo, patch=None):
1777 """Print the header of the topmost or specified patch"""
1764 """Print the header of the topmost or specified patch"""
1778 q = repo.mq
1765 q = repo.mq
1779
1766
1780 if patch:
1767 if patch:
1781 patch = q.lookup(patch)
1768 patch = q.lookup(patch)
1782 else:
1769 else:
1783 if not q.applied:
1770 if not q.applied:
1784 ui.write('No patches applied\n')
1771 ui.write('No patches applied\n')
1785 return 1
1772 return 1
1786 patch = q.lookup('qtip')
1773 patch = q.lookup('qtip')
1787 message = repo.mq.readheaders(patch)[0]
1774 message = repo.mq.readheaders(patch)[0]
1788
1775
1789 ui.write('\n'.join(message) + '\n')
1776 ui.write('\n'.join(message) + '\n')
1790
1777
1791 def lastsavename(path):
1778 def lastsavename(path):
1792 (directory, base) = os.path.split(path)
1779 (directory, base) = os.path.split(path)
1793 names = os.listdir(directory)
1780 names = os.listdir(directory)
1794 namere = re.compile("%s.([0-9]+)" % base)
1781 namere = re.compile("%s.([0-9]+)" % base)
1795 maxindex = None
1782 maxindex = None
1796 maxname = None
1783 maxname = None
1797 for f in names:
1784 for f in names:
1798 m = namere.match(f)
1785 m = namere.match(f)
1799 if m:
1786 if m:
1800 index = int(m.group(1))
1787 index = int(m.group(1))
1801 if maxindex == None or index > maxindex:
1788 if maxindex == None or index > maxindex:
1802 maxindex = index
1789 maxindex = index
1803 maxname = f
1790 maxname = f
1804 if maxname:
1791 if maxname:
1805 return (os.path.join(directory, maxname), maxindex)
1792 return (os.path.join(directory, maxname), maxindex)
1806 return (None, None)
1793 return (None, None)
1807
1794
1808 def savename(path):
1795 def savename(path):
1809 (last, index) = lastsavename(path)
1796 (last, index) = lastsavename(path)
1810 if last is None:
1797 if last is None:
1811 index = 0
1798 index = 0
1812 newpath = path + ".%d" % (index + 1)
1799 newpath = path + ".%d" % (index + 1)
1813 return newpath
1800 return newpath
1814
1801
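# A minimal illustrative sketch of the save-name scheme the two helpers above
# implement: saved copies of a patch queue get numeric suffixes, so a queue at
# ".hg/patches" is followed by ".hg/patches.1", ".hg/patches.2", and so on.
# The temporary directory and file names below are invented for illustration.
#
#   import os, tempfile
#   d = tempfile.mkdtemp()
#   base = os.path.join(d, 'patches')
#   savename(base)          # no saved copies yet -> ".../patches.1"
#   open(base + '.1', 'w').close()
#   open(base + '.2', 'w').close()
#   lastsavename(base)      # -> (".../patches.2", 2)
#   savename(base)          # next copy -> ".../patches.3"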
1815 def push(ui, repo, patch=None, **opts):
1802 def push(ui, repo, patch=None, **opts):
1816 """push the next patch onto the stack"""
1803 """push the next patch onto the stack"""
1817 q = repo.mq
1804 q = repo.mq
1818 mergeq = None
1805 mergeq = None
1819
1806
1820 if opts['all']:
1807 if opts['all']:
1821 if not q.series:
1808 if not q.series:
1822 ui.warn(_('no patches in series\n'))
1809 ui.warn(_('no patches in series\n'))
1823 return 0
1810 return 0
1824 patch = q.series[-1]
1811 patch = q.series[-1]
1825 if opts['merge']:
1812 if opts['merge']:
1826 if opts['name']:
1813 if opts['name']:
1827 newpath = opts['name']
1814 newpath = opts['name']
1828 else:
1815 else:
1829 newpath, i = lastsavename(q.path)
1816 newpath, i = lastsavename(q.path)
1830 if not newpath:
1817 if not newpath:
1831 ui.warn("no saved queues found, please use -n\n")
1818 ui.warn("no saved queues found, please use -n\n")
1832 return 1
1819 return 1
1833 mergeq = queue(ui, repo.join(""), newpath)
1820 mergeq = queue(ui, repo.join(""), newpath)
1834 ui.warn("merging with queue at: %s\n" % mergeq.path)
1821 ui.warn("merging with queue at: %s\n" % mergeq.path)
1835 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1822 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1836 mergeq=mergeq)
1823 mergeq=mergeq)
1837 return ret
1824 return ret
1838
1825
1839 def pop(ui, repo, patch=None, **opts):
1826 def pop(ui, repo, patch=None, **opts):
1840 """pop the current patch off the stack"""
1827 """pop the current patch off the stack"""
1841 localupdate = True
1828 localupdate = True
1842 if opts['name']:
1829 if opts['name']:
1843 q = queue(ui, repo.join(""), repo.join(opts['name']))
1830 q = queue(ui, repo.join(""), repo.join(opts['name']))
1844 ui.warn('using patch queue: %s\n' % q.path)
1831 ui.warn('using patch queue: %s\n' % q.path)
1845 localupdate = False
1832 localupdate = False
1846 else:
1833 else:
1847 q = repo.mq
1834 q = repo.mq
1848 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1835 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1849 all=opts['all'])
1836 all=opts['all'])
1850 q.save_dirty()
1837 q.save_dirty()
1851 return ret
1838 return ret
1852
1839
1853 def rename(ui, repo, patch, name=None, **opts):
1840 def rename(ui, repo, patch, name=None, **opts):
1854 """rename a patch
1841 """rename a patch
1855
1842
1856 With one argument, renames the current patch to PATCH1.
1843 With one argument, renames the current patch to PATCH1.
1857 With two arguments, renames PATCH1 to PATCH2."""
1844 With two arguments, renames PATCH1 to PATCH2."""
1858
1845
1859 q = repo.mq
1846 q = repo.mq
1860
1847
1861 if not name:
1848 if not name:
1862 name = patch
1849 name = patch
1863 patch = None
1850 patch = None
1864
1851
1865 if patch:
1852 if patch:
1866 patch = q.lookup(patch)
1853 patch = q.lookup(patch)
1867 else:
1854 else:
1868 if not q.applied:
1855 if not q.applied:
1869 ui.write(_('No patches applied\n'))
1856 ui.write(_('No patches applied\n'))
1870 return
1857 return
1871 patch = q.lookup('qtip')
1858 patch = q.lookup('qtip')
1872 absdest = q.join(name)
1859 absdest = q.join(name)
1873 if os.path.isdir(absdest):
1860 if os.path.isdir(absdest):
1874 name = normname(os.path.join(name, os.path.basename(patch)))
1861 name = normname(os.path.join(name, os.path.basename(patch)))
1875 absdest = q.join(name)
1862 absdest = q.join(name)
1876 if os.path.exists(absdest):
1863 if os.path.exists(absdest):
1877 raise util.Abort(_('%s already exists') % absdest)
1864 raise util.Abort(_('%s already exists') % absdest)
1878
1865
1879 if name in q.series:
1866 if name in q.series:
1880 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1867 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1881
1868
1882 if ui.verbose:
1869 if ui.verbose:
1883 ui.write('Renaming %s to %s\n' % (patch, name))
1870 ui.write('Renaming %s to %s\n' % (patch, name))
1884 i = q.find_series(patch)
1871 i = q.find_series(patch)
1885 guards = q.guard_re.findall(q.full_series[i])
1872 guards = q.guard_re.findall(q.full_series[i])
1886 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1873 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1887 q.parse_series()
1874 q.parse_series()
1888 q.series_dirty = 1
1875 q.series_dirty = 1
1889
1876
1890 info = q.isapplied(patch)
1877 info = q.isapplied(patch)
1891 if info:
1878 if info:
1892 q.applied[info[0]] = statusentry(info[1], name)
1879 q.applied[info[0]] = statusentry(info[1], name)
1893 q.applied_dirty = 1
1880 q.applied_dirty = 1
1894
1881
1895 util.rename(q.join(patch), absdest)
1882 util.rename(q.join(patch), absdest)
1896 r = q.qrepo()
1883 r = q.qrepo()
1897 if r:
1884 if r:
1898 wlock = r.wlock()
1885 wlock = r.wlock()
1899 try:
1886 try:
1900 if r.dirstate[name] == 'r':
1887 if r.dirstate[name] == 'r':
1901 r.undelete([name], wlock)
1888 r.undelete([name])
1902 r.copy(patch, name, wlock)
1889 r.copy(patch, name)
1903 r.remove([patch], False, wlock)
1890 r.remove([patch], False)
1904 finally:
1891 finally:
1905 del wlock
1892 del wlock
1906
1893
1907 q.save_dirty()
1894 q.save_dirty()
1908
1895
1909 def restore(ui, repo, rev, **opts):
1896 def restore(ui, repo, rev, **opts):
1910 """restore the queue state saved by a rev"""
1897 """restore the queue state saved by a rev"""
1911 rev = repo.lookup(rev)
1898 rev = repo.lookup(rev)
1912 q = repo.mq
1899 q = repo.mq
1913 q.restore(repo, rev, delete=opts['delete'],
1900 q.restore(repo, rev, delete=opts['delete'],
1914 qupdate=opts['update'])
1901 qupdate=opts['update'])
1915 q.save_dirty()
1902 q.save_dirty()
1916 return 0
1903 return 0
1917
1904
1918 def save(ui, repo, **opts):
1905 def save(ui, repo, **opts):
1919 """save current queue state"""
1906 """save current queue state"""
1920 q = repo.mq
1907 q = repo.mq
1921 message = cmdutil.logmessage(opts)
1908 message = cmdutil.logmessage(opts)
1922 ret = q.save(repo, msg=message)
1909 ret = q.save(repo, msg=message)
1923 if ret:
1910 if ret:
1924 return ret
1911 return ret
1925 q.save_dirty()
1912 q.save_dirty()
1926 if opts['copy']:
1913 if opts['copy']:
1927 path = q.path
1914 path = q.path
1928 if opts['name']:
1915 if opts['name']:
1929 newpath = os.path.join(q.basepath, opts['name'])
1916 newpath = os.path.join(q.basepath, opts['name'])
1930 if os.path.exists(newpath):
1917 if os.path.exists(newpath):
1931 if not os.path.isdir(newpath):
1918 if not os.path.isdir(newpath):
1932 raise util.Abort(_('destination %s exists and is not '
1919 raise util.Abort(_('destination %s exists and is not '
1933 'a directory') % newpath)
1920 'a directory') % newpath)
1934 if not opts['force']:
1921 if not opts['force']:
1935 raise util.Abort(_('destination %s exists, '
1922 raise util.Abort(_('destination %s exists, '
1936 'use -f to force') % newpath)
1923 'use -f to force') % newpath)
1937 else:
1924 else:
1938 newpath = savename(path)
1925 newpath = savename(path)
1939 ui.warn("copy %s to %s\n" % (path, newpath))
1926 ui.warn("copy %s to %s\n" % (path, newpath))
1940 util.copyfiles(path, newpath)
1927 util.copyfiles(path, newpath)
1941 if opts['empty']:
1928 if opts['empty']:
1942 try:
1929 try:
1943 os.unlink(q.join(q.status_path))
1930 os.unlink(q.join(q.status_path))
1944 except:
1931 except:
1945 pass
1932 pass
1946 return 0
1933 return 0
1947
1934
1948 def strip(ui, repo, rev, **opts):
1935 def strip(ui, repo, rev, **opts):
1949 """strip a revision and all later revs on the same branch"""
1936 """strip a revision and all later revs on the same branch"""
1950 rev = repo.lookup(rev)
1937 rev = repo.lookup(rev)
1951 backup = 'all'
1938 backup = 'all'
1952 if opts['backup']:
1939 if opts['backup']:
1953 backup = 'strip'
1940 backup = 'strip'
1954 elif opts['nobackup']:
1941 elif opts['nobackup']:
1955 backup = 'none'
1942 backup = 'none'
1956 update = repo.dirstate.parents()[0] != revlog.nullid
1943 update = repo.dirstate.parents()[0] != revlog.nullid
1957 repo.mq.strip(repo, rev, backup=backup, update=update)
1944 repo.mq.strip(repo, rev, backup=backup, update=update)
1958 return 0
1945 return 0
1959
1946
1960 def select(ui, repo, *args, **opts):
1947 def select(ui, repo, *args, **opts):
1961 '''set or print guarded patches to push
1948 '''set or print guarded patches to push
1962
1949
1963 Use the qguard command to set or print guards on patch, then use
1950 Use the qguard command to set or print guards on patch, then use
1964 qselect to tell mq which guards to use. A patch will be pushed if it
1951 qselect to tell mq which guards to use. A patch will be pushed if it
1965 has no guards or any positive guards match the currently selected guard,
1952 has no guards or any positive guards match the currently selected guard,
1966 but will not be pushed if any negative guards match the current guard.
1953 but will not be pushed if any negative guards match the current guard.
1967 For example:
1954 For example:
1968
1955
1969 qguard foo.patch -stable (negative guard)
1956 qguard foo.patch -stable (negative guard)
1970 qguard bar.patch +stable (positive guard)
1957 qguard bar.patch +stable (positive guard)
1971 qselect stable
1958 qselect stable
1972
1959
1973 This activates the "stable" guard. mq will skip foo.patch (because
1960 This activates the "stable" guard. mq will skip foo.patch (because
1974 it has a negative match) but push bar.patch (because it
1961 it has a negative match) but push bar.patch (because it
1975 has a positive match).
1962 has a positive match).
1976
1963
1977 With no arguments, prints the currently active guards.
1964 With no arguments, prints the currently active guards.
1978 With one argument, sets the active guard.
1965 With one argument, sets the active guard.
1979
1966
1980 Use -n/--none to deactivate guards (no other arguments needed).
1967 Use -n/--none to deactivate guards (no other arguments needed).
1981 When no guards are active, patches with positive guards are skipped
1968 When no guards are active, patches with positive guards are skipped
1982 and patches with negative guards are pushed.
1969 and patches with negative guards are pushed.
1983
1970
1984 qselect can change the guards on applied patches. It does not pop
1971 qselect can change the guards on applied patches. It does not pop
1985 guarded patches by default. Use --pop to pop back to the last applied
1972 guarded patches by default. Use --pop to pop back to the last applied
1986 patch that is not guarded. Use --reapply (which implies --pop) to push
1973 patch that is not guarded. Use --reapply (which implies --pop) to push
1987 back to the current patch afterwards, but skip guarded patches.
1974 back to the current patch afterwards, but skip guarded patches.
1988
1975
1989 Use -s/--series to print a list of all guards in the series file (no
1976 Use -s/--series to print a list of all guards in the series file (no
1990 other arguments needed). Use -v for more information.'''
1977 other arguments needed). Use -v for more information.'''
1991
1978
1992 q = repo.mq
1979 q = repo.mq
1993 guards = q.active()
1980 guards = q.active()
1994 if args or opts['none']:
1981 if args or opts['none']:
1995 old_unapplied = q.unapplied(repo)
1982 old_unapplied = q.unapplied(repo)
1996 old_guarded = [i for i in xrange(len(q.applied)) if
1983 old_guarded = [i for i in xrange(len(q.applied)) if
1997 not q.pushable(i)[0]]
1984 not q.pushable(i)[0]]
1998 q.set_active(args)
1985 q.set_active(args)
1999 q.save_dirty()
1986 q.save_dirty()
2000 if not args:
1987 if not args:
2001 ui.status(_('guards deactivated\n'))
1988 ui.status(_('guards deactivated\n'))
2002 if not opts['pop'] and not opts['reapply']:
1989 if not opts['pop'] and not opts['reapply']:
2003 unapplied = q.unapplied(repo)
1990 unapplied = q.unapplied(repo)
2004 guarded = [i for i in xrange(len(q.applied))
1991 guarded = [i for i in xrange(len(q.applied))
2005 if not q.pushable(i)[0]]
1992 if not q.pushable(i)[0]]
2006 if len(unapplied) != len(old_unapplied):
1993 if len(unapplied) != len(old_unapplied):
2007 ui.status(_('number of unguarded, unapplied patches has '
1994 ui.status(_('number of unguarded, unapplied patches has '
2008 'changed from %d to %d\n') %
1995 'changed from %d to %d\n') %
2009 (len(old_unapplied), len(unapplied)))
1996 (len(old_unapplied), len(unapplied)))
2010 if len(guarded) != len(old_guarded):
1997 if len(guarded) != len(old_guarded):
2011 ui.status(_('number of guarded, applied patches has changed '
1998 ui.status(_('number of guarded, applied patches has changed '
2012 'from %d to %d\n') %
1999 'from %d to %d\n') %
2013 (len(old_guarded), len(guarded)))
2000 (len(old_guarded), len(guarded)))
2014 elif opts['series']:
2001 elif opts['series']:
2015 guards = {}
2002 guards = {}
2016 noguards = 0
2003 noguards = 0
2017 for gs in q.series_guards:
2004 for gs in q.series_guards:
2018 if not gs:
2005 if not gs:
2019 noguards += 1
2006 noguards += 1
2020 for g in gs:
2007 for g in gs:
2021 guards.setdefault(g, 0)
2008 guards.setdefault(g, 0)
2022 guards[g] += 1
2009 guards[g] += 1
2023 if ui.verbose:
2010 if ui.verbose:
2024 guards['NONE'] = noguards
2011 guards['NONE'] = noguards
2025 guards = guards.items()
2012 guards = guards.items()
2026 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2013 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2027 if guards:
2014 if guards:
2028 ui.note(_('guards in series file:\n'))
2015 ui.note(_('guards in series file:\n'))
2029 for guard, count in guards:
2016 for guard, count in guards:
2030 ui.note('%2d ' % count)
2017 ui.note('%2d ' % count)
2031 ui.write(guard, '\n')
2018 ui.write(guard, '\n')
2032 else:
2019 else:
2033 ui.note(_('no guards in series file\n'))
2020 ui.note(_('no guards in series file\n'))
2034 else:
2021 else:
2035 if guards:
2022 if guards:
2036 ui.note(_('active guards:\n'))
2023 ui.note(_('active guards:\n'))
2037 for g in guards:
2024 for g in guards:
2038 ui.write(g, '\n')
2025 ui.write(g, '\n')
2039 else:
2026 else:
2040 ui.write(_('no active guards\n'))
2027 ui.write(_('no active guards\n'))
2041 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2028 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2042 popped = False
2029 popped = False
2043 if opts['pop'] or opts['reapply']:
2030 if opts['pop'] or opts['reapply']:
2044 for i in xrange(len(q.applied)):
2031 for i in xrange(len(q.applied)):
2045 pushable, reason = q.pushable(i)
2032 pushable, reason = q.pushable(i)
2046 if not pushable:
2033 if not pushable:
2047 ui.status(_('popping guarded patches\n'))
2034 ui.status(_('popping guarded patches\n'))
2048 popped = True
2035 popped = True
2049 if i == 0:
2036 if i == 0:
2050 q.pop(repo, all=True)
2037 q.pop(repo, all=True)
2051 else:
2038 else:
2052 q.pop(repo, i-1)
2039 q.pop(repo, i-1)
2053 break
2040 break
2054 if popped:
2041 if popped:
2055 try:
2042 try:
2056 if reapply:
2043 if reapply:
2057 ui.status(_('reapplying unguarded patches\n'))
2044 ui.status(_('reapplying unguarded patches\n'))
2058 q.push(repo, reapply)
2045 q.push(repo, reapply)
2059 finally:
2046 finally:
2060 q.save_dirty()
2047 q.save_dirty()
2061
2048
2062 def reposetup(ui, repo):
2049 def reposetup(ui, repo):
2063 class mqrepo(repo.__class__):
2050 class mqrepo(repo.__class__):
2064 def abort_if_wdir_patched(self, errmsg, force=False):
2051 def abort_if_wdir_patched(self, errmsg, force=False):
2065 if self.mq.applied and not force:
2052 if self.mq.applied and not force:
2066 parent = revlog.hex(self.dirstate.parents()[0])
2053 parent = revlog.hex(self.dirstate.parents()[0])
2067 if parent in [s.rev for s in self.mq.applied]:
2054 if parent in [s.rev for s in self.mq.applied]:
2068 raise util.Abort(errmsg)
2055 raise util.Abort(errmsg)
2069
2056
2070 def commit(self, *args, **opts):
2057 def commit(self, *args, **opts):
2071 if len(args) >= 6:
2058 if len(args) >= 6:
2072 force = args[5]
2059 force = args[5]
2073 else:
2060 else:
2074 force = opts.get('force')
2061 force = opts.get('force')
2075 self.abort_if_wdir_patched(
2062 self.abort_if_wdir_patched(
2076 _('cannot commit over an applied mq patch'),
2063 _('cannot commit over an applied mq patch'),
2077 force)
2064 force)
2078
2065
2079 return super(mqrepo, self).commit(*args, **opts)
2066 return super(mqrepo, self).commit(*args, **opts)
2080
2067
2081 def push(self, remote, force=False, revs=None):
2068 def push(self, remote, force=False, revs=None):
2082 if self.mq.applied and not force and not revs:
2069 if self.mq.applied and not force and not revs:
2083 raise util.Abort(_('source has mq patches applied'))
2070 raise util.Abort(_('source has mq patches applied'))
2084 return super(mqrepo, self).push(remote, force, revs)
2071 return super(mqrepo, self).push(remote, force, revs)
2085
2072
2086 def tags(self):
2073 def tags(self):
2087 if self.tagscache:
2074 if self.tagscache:
2088 return self.tagscache
2075 return self.tagscache
2089
2076
2090 tagscache = super(mqrepo, self).tags()
2077 tagscache = super(mqrepo, self).tags()
2091
2078
2092 q = self.mq
2079 q = self.mq
2093 if not q.applied:
2080 if not q.applied:
2094 return tagscache
2081 return tagscache
2095
2082
2096 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2083 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2097 mqtags.append((mqtags[-1][0], 'qtip'))
2084 mqtags.append((mqtags[-1][0], 'qtip'))
2098 mqtags.append((mqtags[0][0], 'qbase'))
2085 mqtags.append((mqtags[0][0], 'qbase'))
2099 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2086 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2100 for patch in mqtags:
2087 for patch in mqtags:
2101 if patch[1] in tagscache:
2088 if patch[1] in tagscache:
2102 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2089 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2103 else:
2090 else:
2104 tagscache[patch[1]] = patch[0]
2091 tagscache[patch[1]] = patch[0]
2105
2092
2106 return tagscache
2093 return tagscache
2107
2094
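# The tags() override above exposes every applied mq patch as a tag and adds
# the synthetic tags qtip (last applied patch), qbase (first applied patch)
# and qparent (parent of qbase), so while patches are applied they can be
# used anywhere a revision is expected, for example:
#
#   hg log -r qparent:qtip
#   hg diff -r qparent -r qtip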
2108 def _branchtags(self):
2095 def _branchtags(self):
2109 q = self.mq
2096 q = self.mq
2110 if not q.applied:
2097 if not q.applied:
2111 return super(mqrepo, self)._branchtags()
2098 return super(mqrepo, self)._branchtags()
2112
2099
2113 self.branchcache = {} # avoid recursion in changectx
2100 self.branchcache = {} # avoid recursion in changectx
2114 cl = self.changelog
2101 cl = self.changelog
2115 partial, last, lrev = self._readbranchcache()
2102 partial, last, lrev = self._readbranchcache()
2116
2103
2117 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2104 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2118 start = lrev + 1
2105 start = lrev + 1
2119 if start < qbase:
2106 if start < qbase:
2120 # update the cache (excluding the patches) and save it
2107 # update the cache (excluding the patches) and save it
2121 self._updatebranchcache(partial, lrev+1, qbase)
2108 self._updatebranchcache(partial, lrev+1, qbase)
2122 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2109 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2123 start = qbase
2110 start = qbase
2124 # if start = qbase, the cache is as updated as it should be.
2111 # if start = qbase, the cache is as updated as it should be.
2125 # if start > qbase, the cache includes (part of) the patches.
2112 # if start > qbase, the cache includes (part of) the patches.
2126 # we might as well use it, but we won't save it.
2113 # we might as well use it, but we won't save it.
2127
2114
2128 # update the cache up to the tip
2115 # update the cache up to the tip
2129 self._updatebranchcache(partial, start, cl.count())
2116 self._updatebranchcache(partial, start, cl.count())
2130
2117
2131 return partial
2118 return partial
2132
2119
2133 if repo.local():
2120 if repo.local():
2134 repo.__class__ = mqrepo
2121 repo.__class__ = mqrepo
2135 repo.mq = queue(ui, repo.join(""))
2122 repo.mq = queue(ui, repo.join(""))
2136
2123
2137 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2124 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2138
2125
2139 cmdtable = {
2126 cmdtable = {
2140 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2127 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2141 "qclone":
2128 "qclone":
2142 (clone,
2129 (clone,
2143 [('', 'pull', None, _('use pull protocol to copy metadata')),
2130 [('', 'pull', None, _('use pull protocol to copy metadata')),
2144 ('U', 'noupdate', None, _('do not update the new working directories')),
2131 ('U', 'noupdate', None, _('do not update the new working directories')),
2145 ('', 'uncompressed', None,
2132 ('', 'uncompressed', None,
2146 _('use uncompressed transfer (fast over LAN)')),
2133 _('use uncompressed transfer (fast over LAN)')),
2147 ('e', 'ssh', '', _('specify ssh command to use')),
2134 ('e', 'ssh', '', _('specify ssh command to use')),
2148 ('p', 'patches', '', _('location of source patch repo')),
2135 ('p', 'patches', '', _('location of source patch repo')),
2149 ('', 'remotecmd', '',
2136 ('', 'remotecmd', '',
2150 _('specify hg command to run on the remote side'))],
2137 _('specify hg command to run on the remote side'))],
2151 _('hg qclone [OPTION]... SOURCE [DEST]')),
2138 _('hg qclone [OPTION]... SOURCE [DEST]')),
2152 "qcommit|qci":
2139 "qcommit|qci":
2153 (commit,
2140 (commit,
2154 commands.table["^commit|ci"][1],
2141 commands.table["^commit|ci"][1],
2155 _('hg qcommit [OPTION]... [FILE]...')),
2142 _('hg qcommit [OPTION]... [FILE]...')),
2156 "^qdiff":
2143 "^qdiff":
2157 (diff,
2144 (diff,
2158 [('g', 'git', None, _('use git extended diff format')),
2145 [('g', 'git', None, _('use git extended diff format')),
2159 ('I', 'include', [], _('include names matching the given patterns')),
2146 ('I', 'include', [], _('include names matching the given patterns')),
2160 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2147 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2161 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2148 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2162 "qdelete|qremove|qrm":
2149 "qdelete|qremove|qrm":
2163 (delete,
2150 (delete,
2164 [('k', 'keep', None, _('keep patch file')),
2151 [('k', 'keep', None, _('keep patch file')),
2165 ('r', 'rev', [], _('stop managing a revision'))],
2152 ('r', 'rev', [], _('stop managing a revision'))],
2166 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2153 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2167 'qfold':
2154 'qfold':
2168 (fold,
2155 (fold,
2169 [('e', 'edit', None, _('edit patch header')),
2156 [('e', 'edit', None, _('edit patch header')),
2170 ('k', 'keep', None, _('keep folded patch files')),
2157 ('k', 'keep', None, _('keep folded patch files')),
2171 ] + commands.commitopts,
2158 ] + commands.commitopts,
2172 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2159 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2173 'qgoto':
2160 'qgoto':
2174 (goto,
2161 (goto,
2175 [('f', 'force', None, _('overwrite any local changes'))],
2162 [('f', 'force', None, _('overwrite any local changes'))],
2176 _('hg qgoto [OPTION]... PATCH')),
2163 _('hg qgoto [OPTION]... PATCH')),
2177 'qguard':
2164 'qguard':
2178 (guard,
2165 (guard,
2179 [('l', 'list', None, _('list all patches and guards')),
2166 [('l', 'list', None, _('list all patches and guards')),
2180 ('n', 'none', None, _('drop all guards'))],
2167 ('n', 'none', None, _('drop all guards'))],
2181 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2168 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2182 'qheader': (header, [], _('hg qheader [PATCH]')),
2169 'qheader': (header, [], _('hg qheader [PATCH]')),
2183 "^qimport":
2170 "^qimport":
2184 (qimport,
2171 (qimport,
2185 [('e', 'existing', None, 'import file in patch dir'),
2172 [('e', 'existing', None, 'import file in patch dir'),
2186 ('n', 'name', '', 'patch file name'),
2173 ('n', 'name', '', 'patch file name'),
2187 ('f', 'force', None, 'overwrite existing files'),
2174 ('f', 'force', None, 'overwrite existing files'),
2188 ('r', 'rev', [], 'place existing revisions under mq control'),
2175 ('r', 'rev', [], 'place existing revisions under mq control'),
2189 ('g', 'git', None, _('use git extended diff format'))],
2176 ('g', 'git', None, _('use git extended diff format'))],
2190 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2177 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2191 "^qinit":
2178 "^qinit":
2192 (init,
2179 (init,
2193 [('c', 'create-repo', None, 'create queue repository')],
2180 [('c', 'create-repo', None, 'create queue repository')],
2194 _('hg qinit [-c]')),
2181 _('hg qinit [-c]')),
2195 "qnew":
2182 "qnew":
2196 (new,
2183 (new,
2197 [('e', 'edit', None, _('edit commit message')),
2184 [('e', 'edit', None, _('edit commit message')),
2198 ('f', 'force', None, _('import uncommitted changes into patch')),
2185 ('f', 'force', None, _('import uncommitted changes into patch')),
2199 ('I', 'include', [], _('include names matching the given patterns')),
2186 ('I', 'include', [], _('include names matching the given patterns')),
2200 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2187 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2201 ] + commands.commitopts,
2188 ] + commands.commitopts,
2202 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2189 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2203 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2190 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2204 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2191 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2205 "^qpop":
2192 "^qpop":
2206 (pop,
2193 (pop,
2207 [('a', 'all', None, _('pop all patches')),
2194 [('a', 'all', None, _('pop all patches')),
2208 ('n', 'name', '', _('queue name to pop')),
2195 ('n', 'name', '', _('queue name to pop')),
2209 ('f', 'force', None, _('forget any local changes'))],
2196 ('f', 'force', None, _('forget any local changes'))],
2210 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2197 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2211 "^qpush":
2198 "^qpush":
2212 (push,
2199 (push,
2213 [('f', 'force', None, _('apply if the patch has rejects')),
2200 [('f', 'force', None, _('apply if the patch has rejects')),
2214 ('l', 'list', None, _('list patch name in commit text')),
2201 ('l', 'list', None, _('list patch name in commit text')),
2215 ('a', 'all', None, _('apply all patches')),
2202 ('a', 'all', None, _('apply all patches')),
2216 ('m', 'merge', None, _('merge from another queue')),
2203 ('m', 'merge', None, _('merge from another queue')),
2217 ('n', 'name', '', _('merge queue name'))],
2204 ('n', 'name', '', _('merge queue name'))],
2218 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2205 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2219 "^qrefresh":
2206 "^qrefresh":
2220 (refresh,
2207 (refresh,
2221 [('e', 'edit', None, _('edit commit message')),
2208 [('e', 'edit', None, _('edit commit message')),
2222 ('g', 'git', None, _('use git extended diff format')),
2209 ('g', 'git', None, _('use git extended diff format')),
2223 ('s', 'short', None, _('refresh only files already in the patch')),
2210 ('s', 'short', None, _('refresh only files already in the patch')),
2224 ('I', 'include', [], _('include names matching the given patterns')),
2211 ('I', 'include', [], _('include names matching the given patterns')),
2225 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2212 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2226 ] + commands.commitopts,
2213 ] + commands.commitopts,
2227 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2214 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2228 'qrename|qmv':
2215 'qrename|qmv':
2229 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2216 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2230 "qrestore":
2217 "qrestore":
2231 (restore,
2218 (restore,
2232 [('d', 'delete', None, _('delete save entry')),
2219 [('d', 'delete', None, _('delete save entry')),
2233 ('u', 'update', None, _('update queue working dir'))],
2220 ('u', 'update', None, _('update queue working dir'))],
2234 _('hg qrestore [-d] [-u] REV')),
2221 _('hg qrestore [-d] [-u] REV')),
2235 "qsave":
2222 "qsave":
2236 (save,
2223 (save,
2237 [('c', 'copy', None, _('copy patch directory')),
2224 [('c', 'copy', None, _('copy patch directory')),
2238 ('n', 'name', '', _('copy directory name')),
2225 ('n', 'name', '', _('copy directory name')),
2239 ('e', 'empty', None, _('clear queue status file')),
2226 ('e', 'empty', None, _('clear queue status file')),
2240 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2227 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2241 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2228 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2242 "qselect":
2229 "qselect":
2243 (select,
2230 (select,
2244 [('n', 'none', None, _('disable all guards')),
2231 [('n', 'none', None, _('disable all guards')),
2245 ('s', 'series', None, _('list all guards in series file')),
2232 ('s', 'series', None, _('list all guards in series file')),
2246 ('', 'pop', None, _('pop to before first guarded applied patch')),
2233 ('', 'pop', None, _('pop to before first guarded applied patch')),
2247 ('', 'reapply', None, _('pop, then reapply patches'))],
2234 ('', 'reapply', None, _('pop, then reapply patches'))],
2248 _('hg qselect [OPTION]... [GUARD]...')),
2235 _('hg qselect [OPTION]... [GUARD]...')),
2249 "qseries":
2236 "qseries":
2250 (series,
2237 (series,
2251 [('m', 'missing', None, _('print patches not in series')),
2238 [('m', 'missing', None, _('print patches not in series')),
2252 ] + seriesopts,
2239 ] + seriesopts,
2253 _('hg qseries [-ms]')),
2240 _('hg qseries [-ms]')),
2254 "^strip":
2241 "^strip":
2255 (strip,
2242 (strip,
2256 [('f', 'force', None, _('force multi-head removal')),
2243 [('f', 'force', None, _('force multi-head removal')),
2257 ('b', 'backup', None, _('bundle unrelated changesets')),
2244 ('b', 'backup', None, _('bundle unrelated changesets')),
2258 ('n', 'nobackup', None, _('no backups'))],
2245 ('n', 'nobackup', None, _('no backups'))],
2259 _('hg strip [-f] [-b] [-n] REV')),
2246 _('hg strip [-f] [-b] [-n] REV')),
2260 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2247 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2261 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2248 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2262 }
2249 }
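# Each cmdtable entry above maps a command name -- "^" marks it for the short
# help listing, "|" separates aliases -- to a (function, options, synopsis)
# tuple, where every option is itself a (short flag, long name, default,
# help text) tuple.  A minimal sketch of that shape, with an invented command
# name and option:
#
#   def hello(ui, repo, **opts):
#       """print a greeting (illustrative only)"""
#       ui.write('hello from %s\n' % repo.root)
#
#   cmdtable_example = {
#       "hello": (hello,
#                 [('l', 'loud', None, 'shout the greeting')],
#                 'hg hello [-l]'),
#   }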
@@ -1,600 +1,597
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 import os, tempfile
9 import os, tempfile
10 from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
10 from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
11 from mercurial import patch, revlog, util
11 from mercurial import patch, revlog, util
12
12
13 '''patch transplanting tool
13 '''patch transplanting tool
14
14
15 This extension allows you to transplant patches from another branch.
15 This extension allows you to transplant patches from another branch.
16
16
17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
18 from a changeset hash to its hash in the source repository.
18 from a changeset hash to its hash in the source repository.
19 '''
19 '''
20
20
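# As transplants.read() and write() below show, each record in
# .hg/transplant/transplants is a single line of the form
# "<local changeset hex>:<source changeset hex>".  A tiny stand-alone sketch
# of that layout, using obviously fake 40-character hashes:
#
#   def parse_transplants(data):
#       return [tuple(line.split(':')) for line in data.splitlines()]
#
#   sample = 'a' * 40 + ':' + 'b' * 40
#   parse_transplants(sample)   # -> [('aaaa...', 'bbbb...')]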
21 class transplantentry:
21 class transplantentry:
22 def __init__(self, lnode, rnode):
22 def __init__(self, lnode, rnode):
23 self.lnode = lnode
23 self.lnode = lnode
24 self.rnode = rnode
24 self.rnode = rnode
25
25
26 class transplants:
26 class transplants:
27 def __init__(self, path=None, transplantfile=None, opener=None):
27 def __init__(self, path=None, transplantfile=None, opener=None):
28 self.path = path
28 self.path = path
29 self.transplantfile = transplantfile
29 self.transplantfile = transplantfile
30 self.opener = opener
30 self.opener = opener
31
31
32 if not opener:
32 if not opener:
33 self.opener = util.opener(self.path)
33 self.opener = util.opener(self.path)
34 self.transplants = []
34 self.transplants = []
35 self.dirty = False
35 self.dirty = False
36 self.read()
36 self.read()
37
37
38 def read(self):
38 def read(self):
39 abspath = os.path.join(self.path, self.transplantfile)
39 abspath = os.path.join(self.path, self.transplantfile)
40 if self.transplantfile and os.path.exists(abspath):
40 if self.transplantfile and os.path.exists(abspath):
41 for line in self.opener(self.transplantfile).read().splitlines():
41 for line in self.opener(self.transplantfile).read().splitlines():
42 lnode, rnode = map(revlog.bin, line.split(':'))
42 lnode, rnode = map(revlog.bin, line.split(':'))
43 self.transplants.append(transplantentry(lnode, rnode))
43 self.transplants.append(transplantentry(lnode, rnode))
44
44
45 def write(self):
45 def write(self):
46 if self.dirty and self.transplantfile:
46 if self.dirty and self.transplantfile:
47 if not os.path.isdir(self.path):
47 if not os.path.isdir(self.path):
48 os.mkdir(self.path)
48 os.mkdir(self.path)
49 fp = self.opener(self.transplantfile, 'w')
49 fp = self.opener(self.transplantfile, 'w')
50 for c in self.transplants:
50 for c in self.transplants:
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 fp.write(l + ':' + r + '\n')
52 fp.write(l + ':' + r + '\n')
53 fp.close()
53 fp.close()
54 self.dirty = False
54 self.dirty = False
55
55
56 def get(self, rnode):
56 def get(self, rnode):
57 return [t for t in self.transplants if t.rnode == rnode]
57 return [t for t in self.transplants if t.rnode == rnode]
58
58
59 def set(self, lnode, rnode):
59 def set(self, lnode, rnode):
60 self.transplants.append(transplantentry(lnode, rnode))
60 self.transplants.append(transplantentry(lnode, rnode))
61 self.dirty = True
61 self.dirty = True
62
62
63 def remove(self, transplant):
63 def remove(self, transplant):
64 del self.transplants[self.transplants.index(transplant)]
64 del self.transplants[self.transplants.index(transplant)]
65 self.dirty = True
65 self.dirty = True
66
66
67 class transplanter:
67 class transplanter:
68 def __init__(self, ui, repo):
68 def __init__(self, ui, repo):
69 self.ui = ui
69 self.ui = ui
70 self.path = repo.join('transplant')
70 self.path = repo.join('transplant')
71 self.opener = util.opener(self.path)
71 self.opener = util.opener(self.path)
72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
73
73
74 def applied(self, repo, node, parent):
74 def applied(self, repo, node, parent):
75 '''returns True if a node is already an ancestor of parent
75 '''returns True if a node is already an ancestor of parent
76 or has already been transplanted'''
76 or has already been transplanted'''
77 if hasnode(repo, node):
77 if hasnode(repo, node):
78 if node in repo.changelog.reachable(parent, stop=node):
78 if node in repo.changelog.reachable(parent, stop=node):
79 return True
79 return True
80 for t in self.transplants.get(node):
80 for t in self.transplants.get(node):
81 # it might have been stripped
81 # it might have been stripped
82 if not hasnode(repo, t.lnode):
82 if not hasnode(repo, t.lnode):
83 self.transplants.remove(t)
83 self.transplants.remove(t)
84 return False
84 return False
85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 return True
86 return True
87 return False
87 return False
88
88
89 def apply(self, repo, source, revmap, merges, opts={}):
89 def apply(self, repo, source, revmap, merges, opts={}):
90 '''apply the revisions in revmap one by one in revision order'''
90 '''apply the revisions in revmap one by one in revision order'''
91 revs = revmap.keys()
91 revs = revmap.keys()
92 revs.sort()
92 revs.sort()
93
93
94 p1, p2 = repo.dirstate.parents()
94 p1, p2 = repo.dirstate.parents()
95 pulls = []
95 pulls = []
96 diffopts = patch.diffopts(self.ui, opts)
96 diffopts = patch.diffopts(self.ui, opts)
97 diffopts.git = True
97 diffopts.git = True
98
98
99 lock = wlock = None
99 lock = wlock = None
100 try:
100 try:
101 wlock = repo.wlock()
101 wlock = repo.wlock()
102 lock = repo.lock()
102 lock = repo.lock()
103 for rev in revs:
103 for rev in revs:
104 node = revmap[rev]
104 node = revmap[rev]
105 revstr = '%s:%s' % (rev, revlog.short(node))
105 revstr = '%s:%s' % (rev, revlog.short(node))
106
106
107 if self.applied(repo, node, p1):
107 if self.applied(repo, node, p1):
108 self.ui.warn(_('skipping already applied revision %s\n') %
108 self.ui.warn(_('skipping already applied revision %s\n') %
109 revstr)
109 revstr)
110 continue
110 continue
111
111
112 parents = source.changelog.parents(node)
112 parents = source.changelog.parents(node)
113 if not opts.get('filter'):
113 if not opts.get('filter'):
114 # If the changeset parent is the same as the wdir's parent,
114 # If the changeset parent is the same as the wdir's parent,
115 # just pull it.
115 # just pull it.
116 if parents[0] == p1:
116 if parents[0] == p1:
117 pulls.append(node)
117 pulls.append(node)
118 p1 = node
118 p1 = node
119 continue
119 continue
120 if pulls:
120 if pulls:
121 if source != repo:
121 if source != repo:
122 repo.pull(source, heads=pulls, lock=lock)
122 repo.pull(source, heads=pulls)
123 merge.update(repo, pulls[-1], False, False, None,
123 merge.update(repo, pulls[-1], False, False, None)
124 wlock=wlock)
125 p1, p2 = repo.dirstate.parents()
124 p1, p2 = repo.dirstate.parents()
126 pulls = []
125 pulls = []
127
126
128 domerge = False
127 domerge = False
129 if node in merges:
128 if node in merges:
130 # pulling all the merge revs at once would mean we couldn't
129 # pulling all the merge revs at once would mean we couldn't
131 # transplant after the latest even if transplants before them
130 # transplant after the latest even if transplants before them
132 # fail.
131 # fail.
133 domerge = True
132 domerge = True
134 if not hasnode(repo, node):
133 if not hasnode(repo, node):
135 repo.pull(source, heads=[node], lock=lock)
134 repo.pull(source, heads=[node])
136
135
137 if parents[1] != revlog.nullid:
136 if parents[1] != revlog.nullid:
138 self.ui.note(_('skipping merge changeset %s:%s\n')
137 self.ui.note(_('skipping merge changeset %s:%s\n')
139 % (rev, revlog.short(node)))
138 % (rev, revlog.short(node)))
140 patchfile = None
139 patchfile = None
141 else:
140 else:
142 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
141 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
143 fp = os.fdopen(fd, 'w')
142 fp = os.fdopen(fd, 'w')
144 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
143 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
145 fp.close()
144 fp.close()
146
145
147 del revmap[rev]
146 del revmap[rev]
148 if patchfile or domerge:
147 if patchfile or domerge:
149 try:
148 try:
150 n = self.applyone(repo, node, source.changelog.read(node),
149 n = self.applyone(repo, node,
150 source.changelog.read(node),
151 patchfile, merge=domerge,
151 patchfile, merge=domerge,
152 log=opts.get('log'),
152 log=opts.get('log'),
153 filter=opts.get('filter'),
153 filter=opts.get('filter'))
154 lock=lock, wlock=wlock)
155 if n and domerge:
154 if n and domerge:
156 self.ui.status(_('%s merged at %s\n') % (revstr,
155 self.ui.status(_('%s merged at %s\n') % (revstr,
157 revlog.short(n)))
156 revlog.short(n)))
158 elif n:
157 elif n:
159 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
158 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
160 revlog.short(n)))
159 revlog.short(n)))
161 finally:
160 finally:
162 if patchfile:
161 if patchfile:
163 os.unlink(patchfile)
162 os.unlink(patchfile)
164 if pulls:
163 if pulls:
165 repo.pull(source, heads=pulls, lock=lock)
164 repo.pull(source, heads=pulls)
166 merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
165 merge.update(repo, pulls[-1], False, False, None)
167 finally:
166 finally:
168 self.saveseries(revmap, merges)
167 self.saveseries(revmap, merges)
169 self.transplants.write()
168 self.transplants.write()
170 del lock, wlock
169 del lock, wlock
171
170
172 def filter(self, filter, changelog, patchfile):
171 def filter(self, filter, changelog, patchfile):
173 '''arbitrarily rewrite changeset before applying it'''
172 '''arbitrarily rewrite changeset before applying it'''
174
173
175 self.ui.status('filtering %s\n' % patchfile)
174 self.ui.status('filtering %s\n' % patchfile)
176 user, date, msg = (changelog[1], changelog[2], changelog[4])
175 user, date, msg = (changelog[1], changelog[2], changelog[4])
177
176
178 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
177 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
179 fp = os.fdopen(fd, 'w')
178 fp = os.fdopen(fd, 'w')
180 fp.write("# HG changeset patch\n")
179 fp.write("# HG changeset patch\n")
181 fp.write("# User %s\n" % user)
180 fp.write("# User %s\n" % user)
182 fp.write("# Date %d %d\n" % date)
181 fp.write("# Date %d %d\n" % date)
183 fp.write(changelog[4])
182 fp.write(changelog[4])
184 fp.close()
183 fp.close()
185
184
186 try:
185 try:
187 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
186 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
188 util.shellquote(patchfile)),
187 util.shellquote(patchfile)),
189 environ={'HGUSER': changelog[1]},
188 environ={'HGUSER': changelog[1]},
190 onerr=util.Abort, errprefix=_('filter failed'))
189 onerr=util.Abort, errprefix=_('filter failed'))
191 user, date, msg = self.parselog(file(headerfile))[1:4]
190 user, date, msg = self.parselog(file(headerfile))[1:4]
192 finally:
191 finally:
193 os.unlink(headerfile)
192 os.unlink(headerfile)
194
193
195 return (user, date, msg)
194 return (user, date, msg)
196
195
197 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
196 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
198 filter=None, lock=None, wlock=None):
197 filter=None):
199 '''apply the patch in patchfile to the repository as a transplant'''
198 '''apply the patch in patchfile to the repository as a transplant'''
200 (manifest, user, (time, timezone), files, message) = cl[:5]
199 (manifest, user, (time, timezone), files, message) = cl[:5]
201 date = "%d %d" % (time, timezone)
200 date = "%d %d" % (time, timezone)
202 extra = {'transplant_source': node}
201 extra = {'transplant_source': node}
203 if filter:
202 if filter:
204 (user, date, message) = self.filter(filter, cl, patchfile)
203 (user, date, message) = self.filter(filter, cl, patchfile)
205
204
206 if log:
205 if log:
207 message += '\n(transplanted from %s)' % revlog.hex(node)
206 message += '\n(transplanted from %s)' % revlog.hex(node)
208
207
209 self.ui.status(_('applying %s\n') % revlog.short(node))
208 self.ui.status(_('applying %s\n') % revlog.short(node))
210 self.ui.note('%s %s\n%s\n' % (user, date, message))
209 self.ui.note('%s %s\n%s\n' % (user, date, message))
211
210
212 if not patchfile and not merge:
211 if not patchfile and not merge:
213 raise util.Abort(_('can only omit patchfile if merging'))
212 raise util.Abort(_('can only omit patchfile if merging'))
214 if patchfile:
213 if patchfile:
215 try:
214 try:
216 files = {}
215 files = {}
217 try:
216 try:
218 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
217 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
219 files=files)
218 files=files)
220 if not files:
219 if not files:
221 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
220 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
222 return None
221 return None
223 finally:
222 finally:
224 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
223 files = patch.updatedir(self.ui, repo, files)
225 except Exception, inst:
224 except Exception, inst:
226 if filter:
225 if filter:
227 os.unlink(patchfile)
226 os.unlink(patchfile)
228 seriespath = os.path.join(self.path, 'series')
227 seriespath = os.path.join(self.path, 'series')
229 if os.path.exists(seriespath):
228 if os.path.exists(seriespath):
230 os.unlink(seriespath)
229 os.unlink(seriespath)
231 p1 = repo.dirstate.parents()[0]
230 p1 = repo.dirstate.parents()[0]
232 p2 = node
231 p2 = node
233 self.log(user, date, message, p1, p2, merge=merge)
232 self.log(user, date, message, p1, p2, merge=merge)
234 self.ui.write(str(inst) + '\n')
233 self.ui.write(str(inst) + '\n')
235 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
234 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
236 else:
235 else:
237 files = None
236 files = None
238 if merge:
237 if merge:
239 p1, p2 = repo.dirstate.parents()
238 p1, p2 = repo.dirstate.parents()
240 repo.dirstate.setparents(p1, node)
239 repo.dirstate.setparents(p1, node)
241
240
242 n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
241 n = repo.commit(files, message, user, date, extra=extra)
243 extra=extra)
244 if not merge:
242 if not merge:
245 self.transplants.set(n, node)
243 self.transplants.set(n, node)
246
244
247 return n
245 return n
248
246
249 def resume(self, repo, source, opts=None):
247 def resume(self, repo, source, opts=None):
250 '''recover last transaction and apply remaining changesets'''
248 '''recover last transaction and apply remaining changesets'''
251 if os.path.exists(os.path.join(self.path, 'journal')):
249 if os.path.exists(os.path.join(self.path, 'journal')):
252 n, node = self.recover(repo)
250 n, node = self.recover(repo)
253 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
251 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
254 revlog.short(n)))
252 revlog.short(n)))
255 seriespath = os.path.join(self.path, 'series')
253 seriespath = os.path.join(self.path, 'series')
256 if not os.path.exists(seriespath):
254 if not os.path.exists(seriespath):
257 self.transplants.write()
255 self.transplants.write()
258 return
256 return
259 nodes, merges = self.readseries()
257 nodes, merges = self.readseries()
260 revmap = {}
258 revmap = {}
261 for n in nodes:
259 for n in nodes:
262 revmap[source.changelog.rev(n)] = n
260 revmap[source.changelog.rev(n)] = n
263 os.unlink(seriespath)
261 os.unlink(seriespath)
264
262
265 self.apply(repo, source, revmap, merges, opts)
263 self.apply(repo, source, revmap, merges, opts)
266
264
267 def recover(self, repo):
265 def recover(self, repo):
268 '''commit working directory using journal metadata'''
266 '''commit working directory using journal metadata'''
269 node, user, date, message, parents = self.readlog()
267 node, user, date, message, parents = self.readlog()
270 merge = len(parents) == 2
268 merge = len(parents) == 2
271
269
272 if not user or not date or not message or not parents[0]:
270 if not user or not date or not message or not parents[0]:
273 raise util.Abort(_('transplant log file is corrupt'))
271 raise util.Abort(_('transplant log file is corrupt'))
274
272
275 extra = {'transplant_source': node}
273 extra = {'transplant_source': node}
276 wlock = repo.wlock()
274 wlock = repo.wlock()
277 try:
275 try:
278 p1, p2 = repo.dirstate.parents()
276 p1, p2 = repo.dirstate.parents()
279 if p1 != parents[0]:
277 if p1 != parents[0]:
280 raise util.Abort(
278 raise util.Abort(
281 _('working dir not at transplant parent %s') %
279 _('working dir not at transplant parent %s') %
282 revlog.hex(parents[0]))
280 revlog.hex(parents[0]))
283 if merge:
281 if merge:
284 repo.dirstate.setparents(p1, parents[1])
282 repo.dirstate.setparents(p1, parents[1])
285 n = repo.commit(None, message, user, date, wlock=wlock,
283 n = repo.commit(None, message, user, date, extra=extra)
286 extra=extra)
287 if not n:
284 if not n:
288 raise util.Abort(_('commit failed'))
285 raise util.Abort(_('commit failed'))
289 if not merge:
286 if not merge:
290 self.transplants.set(n, node)
287 self.transplants.set(n, node)
291 self.unlog()
288 self.unlog()
292
289
293 return n, node
290 return n, node
294 finally:
291 finally:
295 del wlock
292 del wlock
296
293
297 def readseries(self):
294 def readseries(self):
298 nodes = []
295 nodes = []
299 merges = []
296 merges = []
300 cur = nodes
297 cur = nodes
301 for line in self.opener('series').read().splitlines():
298 for line in self.opener('series').read().splitlines():
302 if line.startswith('# Merges'):
299 if line.startswith('# Merges'):
303 cur = merges
300 cur = merges
304 continue
301 continue
305 cur.append(revlog.bin(line))
302 cur.append(revlog.bin(line))
306
303
307 return (nodes, merges)
304 return (nodes, merges)
308
305
309 def saveseries(self, revmap, merges):
306 def saveseries(self, revmap, merges):
310 if not revmap:
307 if not revmap:
311 return
308 return
312
309
313 if not os.path.isdir(self.path):
310 if not os.path.isdir(self.path):
314 os.mkdir(self.path)
311 os.mkdir(self.path)
315 series = self.opener('series', 'w')
312 series = self.opener('series', 'w')
316 revs = revmap.keys()
313 revs = revmap.keys()
317 revs.sort()
314 revs.sort()
318 for rev in revs:
315 for rev in revs:
319 series.write(revlog.hex(revmap[rev]) + '\n')
316 series.write(revlog.hex(revmap[rev]) + '\n')
320 if merges:
317 if merges:
321 series.write('# Merges\n')
318 series.write('# Merges\n')
322 for m in merges:
319 for m in merges:
323 series.write(revlog.hex(m) + '\n')
320 series.write(revlog.hex(m) + '\n')
324 series.close()
321 series.close()
325
322
326 def parselog(self, fp):
323 def parselog(self, fp):
327 parents = []
324 parents = []
328 message = []
325 message = []
329 node = revlog.nullid
326 node = revlog.nullid
330 inmsg = False
327 inmsg = False
331 for line in fp.read().splitlines():
328 for line in fp.read().splitlines():
332 if inmsg:
329 if inmsg:
333 message.append(line)
330 message.append(line)
334 elif line.startswith('# User '):
331 elif line.startswith('# User '):
335 user = line[7:]
332 user = line[7:]
336 elif line.startswith('# Date '):
333 elif line.startswith('# Date '):
337 date = line[7:]
334 date = line[7:]
338 elif line.startswith('# Node ID '):
335 elif line.startswith('# Node ID '):
339 node = revlog.bin(line[10:])
336 node = revlog.bin(line[10:])
340 elif line.startswith('# Parent '):
337 elif line.startswith('# Parent '):
341 parents.append(revlog.bin(line[9:]))
338 parents.append(revlog.bin(line[9:]))
342 elif not line.startswith('#'):
339 elif not line.startswith('#'):
343 inmsg = True
340 inmsg = True
344 message.append(line)
341 message.append(line)
345 return (node, user, date, '\n'.join(message), parents)
342 return (node, user, date, '\n'.join(message), parents)
346
343
347 def log(self, user, date, message, p1, p2, merge=False):
344 def log(self, user, date, message, p1, p2, merge=False):
348 '''journal changelog metadata for later recover'''
345 '''journal changelog metadata for later recover'''
349
346
350 if not os.path.isdir(self.path):
347 if not os.path.isdir(self.path):
351 os.mkdir(self.path)
348 os.mkdir(self.path)
352 fp = self.opener('journal', 'w')
349 fp = self.opener('journal', 'w')
353 fp.write('# User %s\n' % user)
350 fp.write('# User %s\n' % user)
354 fp.write('# Date %s\n' % date)
351 fp.write('# Date %s\n' % date)
355 fp.write('# Node ID %s\n' % revlog.hex(p2))
352 fp.write('# Node ID %s\n' % revlog.hex(p2))
356 fp.write('# Parent ' + revlog.hex(p1) + '\n')
353 fp.write('# Parent ' + revlog.hex(p1) + '\n')
357 if merge:
354 if merge:
358 fp.write('# Parent ' + revlog.hex(p2) + '\n')
355 fp.write('# Parent ' + revlog.hex(p2) + '\n')
359 fp.write(message.rstrip() + '\n')
356 fp.write(message.rstrip() + '\n')
360 fp.close()
357 fp.close()
361
358
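# The journal written by log() above and parsed again by readlog()/parselog()
# looks roughly like this; the user, date and hashes are placeholders only:
#
#   # User Jane Doe <jane@example.com>
#   # Date 1183068000 0
#   # Node ID <40-char hex of the changeset being transplanted>
#   # Parent <40-char hex of the working-directory parent>
#   # Parent <40-char hex of the source changeset, written only for merges>
#   original commit message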
362 def readlog(self):
359 def readlog(self):
363 return self.parselog(self.opener('journal'))
360 return self.parselog(self.opener('journal'))
364
361
365 def unlog(self):
362 def unlog(self):
366 '''remove changelog journal'''
363 '''remove changelog journal'''
367 absdst = os.path.join(self.path, 'journal')
364 absdst = os.path.join(self.path, 'journal')
368 if os.path.exists(absdst):
365 if os.path.exists(absdst):
369 os.unlink(absdst)
366 os.unlink(absdst)
370
367
371 def transplantfilter(self, repo, source, root):
368 def transplantfilter(self, repo, source, root):
372 def matchfn(node):
369 def matchfn(node):
373 if self.applied(repo, node, root):
370 if self.applied(repo, node, root):
374 return False
371 return False
375 if source.changelog.parents(node)[1] != revlog.nullid:
372 if source.changelog.parents(node)[1] != revlog.nullid:
376 return False
373 return False
377 extra = source.changelog.read(node)[5]
374 extra = source.changelog.read(node)[5]
378 cnode = extra.get('transplant_source')
375 cnode = extra.get('transplant_source')
379 if cnode and self.applied(repo, cnode, root):
376 if cnode and self.applied(repo, cnode, root):
380 return False
377 return False
381 return True
378 return True
382
379
383 return matchfn
380 return matchfn
384
381
385 def hasnode(repo, node):
382 def hasnode(repo, node):
386 try:
383 try:
387 return repo.changelog.rev(node) != None
384 return repo.changelog.rev(node) != None
388 except revlog.RevlogError:
385 except revlog.RevlogError:
389 return False
386 return False
390
387
391 def browserevs(ui, repo, nodes, opts):
388 def browserevs(ui, repo, nodes, opts):
392 '''interactively transplant changesets'''
389 '''interactively transplant changesets'''
393 def browsehelp(ui):
390 def browsehelp(ui):
394 ui.write('y: transplant this changeset\n'
391 ui.write('y: transplant this changeset\n'
395 'n: skip this changeset\n'
392 'n: skip this changeset\n'
396 'm: merge at this changeset\n'
393 'm: merge at this changeset\n'
397 'p: show patch\n'
394 'p: show patch\n'
398 'c: commit selected changesets\n'
395 'c: commit selected changesets\n'
399 'q: cancel transplant\n'
396 'q: cancel transplant\n'
400 '?: show this help\n')
397 '?: show this help\n')
401
398
402 displayer = cmdutil.show_changeset(ui, repo, opts)
399 displayer = cmdutil.show_changeset(ui, repo, opts)
403 transplants = []
400 transplants = []
404 merges = []
401 merges = []
405 for node in nodes:
402 for node in nodes:
406 displayer.show(changenode=node)
403 displayer.show(changenode=node)
407 action = None
404 action = None
408 while not action:
405 while not action:
409 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
406 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
410 if action == '?':
407 if action == '?':
411 browsehelp(ui)
408 browsehelp(ui)
412 action = None
409 action = None
413 elif action == 'p':
410 elif action == 'p':
414 parent = repo.changelog.parents(node)[0]
411 parent = repo.changelog.parents(node)[0]
415 patch.diff(repo, parent, node)
412 patch.diff(repo, parent, node)
416 action = None
413 action = None
417 elif action not in ('y', 'n', 'm', 'c', 'q'):
414 elif action not in ('y', 'n', 'm', 'c', 'q'):
418 ui.write('no such option\n')
415 ui.write('no such option\n')
419 action = None
416 action = None
420 if action == 'y':
417 if action == 'y':
421 transplants.append(node)
418 transplants.append(node)
422 elif action == 'm':
419 elif action == 'm':
423 merges.append(node)
420 merges.append(node)
424 elif action == 'c':
421 elif action == 'c':
425 break
422 break
426 elif action == 'q':
423 elif action == 'q':
427 transplants = ()
424 transplants = ()
428 merges = ()
425 merges = ()
429 break
426 break
430 return (transplants, merges)
427 return (transplants, merges)
431
428
432 def transplant(ui, repo, *revs, **opts):
429 def transplant(ui, repo, *revs, **opts):
433 '''transplant changesets from another branch
430 '''transplant changesets from another branch
434
431
435 Selected changesets will be applied on top of the current working
432 Selected changesets will be applied on top of the current working
436 directory with the log of the original changeset. If --log is
433 directory with the log of the original changeset. If --log is
437 specified, log messages will have a comment appended of the form:
434 specified, log messages will have a comment appended of the form:
438
435
439 (transplanted from CHANGESETHASH)
436 (transplanted from CHANGESETHASH)
440
437
441 You can rewrite the changelog message with the --filter option.
438 You can rewrite the changelog message with the --filter option.
442 Its argument will be invoked with the current changelog message
439 Its argument will be invoked with the current changelog message
443 as $1 and the patch as $2.
440 as $1 and the patch as $2.
444
441
445 If --source is specified, selects changesets from the named
442 If --source is specified, selects changesets from the named
446 repository. If --branch is specified, selects changesets from the
443 repository. If --branch is specified, selects changesets from the
447 branch holding the named revision, up to that revision. If --all
444 branch holding the named revision, up to that revision. If --all
448 is specified, all changesets on the branch will be transplanted,
445 is specified, all changesets on the branch will be transplanted,
449 otherwise you will be prompted to select the changesets you want.
446 otherwise you will be prompted to select the changesets you want.
450
447
451 hg transplant --branch REVISION --all will rebase the selected branch
448 hg transplant --branch REVISION --all will rebase the selected branch
452 (up to the named revision) onto your current working directory.
449 (up to the named revision) onto your current working directory.
453
450
454 You can optionally mark selected transplanted changesets as
451 You can optionally mark selected transplanted changesets as
455 merge changesets. You will not be prompted to transplant any
452 merge changesets. You will not be prompted to transplant any
456 ancestors of a merged transplant, and you can merge descendants
453 ancestors of a merged transplant, and you can merge descendants
457 of them normally instead of transplanting them.
454 of them normally instead of transplanting them.
458
455
459 If no merges or revisions are provided, hg transplant will start
456 If no merges or revisions are provided, hg transplant will start
460 an interactive changeset browser.
457 an interactive changeset browser.
461
458
462 If a changeset application fails, you can fix the merge by hand and
459 If a changeset application fails, you can fix the merge by hand and
463 then resume where you left off by calling hg transplant --continue.
460 then resume where you left off by calling hg transplant --continue.
464 '''
461 '''
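Since --filter only promises the draft message as $1 and the patch as $2, the filter can be any executable. A hedged sketch of such a script follows (the file name addmarker.py is hypothetical); it assumes $1 arrives as the path of a temporary file holding the draft log, which the filter may rewrite in place, and $2 as the path of the patch about to be applied.

    #!/usr/bin/env python
    # Illustrative --filter script, e.g.:
    #   hg transplant --filter 'python addmarker.py' -s ../other REV
    # Assumption: sys.argv[1] is a file with the draft changelog text
    # (rewritten in place), sys.argv[2] is the patch file.
    import sys

    msgfile, patchfile = sys.argv[1], sys.argv[2]
    with open(msgfile) as f:
        text = f.read()
    with open(patchfile) as f:
        touched = sum(1 for line in f if line.startswith('+++ '))
    with open(msgfile, 'w') as f:
        f.write(text.rstrip('\n') + '\n\n(filtered: %d file(s) touched)\n' % touched)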
465 def getoneitem(opts, item, errmsg):
462 def getoneitem(opts, item, errmsg):
466 val = opts.get(item)
463 val = opts.get(item)
467 if val:
464 if val:
468 if len(val) > 1:
465 if len(val) > 1:
469 raise util.Abort(errmsg)
466 raise util.Abort(errmsg)
470 else:
467 else:
471 return val[0]
468 return val[0]
472
469
473 def getremotechanges(repo, url):
470 def getremotechanges(repo, url):
474 sourcerepo = ui.expandpath(url)
471 sourcerepo = ui.expandpath(url)
475 source = hg.repository(ui, sourcerepo)
472 source = hg.repository(ui, sourcerepo)
476 incoming = repo.findincoming(source, force=True)
473 incoming = repo.findincoming(source, force=True)
477 if not incoming:
474 if not incoming:
478 return (source, None, None)
475 return (source, None, None)
479
476
480 bundle = None
477 bundle = None
481 if not source.local():
478 if not source.local():
482 cg = source.changegroup(incoming, 'incoming')
479 cg = source.changegroup(incoming, 'incoming')
483 bundle = changegroup.writebundle(cg, None, 'HG10UN')
480 bundle = changegroup.writebundle(cg, None, 'HG10UN')
484 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
481 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
485
482
486 return (source, incoming, bundle)
483 return (source, incoming, bundle)
487
484
488 def incwalk(repo, incoming, branches, match=util.always):
485 def incwalk(repo, incoming, branches, match=util.always):
489 if not branches:
486 if not branches:
490 branches=None
487 branches=None
491 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
488 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
492 if match(node):
489 if match(node):
493 yield node
490 yield node
494
491
495 def transplantwalk(repo, root, branches, match=util.always):
492 def transplantwalk(repo, root, branches, match=util.always):
496 if not branches:
493 if not branches:
497 branches = repo.heads()
494 branches = repo.heads()
498 ancestors = []
495 ancestors = []
499 for branch in branches:
496 for branch in branches:
500 ancestors.append(repo.changelog.ancestor(root, branch))
497 ancestors.append(repo.changelog.ancestor(root, branch))
501 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
498 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
502 if match(node):
499 if match(node):
503 yield node
500 yield node
504
501
505 def checkopts(opts, revs):
502 def checkopts(opts, revs):
506 if opts.get('continue'):
503 if opts.get('continue'):
507 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
504 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
508 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
505 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
509 return
506 return
510 if not (opts.get('source') or revs or
507 if not (opts.get('source') or revs or
511 opts.get('merge') or opts.get('branch')):
508 opts.get('merge') or opts.get('branch')):
512 raise util.Abort(_('no source URL, branch tag or revision list provided'))
509 raise util.Abort(_('no source URL, branch tag or revision list provided'))
513 if opts.get('all'):
510 if opts.get('all'):
514 if not opts.get('branch'):
511 if not opts.get('branch'):
515 raise util.Abort(_('--all requires a branch revision'))
512 raise util.Abort(_('--all requires a branch revision'))
516 if revs:
513 if revs:
517 raise util.Abort(_('--all is incompatible with a revision list'))
514 raise util.Abort(_('--all is incompatible with a revision list'))
518
515
519 checkopts(opts, revs)
516 checkopts(opts, revs)
520
517
521 if not opts.get('log'):
518 if not opts.get('log'):
522 opts['log'] = ui.config('transplant', 'log')
519 opts['log'] = ui.config('transplant', 'log')
523 if not opts.get('filter'):
520 if not opts.get('filter'):
524 opts['filter'] = ui.config('transplant', 'filter')
521 opts['filter'] = ui.config('transplant', 'filter')
525
522
526 tp = transplanter(ui, repo)
523 tp = transplanter(ui, repo)
527
524
528 p1, p2 = repo.dirstate.parents()
525 p1, p2 = repo.dirstate.parents()
529 if p1 == revlog.nullid:
526 if p1 == revlog.nullid:
530 raise util.Abort(_('no revision checked out'))
527 raise util.Abort(_('no revision checked out'))
531 if not opts.get('continue'):
528 if not opts.get('continue'):
532 if p2 != revlog.nullid:
529 if p2 != revlog.nullid:
533 raise util.Abort(_('outstanding uncommitted merges'))
530 raise util.Abort(_('outstanding uncommitted merges'))
534 m, a, r, d = repo.status()[:4]
531 m, a, r, d = repo.status()[:4]
535 if m or a or r or d:
532 if m or a or r or d:
536 raise util.Abort(_('outstanding local changes'))
533 raise util.Abort(_('outstanding local changes'))
537
534
538 bundle = None
535 bundle = None
539 source = opts.get('source')
536 source = opts.get('source')
540 if source:
537 if source:
541 (source, incoming, bundle) = getremotechanges(repo, source)
538 (source, incoming, bundle) = getremotechanges(repo, source)
542 else:
539 else:
543 source = repo
540 source = repo
544
541
545 try:
542 try:
546 if opts.get('continue'):
543 if opts.get('continue'):
547 tp.resume(repo, source, opts)
544 tp.resume(repo, source, opts)
548 return
545 return
549
546
550 tf=tp.transplantfilter(repo, source, p1)
547 tf=tp.transplantfilter(repo, source, p1)
551 if opts.get('prune'):
548 if opts.get('prune'):
552 prune = [source.lookup(r)
549 prune = [source.lookup(r)
553 for r in cmdutil.revrange(source, opts.get('prune'))]
550 for r in cmdutil.revrange(source, opts.get('prune'))]
554 matchfn = lambda x: tf(x) and x not in prune
551 matchfn = lambda x: tf(x) and x not in prune
555 else:
552 else:
556 matchfn = tf
553 matchfn = tf
557 branches = map(source.lookup, opts.get('branch', ()))
554 branches = map(source.lookup, opts.get('branch', ()))
558 merges = map(source.lookup, opts.get('merge', ()))
555 merges = map(source.lookup, opts.get('merge', ()))
559 revmap = {}
556 revmap = {}
560 if revs:
557 if revs:
561 for r in cmdutil.revrange(source, revs):
558 for r in cmdutil.revrange(source, revs):
562 revmap[int(r)] = source.lookup(r)
559 revmap[int(r)] = source.lookup(r)
563 elif opts.get('all') or not merges:
560 elif opts.get('all') or not merges:
564 if source != repo:
561 if source != repo:
565 alltransplants = incwalk(source, incoming, branches, match=matchfn)
562 alltransplants = incwalk(source, incoming, branches, match=matchfn)
566 else:
563 else:
567 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
564 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
568 if opts.get('all'):
565 if opts.get('all'):
569 revs = alltransplants
566 revs = alltransplants
570 else:
567 else:
571 revs, newmerges = browserevs(ui, source, alltransplants, opts)
568 revs, newmerges = browserevs(ui, source, alltransplants, opts)
572 merges.extend(newmerges)
569 merges.extend(newmerges)
573 for r in revs:
570 for r in revs:
574 revmap[source.changelog.rev(r)] = r
571 revmap[source.changelog.rev(r)] = r
575 for r in merges:
572 for r in merges:
576 revmap[source.changelog.rev(r)] = r
573 revmap[source.changelog.rev(r)] = r
577
574
578 revs = revmap.keys()
575 revs = revmap.keys()
579 revs.sort()
576 revs.sort()
580 pulls = []
577 pulls = []
581
578
582 tp.apply(repo, source, revmap, merges, opts)
579 tp.apply(repo, source, revmap, merges, opts)
583 finally:
580 finally:
584 if bundle:
581 if bundle:
585 source.close()
582 source.close()
586 os.unlink(bundle)
583 os.unlink(bundle)
587
584
588 cmdtable = {
585 cmdtable = {
589 "transplant":
586 "transplant":
590 (transplant,
587 (transplant,
591 [('s', 'source', '', _('pull patches from REPOSITORY')),
588 [('s', 'source', '', _('pull patches from REPOSITORY')),
592 ('b', 'branch', [], _('pull patches from branch BRANCH')),
589 ('b', 'branch', [], _('pull patches from branch BRANCH')),
593 ('a', 'all', None, _('pull all changesets up to BRANCH')),
590 ('a', 'all', None, _('pull all changesets up to BRANCH')),
594 ('p', 'prune', [], _('skip over REV')),
591 ('p', 'prune', [], _('skip over REV')),
595 ('m', 'merge', [], _('merge at REV')),
592 ('m', 'merge', [], _('merge at REV')),
596 ('', 'log', None, _('append transplant info to log message')),
593 ('', 'log', None, _('append transplant info to log message')),
597 ('c', 'continue', None, _('continue last transplant session after repair')),
594 ('c', 'continue', None, _('continue last transplant session after repair')),
598 ('', 'filter', '', _('filter changesets through FILTER'))],
595 ('', 'filter', '', _('filter changesets through FILTER'))],
599 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
596 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
600 }
597 }
@@ -1,1278 +1,1277
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
12 import fancyopts, revlog, version, extensions, hook
12 import fancyopts, revlog, version, extensions, hook
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18 class AmbiguousCommand(Exception):
18 class AmbiguousCommand(Exception):
19 """Exception raised if command shortcut matches more than one command."""
19 """Exception raised if command shortcut matches more than one command."""
20 class ParseError(Exception):
20 class ParseError(Exception):
21 """Exception raised on errors in parsing the command line."""
21 """Exception raised on errors in parsing the command line."""
22
22
23 def runcatch(ui, args, argv0=None):
23 def runcatch(ui, args, argv0=None):
24 def catchterm(*args):
24 def catchterm(*args):
25 raise util.SignalInterrupt
25 raise util.SignalInterrupt
26
26
27 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
27 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
28 num = getattr(signal, name, None)
28 num = getattr(signal, name, None)
29 if num: signal.signal(num, catchterm)
29 if num: signal.signal(num, catchterm)
30
30
31 try:
31 try:
32 try:
32 try:
33 # enter the debugger before command execution
33 # enter the debugger before command execution
34 if '--debugger' in args:
34 if '--debugger' in args:
35 pdb.set_trace()
35 pdb.set_trace()
36 try:
36 try:
37 return dispatch(ui, args, argv0=argv0)
37 return dispatch(ui, args, argv0=argv0)
38 finally:
38 finally:
39 ui.flush()
39 ui.flush()
40 except:
40 except:
41 # enter the debugger when we hit an exception
41 # enter the debugger when we hit an exception
42 if '--debugger' in args:
42 if '--debugger' in args:
43 pdb.post_mortem(sys.exc_info()[2])
43 pdb.post_mortem(sys.exc_info()[2])
44 ui.print_exc()
44 ui.print_exc()
45 raise
45 raise
46
46
47 except ParseError, inst:
47 except ParseError, inst:
48 if inst.args[0]:
48 if inst.args[0]:
49 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
49 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
50 commands.help_(ui, inst.args[0])
50 commands.help_(ui, inst.args[0])
51 else:
51 else:
52 ui.warn(_("hg: %s\n") % inst.args[1])
52 ui.warn(_("hg: %s\n") % inst.args[1])
53 commands.help_(ui, 'shortlist')
53 commands.help_(ui, 'shortlist')
54 except AmbiguousCommand, inst:
54 except AmbiguousCommand, inst:
55 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
55 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
56 (inst.args[0], " ".join(inst.args[1])))
56 (inst.args[0], " ".join(inst.args[1])))
57 except UnknownCommand, inst:
57 except UnknownCommand, inst:
58 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
58 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
59 commands.help_(ui, 'shortlist')
59 commands.help_(ui, 'shortlist')
60 except hg.RepoError, inst:
60 except hg.RepoError, inst:
61 ui.warn(_("abort: %s!\n") % inst)
61 ui.warn(_("abort: %s!\n") % inst)
62 except lock.LockHeld, inst:
62 except lock.LockHeld, inst:
63 if inst.errno == errno.ETIMEDOUT:
63 if inst.errno == errno.ETIMEDOUT:
64 reason = _('timed out waiting for lock held by %s') % inst.locker
64 reason = _('timed out waiting for lock held by %s') % inst.locker
65 else:
65 else:
66 reason = _('lock held by %s') % inst.locker
66 reason = _('lock held by %s') % inst.locker
67 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
67 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
68 except lock.LockUnavailable, inst:
68 except lock.LockUnavailable, inst:
69 ui.warn(_("abort: could not lock %s: %s\n") %
69 ui.warn(_("abort: could not lock %s: %s\n") %
70 (inst.desc or inst.filename, inst.strerror))
70 (inst.desc or inst.filename, inst.strerror))
71 except revlog.RevlogError, inst:
71 except revlog.RevlogError, inst:
72 ui.warn(_("abort: %s!\n") % inst)
72 ui.warn(_("abort: %s!\n") % inst)
73 except util.SignalInterrupt:
73 except util.SignalInterrupt:
74 ui.warn(_("killed!\n"))
74 ui.warn(_("killed!\n"))
75 except KeyboardInterrupt:
75 except KeyboardInterrupt:
76 try:
76 try:
77 ui.warn(_("interrupted!\n"))
77 ui.warn(_("interrupted!\n"))
78 except IOError, inst:
78 except IOError, inst:
79 if inst.errno == errno.EPIPE:
79 if inst.errno == errno.EPIPE:
80 if ui.debugflag:
80 if ui.debugflag:
81 ui.warn(_("\nbroken pipe\n"))
81 ui.warn(_("\nbroken pipe\n"))
82 else:
82 else:
83 raise
83 raise
84 except socket.error, inst:
84 except socket.error, inst:
85 ui.warn(_("abort: %s\n") % inst[1])
85 ui.warn(_("abort: %s\n") % inst[1])
86 except IOError, inst:
86 except IOError, inst:
87 if hasattr(inst, "code"):
87 if hasattr(inst, "code"):
88 ui.warn(_("abort: %s\n") % inst)
88 ui.warn(_("abort: %s\n") % inst)
89 elif hasattr(inst, "reason"):
89 elif hasattr(inst, "reason"):
90 try: # usually it is in the form (errno, strerror)
90 try: # usually it is in the form (errno, strerror)
91 reason = inst.reason.args[1]
91 reason = inst.reason.args[1]
92 except: # it might be anything, for example a string
92 except: # it might be anything, for example a string
93 reason = inst.reason
93 reason = inst.reason
94 ui.warn(_("abort: error: %s\n") % reason)
94 ui.warn(_("abort: error: %s\n") % reason)
95 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
95 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
96 if ui.debugflag:
96 if ui.debugflag:
97 ui.warn(_("broken pipe\n"))
97 ui.warn(_("broken pipe\n"))
98 elif getattr(inst, "strerror", None):
98 elif getattr(inst, "strerror", None):
99 if getattr(inst, "filename", None):
99 if getattr(inst, "filename", None):
100 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
100 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
101 else:
101 else:
102 ui.warn(_("abort: %s\n") % inst.strerror)
102 ui.warn(_("abort: %s\n") % inst.strerror)
103 else:
103 else:
104 raise
104 raise
105 except OSError, inst:
105 except OSError, inst:
106 if getattr(inst, "filename", None):
106 if getattr(inst, "filename", None):
107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
108 else:
108 else:
109 ui.warn(_("abort: %s\n") % inst.strerror)
109 ui.warn(_("abort: %s\n") % inst.strerror)
110 except util.UnexpectedOutput, inst:
110 except util.UnexpectedOutput, inst:
111 ui.warn(_("abort: %s") % inst[0])
111 ui.warn(_("abort: %s") % inst[0])
112 if not isinstance(inst[1], basestring):
112 if not isinstance(inst[1], basestring):
113 ui.warn(" %r\n" % (inst[1],))
113 ui.warn(" %r\n" % (inst[1],))
114 elif not inst[1]:
114 elif not inst[1]:
115 ui.warn(_(" empty string\n"))
115 ui.warn(_(" empty string\n"))
116 else:
116 else:
117 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
117 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
118 except ImportError, inst:
118 except ImportError, inst:
119 m = str(inst).split()[-1]
119 m = str(inst).split()[-1]
120 ui.warn(_("abort: could not import module %s!\n" % m))
120 ui.warn(_("abort: could not import module %s!\n" % m))
121 if m in "mpatch bdiff".split():
121 if m in "mpatch bdiff".split():
122 ui.warn(_("(did you forget to compile extensions?)\n"))
122 ui.warn(_("(did you forget to compile extensions?)\n"))
123 elif m in "zlib".split():
123 elif m in "zlib".split():
124 ui.warn(_("(is your Python install correct?)\n"))
124 ui.warn(_("(is your Python install correct?)\n"))
125
125
126 except util.Abort, inst:
126 except util.Abort, inst:
127 ui.warn(_("abort: %s\n") % inst)
127 ui.warn(_("abort: %s\n") % inst)
128 except SystemExit, inst:
128 except SystemExit, inst:
129 # Commands shouldn't sys.exit directly, but give a return code.
129 # Commands shouldn't sys.exit directly, but give a return code.
130 # Just in case, catch this and pass the exit code to the caller.
130 # Just in case, catch this and pass the exit code to the caller.
131 return inst.code
131 return inst.code
132 except:
132 except:
133 ui.warn(_("** unknown exception encountered, details follow\n"))
133 ui.warn(_("** unknown exception encountered, details follow\n"))
134 ui.warn(_("** report bug details to "
134 ui.warn(_("** report bug details to "
135 "http://www.selenic.com/mercurial/bts\n"))
135 "http://www.selenic.com/mercurial/bts\n"))
136 ui.warn(_("** or mercurial@selenic.com\n"))
136 ui.warn(_("** or mercurial@selenic.com\n"))
137 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
137 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
138 % version.get_version())
138 % version.get_version())
139 raise
139 raise
140
140
141 return -1
141 return -1
142
142
143 def findpossible(ui, cmd):
143 def findpossible(ui, cmd):
144 """
144 """
145 Return cmd -> (aliases, command table entry)
145 Return cmd -> (aliases, command table entry)
146 for each matching command.
146 for each matching command.
147 Return debug commands (or their aliases) only if no normal command matches.
147 Return debug commands (or their aliases) only if no normal command matches.
148 """
148 """
149 choice = {}
149 choice = {}
150 debugchoice = {}
150 debugchoice = {}
151 for e in commands.table.keys():
151 for e in commands.table.keys():
152 aliases = e.lstrip("^").split("|")
152 aliases = e.lstrip("^").split("|")
153 found = None
153 found = None
154 if cmd in aliases:
154 if cmd in aliases:
155 found = cmd
155 found = cmd
156 elif not ui.config("ui", "strict"):
156 elif not ui.config("ui", "strict"):
157 for a in aliases:
157 for a in aliases:
158 if a.startswith(cmd):
158 if a.startswith(cmd):
159 found = a
159 found = a
160 break
160 break
161 if found is not None:
161 if found is not None:
162 if aliases[0].startswith("debug") or found.startswith("debug"):
162 if aliases[0].startswith("debug") or found.startswith("debug"):
163 debugchoice[found] = (aliases, commands.table[e])
163 debugchoice[found] = (aliases, commands.table[e])
164 else:
164 else:
165 choice[found] = (aliases, commands.table[e])
165 choice[found] = (aliases, commands.table[e])
166
166
167 if not choice and debugchoice:
167 if not choice and debugchoice:
168 choice = debugchoice
168 choice = debugchoice
169
169
170 return choice
170 return choice
171
171
172 def findcmd(ui, cmd):
172 def findcmd(ui, cmd):
173 """Return (aliases, command table entry) for command string."""
173 """Return (aliases, command table entry) for command string."""
174 choice = findpossible(ui, cmd)
174 choice = findpossible(ui, cmd)
175
175
176 if choice.has_key(cmd):
176 if choice.has_key(cmd):
177 return choice[cmd]
177 return choice[cmd]
178
178
179 if len(choice) > 1:
179 if len(choice) > 1:
180 clist = choice.keys()
180 clist = choice.keys()
181 clist.sort()
181 clist.sort()
182 raise AmbiguousCommand(cmd, clist)
182 raise AmbiguousCommand(cmd, clist)
183
183
184 if choice:
184 if choice:
185 return choice.values()[0]
185 return choice.values()[0]
186
186
187 raise UnknownCommand(cmd)
187 raise UnknownCommand(cmd)
188
188
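findpossible() accepts either an exact alias or, unless ui.strict is set, any prefix of one; findcmd() then treats several surviving prefixes as ambiguous. A toy restatement of the prefix rule on a small table (the debug-command special case is left out, and the names below are not the cmdutil API):

    def candidates(cmd, table):
        # Table keys look like '^annotate|blame', as in commands.table.
        found = {}
        for entry in table:
            aliases = entry.lstrip('^').split('|')
            if cmd in aliases:
                found[cmd] = aliases
            else:
                for a in aliases:
                    if a.startswith(cmd):
                        found[a] = aliases
                        break
        return found

    table = {'^annotate|blame': None, '^add': None, 'addremove': None}
    assert sorted(candidates('ad', table)) == ['add', 'addremove']   # ambiguous
    assert list(candidates('blame', table)) == ['blame']             # exact alias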
189 def findrepo():
189 def findrepo():
190 p = os.getcwd()
190 p = os.getcwd()
191 while not os.path.isdir(os.path.join(p, ".hg")):
191 while not os.path.isdir(os.path.join(p, ".hg")):
192 oldp, p = p, os.path.dirname(p)
192 oldp, p = p, os.path.dirname(p)
193 if p == oldp:
193 if p == oldp:
194 return None
194 return None
195
195
196 return p
196 return p
197
197
198 def parse(ui, args):
198 def parse(ui, args):
199 options = {}
199 options = {}
200 cmdoptions = {}
200 cmdoptions = {}
201
201
202 try:
202 try:
203 args = fancyopts.fancyopts(args, commands.globalopts, options)
203 args = fancyopts.fancyopts(args, commands.globalopts, options)
204 except fancyopts.getopt.GetoptError, inst:
204 except fancyopts.getopt.GetoptError, inst:
205 raise ParseError(None, inst)
205 raise ParseError(None, inst)
206
206
207 if args:
207 if args:
208 cmd, args = args[0], args[1:]
208 cmd, args = args[0], args[1:]
209 aliases, i = findcmd(ui, cmd)
209 aliases, i = findcmd(ui, cmd)
210 cmd = aliases[0]
210 cmd = aliases[0]
211 defaults = ui.config("defaults", cmd)
211 defaults = ui.config("defaults", cmd)
212 if defaults:
212 if defaults:
213 args = shlex.split(defaults) + args
213 args = shlex.split(defaults) + args
214 c = list(i[1])
214 c = list(i[1])
215 else:
215 else:
216 cmd = None
216 cmd = None
217 c = []
217 c = []
218
218
219 # combine global options into local
219 # combine global options into local
220 for o in commands.globalopts:
220 for o in commands.globalopts:
221 c.append((o[0], o[1], options[o[1]], o[3]))
221 c.append((o[0], o[1], options[o[1]], o[3]))
222
222
223 try:
223 try:
224 args = fancyopts.fancyopts(args, c, cmdoptions)
224 args = fancyopts.fancyopts(args, c, cmdoptions)
225 except fancyopts.getopt.GetoptError, inst:
225 except fancyopts.getopt.GetoptError, inst:
226 raise ParseError(cmd, inst)
226 raise ParseError(cmd, inst)
227
227
228 # separate global options back out
228 # separate global options back out
229 for o in commands.globalopts:
229 for o in commands.globalopts:
230 n = o[1]
230 n = o[1]
231 options[n] = cmdoptions[n]
231 options[n] = cmdoptions[n]
232 del cmdoptions[n]
232 del cmdoptions[n]
233
233
234 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
234 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
235
235
236 def parseconfig(config):
236 def parseconfig(config):
237 """parse the --config options from the command line"""
237 """parse the --config options from the command line"""
238 parsed = []
238 parsed = []
239 for cfg in config:
239 for cfg in config:
240 try:
240 try:
241 name, value = cfg.split('=', 1)
241 name, value = cfg.split('=', 1)
242 section, name = name.split('.', 1)
242 section, name = name.split('.', 1)
243 if not section or not name:
243 if not section or not name:
244 raise IndexError
244 raise IndexError
245 parsed.append((section, name, value))
245 parsed.append((section, name, value))
246 except (IndexError, ValueError):
246 except (IndexError, ValueError):
247 raise util.Abort(_('malformed --config option: %s') % cfg)
247 raise util.Abort(_('malformed --config option: %s') % cfg)
248 return parsed
248 return parsed
249
249
250 def earlygetopt(aliases, args):
250 def earlygetopt(aliases, args):
251 """Return list of values for an option (or aliases).
251 """Return list of values for an option (or aliases).
252
252
253 The values are listed in the order they appear in args.
253 The values are listed in the order they appear in args.
254 The options and values are removed from args.
254 The options and values are removed from args.
255 """
255 """
256 try:
256 try:
257 argcount = args.index("--")
257 argcount = args.index("--")
258 except ValueError:
258 except ValueError:
259 argcount = len(args)
259 argcount = len(args)
260 shortopts = [opt for opt in aliases if len(opt) == 2]
260 shortopts = [opt for opt in aliases if len(opt) == 2]
261 values = []
261 values = []
262 pos = 0
262 pos = 0
263 while pos < argcount:
263 while pos < argcount:
264 if args[pos] in aliases:
264 if args[pos] in aliases:
265 if pos + 1 >= argcount:
265 if pos + 1 >= argcount:
266 # ignore and let getopt report an error if there is no value
266 # ignore and let getopt report an error if there is no value
267 break
267 break
268 del args[pos]
268 del args[pos]
269 values.append(args.pop(pos))
269 values.append(args.pop(pos))
270 argcount -= 2
270 argcount -= 2
271 elif args[pos][:2] in shortopts:
271 elif args[pos][:2] in shortopts:
272 # short option can have no following space, e.g. hg log -Rfoo
272 # short option can have no following space, e.g. hg log -Rfoo
273 values.append(args.pop(pos)[2:])
273 values.append(args.pop(pos)[2:])
274 argcount -= 1
274 argcount -= 1
275 else:
275 else:
276 pos += 1
276 pos += 1
277 return values
277 return values
278
278
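earlygetopt() runs before full option parsing so that --config, --cwd and -R can take effect first; it pops the matched options out of args, including the spaceless short form such as -Rfoo. A simplified restatement of the scan on a sample argument list (an illustrative helper, not the cmdutil API):

    def pluck(aliases, args):
        # Collect values for the given option names and remove them from
        # args in place; '-Xvalue' is accepted for two-character aliases.
        shortopts = [a for a in aliases if len(a) == 2]
        values, pos = [], 0
        stop = args.index('--') if '--' in args else len(args)
        while pos < stop:
            if args[pos] in aliases and pos + 1 < stop:
                del args[pos]
                values.append(args.pop(pos))
                stop -= 2
            elif args[pos][:2] in shortopts and len(args[pos]) > 2:
                values.append(args.pop(pos)[2:])
                stop -= 1
            else:
                pos += 1
        return values

    args = ['-Rfoo', 'log', '--repository', 'bar', '-r', 'tip']
    assert pluck(['-R', '--repository', '--repo'], args) == ['foo', 'bar']
    assert args == ['log', '-r', 'tip']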
279 def dispatch(ui, args, argv0=None):
279 def dispatch(ui, args, argv0=None):
280 # remember how to call 'hg' before changing the working dir
280 # remember how to call 'hg' before changing the working dir
281 util.set_hgexecutable(argv0)
281 util.set_hgexecutable(argv0)
282
282
283 # read --config before doing anything else
283 # read --config before doing anything else
284 # (e.g. to change trust settings for reading .hg/hgrc)
284 # (e.g. to change trust settings for reading .hg/hgrc)
285 config = earlygetopt(['--config'], args)
285 config = earlygetopt(['--config'], args)
286 if config:
286 if config:
287 ui.updateopts(config=parseconfig(config))
287 ui.updateopts(config=parseconfig(config))
288
288
289 # check for cwd
289 # check for cwd
290 cwd = earlygetopt(['--cwd'], args)
290 cwd = earlygetopt(['--cwd'], args)
291 if cwd:
291 if cwd:
292 os.chdir(cwd[-1])
292 os.chdir(cwd[-1])
293
293
294 # read the local repository .hgrc into a local ui object
294 # read the local repository .hgrc into a local ui object
295 path = findrepo() or ""
295 path = findrepo() or ""
296 if not path:
296 if not path:
297 lui = ui
297 lui = ui
298 if path:
298 if path:
299 try:
299 try:
300 lui = commands.ui.ui(parentui=ui)
300 lui = commands.ui.ui(parentui=ui)
301 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
301 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
302 except IOError:
302 except IOError:
303 pass
303 pass
304
304
305 # now we can expand paths, even ones in .hg/hgrc
305 # now we can expand paths, even ones in .hg/hgrc
306 rpath = earlygetopt(["-R", "--repository", "--repo"], args)
306 rpath = earlygetopt(["-R", "--repository", "--repo"], args)
307 if rpath:
307 if rpath:
308 path = lui.expandpath(rpath[-1])
308 path = lui.expandpath(rpath[-1])
309 lui = commands.ui.ui(parentui=ui)
309 lui = commands.ui.ui(parentui=ui)
310 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
310 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
311
311
312 extensions.loadall(lui)
312 extensions.loadall(lui)
313 # check for fallback encoding
313 # check for fallback encoding
314 fallback = lui.config('ui', 'fallbackencoding')
314 fallback = lui.config('ui', 'fallbackencoding')
315 if fallback:
315 if fallback:
316 util._fallbackencoding = fallback
316 util._fallbackencoding = fallback
317
317
318 fullargs = args
318 fullargs = args
319 cmd, func, args, options, cmdoptions = parse(ui, args)
319 cmd, func, args, options, cmdoptions = parse(ui, args)
320
320
321 if options["config"]:
321 if options["config"]:
322 raise util.Abort(_("Option --config may not be abbreviated!"))
322 raise util.Abort(_("Option --config may not be abbreviated!"))
323 if options["cwd"]:
323 if options["cwd"]:
324 raise util.Abort(_("Option --cwd may not be abbreviated!"))
324 raise util.Abort(_("Option --cwd may not be abbreviated!"))
325 if options["repository"]:
325 if options["repository"]:
326 raise util.Abort(_(
326 raise util.Abort(_(
327 "Option -R has to be separated from other options (i.e. not -qR) "
327 "Option -R has to be separated from other options (i.e. not -qR) "
328 "and --repository may only be abbreviated as --repo!"))
328 "and --repository may only be abbreviated as --repo!"))
329
329
330 if options["encoding"]:
330 if options["encoding"]:
331 util._encoding = options["encoding"]
331 util._encoding = options["encoding"]
332 if options["encodingmode"]:
332 if options["encodingmode"]:
333 util._encodingmode = options["encodingmode"]
333 util._encodingmode = options["encodingmode"]
334 if options["time"]:
334 if options["time"]:
335 def get_times():
335 def get_times():
336 t = os.times()
336 t = os.times()
337 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
337 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
338 t = (t[0], t[1], t[2], t[3], time.clock())
338 t = (t[0], t[1], t[2], t[3], time.clock())
339 return t
339 return t
340 s = get_times()
340 s = get_times()
341 def print_time():
341 def print_time():
342 t = get_times()
342 t = get_times()
343 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
343 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
344 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
344 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
345 atexit.register(print_time)
345 atexit.register(print_time)
346
346
347 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
347 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
348 not options["noninteractive"], options["traceback"])
348 not options["noninteractive"], options["traceback"])
349
349
350 if options['help']:
350 if options['help']:
351 return commands.help_(ui, cmd, options['version'])
351 return commands.help_(ui, cmd, options['version'])
352 elif options['version']:
352 elif options['version']:
353 return commands.version_(ui)
353 return commands.version_(ui)
354 elif not cmd:
354 elif not cmd:
355 return commands.help_(ui, 'shortlist')
355 return commands.help_(ui, 'shortlist')
356
356
357 repo = None
357 repo = None
358 if cmd not in commands.norepo.split():
358 if cmd not in commands.norepo.split():
359 try:
359 try:
360 repo = hg.repository(ui, path=path)
360 repo = hg.repository(ui, path=path)
361 ui = repo.ui
361 ui = repo.ui
362 if not repo.local():
362 if not repo.local():
363 raise util.Abort(_("repository '%s' is not local") % path)
363 raise util.Abort(_("repository '%s' is not local") % path)
364 except hg.RepoError:
364 except hg.RepoError:
365 if cmd not in commands.optionalrepo.split():
365 if cmd not in commands.optionalrepo.split():
366 if not path:
366 if not path:
367 raise hg.RepoError(_("There is no Mercurial repository here"
367 raise hg.RepoError(_("There is no Mercurial repository here"
368 " (.hg not found)"))
368 " (.hg not found)"))
369 raise
369 raise
370 d = lambda: func(ui, repo, *args, **cmdoptions)
370 d = lambda: func(ui, repo, *args, **cmdoptions)
371 else:
371 else:
372 d = lambda: func(ui, *args, **cmdoptions)
372 d = lambda: func(ui, *args, **cmdoptions)
373
373
374 # run pre-hook, and abort if it fails
374 # run pre-hook, and abort if it fails
375 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
375 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
376 if ret:
376 if ret:
377 return ret
377 return ret
378 ret = runcommand(ui, options, cmd, d)
378 ret = runcommand(ui, options, cmd, d)
379 # run post-hook, passing command result
379 # run post-hook, passing command result
380 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
380 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
381 result = ret)
381 result = ret)
382 return ret
382 return ret
383
383
384 def runcommand(ui, options, cmd, cmdfunc):
384 def runcommand(ui, options, cmd, cmdfunc):
385 def checkargs():
385 def checkargs():
386 try:
386 try:
387 return cmdfunc()
387 return cmdfunc()
388 except TypeError, inst:
388 except TypeError, inst:
389 # was this an argument error?
389 # was this an argument error?
390 tb = traceback.extract_tb(sys.exc_info()[2])
390 tb = traceback.extract_tb(sys.exc_info()[2])
391 if len(tb) != 2: # no
391 if len(tb) != 2: # no
392 raise
392 raise
393 raise ParseError(cmd, _("invalid arguments"))
393 raise ParseError(cmd, _("invalid arguments"))
394
394
395 if options['profile']:
395 if options['profile']:
396 import hotshot, hotshot.stats
396 import hotshot, hotshot.stats
397 prof = hotshot.Profile("hg.prof")
397 prof = hotshot.Profile("hg.prof")
398 try:
398 try:
399 try:
399 try:
400 return prof.runcall(checkargs)
400 return prof.runcall(checkargs)
401 except:
401 except:
402 try:
402 try:
403 ui.warn(_('exception raised - generating '
403 ui.warn(_('exception raised - generating '
404 'profile anyway\n'))
404 'profile anyway\n'))
405 except:
405 except:
406 pass
406 pass
407 raise
407 raise
408 finally:
408 finally:
409 prof.close()
409 prof.close()
410 stats = hotshot.stats.load("hg.prof")
410 stats = hotshot.stats.load("hg.prof")
411 stats.strip_dirs()
411 stats.strip_dirs()
412 stats.sort_stats('time', 'calls')
412 stats.sort_stats('time', 'calls')
413 stats.print_stats(40)
413 stats.print_stats(40)
414 elif options['lsprof']:
414 elif options['lsprof']:
415 try:
415 try:
416 from mercurial import lsprof
416 from mercurial import lsprof
417 except ImportError:
417 except ImportError:
418 raise util.Abort(_(
418 raise util.Abort(_(
419 'lsprof not available - install from '
419 'lsprof not available - install from '
420 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
420 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
421 p = lsprof.Profiler()
421 p = lsprof.Profiler()
422 p.enable(subcalls=True)
422 p.enable(subcalls=True)
423 try:
423 try:
424 return checkargs()
424 return checkargs()
425 finally:
425 finally:
426 p.disable()
426 p.disable()
427 stats = lsprof.Stats(p.getstats())
427 stats = lsprof.Stats(p.getstats())
428 stats.sort()
428 stats.sort()
429 stats.pprint(top=10, file=sys.stderr, climit=5)
429 stats.pprint(top=10, file=sys.stderr, climit=5)
430 else:
430 else:
431 return checkargs()
431 return checkargs()
432
432
433 def bail_if_changed(repo):
433 def bail_if_changed(repo):
434 modified, added, removed, deleted = repo.status()[:4]
434 modified, added, removed, deleted = repo.status()[:4]
435 if modified or added or removed or deleted:
435 if modified or added or removed or deleted:
436 raise util.Abort(_("outstanding uncommitted changes"))
436 raise util.Abort(_("outstanding uncommitted changes"))
437
437
438 def logmessage(opts):
438 def logmessage(opts):
439 """ get the log message according to -m and -l option """
439 """ get the log message according to -m and -l option """
440 message = opts['message']
440 message = opts['message']
441 logfile = opts['logfile']
441 logfile = opts['logfile']
442
442
443 if message and logfile:
443 if message and logfile:
444 raise util.Abort(_('options --message and --logfile are mutually '
444 raise util.Abort(_('options --message and --logfile are mutually '
445 'exclusive'))
445 'exclusive'))
446 if not message and logfile:
446 if not message and logfile:
447 try:
447 try:
448 if logfile == '-':
448 if logfile == '-':
449 message = sys.stdin.read()
449 message = sys.stdin.read()
450 else:
450 else:
451 message = open(logfile).read()
451 message = open(logfile).read()
452 except IOError, inst:
452 except IOError, inst:
453 raise util.Abort(_("can't read commit message '%s': %s") %
453 raise util.Abort(_("can't read commit message '%s': %s") %
454 (logfile, inst.strerror))
454 (logfile, inst.strerror))
455 return message
455 return message
456
456
457 def setremoteconfig(ui, opts):
457 def setremoteconfig(ui, opts):
458 "copy remote options to ui tree"
458 "copy remote options to ui tree"
459 if opts.get('ssh'):
459 if opts.get('ssh'):
460 ui.setconfig("ui", "ssh", opts['ssh'])
460 ui.setconfig("ui", "ssh", opts['ssh'])
461 if opts.get('remotecmd'):
461 if opts.get('remotecmd'):
462 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
462 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
463
463
464 def parseurl(url, revs):
464 def parseurl(url, revs):
465 '''parse url#branch, returning url, branch + revs'''
465 '''parse url#branch, returning url, branch + revs'''
466
466
467 if '#' not in url:
467 if '#' not in url:
468 return url, (revs or None)
468 return url, (revs or None)
469
469
470 url, rev = url.split('#', 1)
470 url, rev = url.split('#', 1)
471 return url, revs + [rev]
471 return url, revs + [rev]
472
472
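parseurl() splits an optional '#branch' fragment off a pull/push URL and appends it to the requested revisions. A quick illustration (the URL and helper name are hypothetical):

    def parseurl_demo(url, revs):
        # Same rule as cmdutil.parseurl(): no fragment -> revs unchanged
        # (None when empty), otherwise the fragment becomes an extra rev.
        if '#' not in url:
            return url, (revs or None)
        url, rev = url.split('#', 1)
        return url, revs + [rev]

    assert parseurl_demo('http://example.com/repo', []) == ('http://example.com/repo', None)
    assert parseurl_demo('http://example.com/repo#stable', []) == ('http://example.com/repo', ['stable'])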
473 def revpair(repo, revs):
473 def revpair(repo, revs):
474 '''return pair of nodes, given list of revisions. second item can
474 '''return pair of nodes, given list of revisions. second item can
475 be None, meaning use working dir.'''
475 be None, meaning use working dir.'''
476
476
477 def revfix(repo, val, defval):
477 def revfix(repo, val, defval):
478 if not val and val != 0 and defval is not None:
478 if not val and val != 0 and defval is not None:
479 val = defval
479 val = defval
480 return repo.lookup(val)
480 return repo.lookup(val)
481
481
482 if not revs:
482 if not revs:
483 return repo.dirstate.parents()[0], None
483 return repo.dirstate.parents()[0], None
484 end = None
484 end = None
485 if len(revs) == 1:
485 if len(revs) == 1:
486 if revrangesep in revs[0]:
486 if revrangesep in revs[0]:
487 start, end = revs[0].split(revrangesep, 1)
487 start, end = revs[0].split(revrangesep, 1)
488 start = revfix(repo, start, 0)
488 start = revfix(repo, start, 0)
489 end = revfix(repo, end, repo.changelog.count() - 1)
489 end = revfix(repo, end, repo.changelog.count() - 1)
490 else:
490 else:
491 start = revfix(repo, revs[0], None)
491 start = revfix(repo, revs[0], None)
492 elif len(revs) == 2:
492 elif len(revs) == 2:
493 if revrangesep in revs[0] or revrangesep in revs[1]:
493 if revrangesep in revs[0] or revrangesep in revs[1]:
494 raise util.Abort(_('too many revisions specified'))
494 raise util.Abort(_('too many revisions specified'))
495 start = revfix(repo, revs[0], None)
495 start = revfix(repo, revs[0], None)
496 end = revfix(repo, revs[1], None)
496 end = revfix(repo, revs[1], None)
497 else:
497 else:
498 raise util.Abort(_('too many revisions specified'))
498 raise util.Abort(_('too many revisions specified'))
499 return start, end
499 return start, end
500
500
501 def revrange(repo, revs):
501 def revrange(repo, revs):
502 """Yield revision as strings from a list of revision specifications."""
502 """Yield revision as strings from a list of revision specifications."""
503
503
504 def revfix(repo, val, defval):
504 def revfix(repo, val, defval):
505 if not val and val != 0 and defval is not None:
505 if not val and val != 0 and defval is not None:
506 return defval
506 return defval
507 return repo.changelog.rev(repo.lookup(val))
507 return repo.changelog.rev(repo.lookup(val))
508
508
509 seen, l = {}, []
509 seen, l = {}, []
510 for spec in revs:
510 for spec in revs:
511 if revrangesep in spec:
511 if revrangesep in spec:
512 start, end = spec.split(revrangesep, 1)
512 start, end = spec.split(revrangesep, 1)
513 start = revfix(repo, start, 0)
513 start = revfix(repo, start, 0)
514 end = revfix(repo, end, repo.changelog.count() - 1)
514 end = revfix(repo, end, repo.changelog.count() - 1)
515 step = start > end and -1 or 1
515 step = start > end and -1 or 1
516 for rev in xrange(start, end+step, step):
516 for rev in xrange(start, end+step, step):
517 if rev in seen:
517 if rev in seen:
518 continue
518 continue
519 seen[rev] = 1
519 seen[rev] = 1
520 l.append(rev)
520 l.append(rev)
521 else:
521 else:
522 rev = revfix(repo, spec, None)
522 rev = revfix(repo, spec, None)
523 if rev in seen:
523 if rev in seen:
524 continue
524 continue
525 seen[rev] = 1
525 seen[rev] = 1
526 l.append(rev)
526 l.append(rev)
527
527
528 return l
528 return l
529
529
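revrange() expands each 'START:END' spec inclusively, in either direction, resolves bare specs, and drops duplicates while keeping first-seen order. A toy version over plain integer revisions, standing in for repo.lookup() (not the real signature):

    def expand(specs, tipidx):
        # 'a:b' is inclusive and may run backwards; an empty endpoint
        # defaults to 0 (start) or tipidx (end), as revfix() does above.
        seen, out = set(), []
        for spec in specs:
            if ':' in spec:
                start, end = spec.split(':', 1)
                start = int(start) if start else 0
                end = int(end) if end else tipidx
                step = -1 if start > end else 1
                revs = range(start, end + step, step)
            else:
                revs = [int(spec)]
            for rev in revs:
                if rev not in seen:
                    seen.add(rev)
                    out.append(rev)
        return out

    assert expand(['2:4', '3', '1:0'], tipidx=10) == [2, 3, 4, 1, 0]
    assert expand([':2'], tipidx=10) == [0, 1, 2]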
530 def make_filename(repo, pat, node,
530 def make_filename(repo, pat, node,
531 total=None, seqno=None, revwidth=None, pathname=None):
531 total=None, seqno=None, revwidth=None, pathname=None):
532 node_expander = {
532 node_expander = {
533 'H': lambda: hex(node),
533 'H': lambda: hex(node),
534 'R': lambda: str(repo.changelog.rev(node)),
534 'R': lambda: str(repo.changelog.rev(node)),
535 'h': lambda: short(node),
535 'h': lambda: short(node),
536 }
536 }
537 expander = {
537 expander = {
538 '%': lambda: '%',
538 '%': lambda: '%',
539 'b': lambda: os.path.basename(repo.root),
539 'b': lambda: os.path.basename(repo.root),
540 }
540 }
541
541
542 try:
542 try:
543 if node:
543 if node:
544 expander.update(node_expander)
544 expander.update(node_expander)
545 if node:
545 if node:
546 expander['r'] = (lambda:
546 expander['r'] = (lambda:
547 str(repo.changelog.rev(node)).zfill(revwidth or 0))
547 str(repo.changelog.rev(node)).zfill(revwidth or 0))
548 if total is not None:
548 if total is not None:
549 expander['N'] = lambda: str(total)
549 expander['N'] = lambda: str(total)
550 if seqno is not None:
550 if seqno is not None:
551 expander['n'] = lambda: str(seqno)
551 expander['n'] = lambda: str(seqno)
552 if total is not None and seqno is not None:
552 if total is not None and seqno is not None:
553 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
553 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
554 if pathname is not None:
554 if pathname is not None:
555 expander['s'] = lambda: os.path.basename(pathname)
555 expander['s'] = lambda: os.path.basename(pathname)
556 expander['d'] = lambda: os.path.dirname(pathname) or '.'
556 expander['d'] = lambda: os.path.dirname(pathname) or '.'
557 expander['p'] = lambda: pathname
557 expander['p'] = lambda: pathname
558
558
559 newname = []
559 newname = []
560 patlen = len(pat)
560 patlen = len(pat)
561 i = 0
561 i = 0
562 while i < patlen:
562 while i < patlen:
563 c = pat[i]
563 c = pat[i]
564 if c == '%':
564 if c == '%':
565 i += 1
565 i += 1
566 c = pat[i]
566 c = pat[i]
567 c = expander[c]()
567 c = expander[c]()
568 newname.append(c)
568 newname.append(c)
569 i += 1
569 i += 1
570 return ''.join(newname)
570 return ''.join(newname)
571 except KeyError, inst:
571 except KeyError, inst:
572 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
572 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
573 inst.args[0])
573 inst.args[0])
574
574
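make_filename() walks the pattern and replaces each %-code from the expander tables above: '%%' gives a literal percent, and '%n' is the sequence number zero-padded to the width of '%N'. A minimal expansion sketch over a plain dict (a hypothetical helper, not the cmdutil signature), using total=120 and seqno=3:

    def expand_pattern(pat, codes):
        # Same scan as make_filename(): '%x' is looked up in the table.
        out, i = [], 0
        while i < len(pat):
            c = pat[i]
            if c == '%':
                i += 1
                c = codes[pat[i]]()
            out.append(c)
            i += 1
        return ''.join(out)

    total, seqno = 120, 3
    codes = {
        '%': lambda: '%',
        'N': lambda: str(total),
        'n': lambda: str(seqno).zfill(len(str(total))),
    }
    assert expand_pattern('export-%n-of-%N.patch', codes) == 'export-003-of-120.patch'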
575 def make_file(repo, pat, node=None,
575 def make_file(repo, pat, node=None,
576 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
576 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
577 if not pat or pat == '-':
577 if not pat or pat == '-':
578 return 'w' in mode and sys.stdout or sys.stdin
578 return 'w' in mode and sys.stdout or sys.stdin
579 if hasattr(pat, 'write') and 'w' in mode:
579 if hasattr(pat, 'write') and 'w' in mode:
580 return pat
580 return pat
581 if hasattr(pat, 'read') and 'r' in mode:
581 if hasattr(pat, 'read') and 'r' in mode:
582 return pat
582 return pat
583 return open(make_filename(repo, pat, node, total, seqno, revwidth,
583 return open(make_filename(repo, pat, node, total, seqno, revwidth,
584 pathname),
584 pathname),
585 mode)
585 mode)
586
586
587 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
587 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
588 cwd = repo.getcwd()
588 cwd = repo.getcwd()
589 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
589 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
590 opts.get('exclude'), globbed=globbed,
590 opts.get('exclude'), globbed=globbed,
591 default=default)
591 default=default)
592
592
593 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
593 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
594 default=None):
594 default=None):
595 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
595 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
596 default=default)
596 default=default)
597 exact = dict.fromkeys(files)
597 exact = dict.fromkeys(files)
598 cwd = repo.getcwd()
598 cwd = repo.getcwd()
599 for src, fn in repo.walk(node=node, files=files, match=matchfn,
599 for src, fn in repo.walk(node=node, files=files, match=matchfn,
600 badmatch=badmatch):
600 badmatch=badmatch):
601 yield src, fn, repo.pathto(fn, cwd), fn in exact
601 yield src, fn, repo.pathto(fn, cwd), fn in exact
602
602
603 def findrenames(repo, added=None, removed=None, threshold=0.5):
603 def findrenames(repo, added=None, removed=None, threshold=0.5):
604 '''find renamed files -- yields (before, after, score) tuples'''
604 '''find renamed files -- yields (before, after, score) tuples'''
605 if added is None or removed is None:
605 if added is None or removed is None:
606 added, removed = repo.status()[1:3]
606 added, removed = repo.status()[1:3]
607 ctx = repo.changectx()
607 ctx = repo.changectx()
608 for a in added:
608 for a in added:
609 aa = repo.wread(a)
609 aa = repo.wread(a)
610 bestname, bestscore = None, threshold
610 bestname, bestscore = None, threshold
611 for r in removed:
611 for r in removed:
612 rr = ctx.filectx(r).data()
612 rr = ctx.filectx(r).data()
613
613
614 # bdiff.blocks() returns blocks of matching lines
614 # bdiff.blocks() returns blocks of matching lines
615 # count the number of bytes in each
615 # count the number of bytes in each
616 equal = 0
616 equal = 0
617 alines = mdiff.splitnewlines(aa)
617 alines = mdiff.splitnewlines(aa)
618 matches = bdiff.blocks(aa, rr)
618 matches = bdiff.blocks(aa, rr)
619 for x1,x2,y1,y2 in matches:
619 for x1,x2,y1,y2 in matches:
620 for line in alines[x1:x2]:
620 for line in alines[x1:x2]:
621 equal += len(line)
621 equal += len(line)
622
622
623 lengths = len(aa) + len(rr)
623 lengths = len(aa) + len(rr)
624 if lengths:
624 if lengths:
625 myscore = equal*2.0 / lengths
625 myscore = equal*2.0 / lengths
626 if myscore >= bestscore:
626 if myscore >= bestscore:
627 bestname, bestscore = r, myscore
627 bestname, bestscore = r, myscore
628 if bestname:
628 if bestname:
629 yield bestname, a, bestscore
629 yield bestname, a, bestscore
630
630
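The score above is twice the number of bytes found in matching lines divided by the combined size of the two files, and only the best candidate at or above the threshold (0.5 by default) is reported. A quick worked number with hypothetical sizes:

    # Suppose a removed file of 100 bytes and an added file of 120 bytes
    # share 90 bytes of matching lines (counted on the added file's side,
    # as in the loop over bdiff.blocks() above).
    equal, len_added, len_removed = 90, 120, 100
    score = equal * 2.0 / (len_added + len_removed)
    assert abs(score - 0.8181818181818182) < 1e-12
    assert score >= 0.5   # would be recorded as a rename at the default threshold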
631 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
631 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
632 similarity=None):
633 if dry_run is None:
632 if dry_run is None:
634 dry_run = opts.get('dry_run')
633 dry_run = opts.get('dry_run')
635 if similarity is None:
634 if similarity is None:
636 similarity = float(opts.get('similarity') or 0)
635 similarity = float(opts.get('similarity') or 0)
637 add, remove = [], []
636 add, remove = [], []
638 mapping = {}
637 mapping = {}
639 for src, abs, rel, exact in walk(repo, pats, opts):
638 for src, abs, rel, exact in walk(repo, pats, opts):
640 target = repo.wjoin(abs)
639 target = repo.wjoin(abs)
641 if src == 'f' and abs not in repo.dirstate:
640 if src == 'f' and abs not in repo.dirstate:
642 add.append(abs)
641 add.append(abs)
643 mapping[abs] = rel, exact
642 mapping[abs] = rel, exact
644 if repo.ui.verbose or not exact:
643 if repo.ui.verbose or not exact:
645 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
644 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
646 if repo.dirstate[abs] != 'r' and not util.lexists(target):
645 if repo.dirstate[abs] != 'r' and not util.lexists(target):
647 remove.append(abs)
646 remove.append(abs)
648 mapping[abs] = rel, exact
647 mapping[abs] = rel, exact
649 if repo.ui.verbose or not exact:
648 if repo.ui.verbose or not exact:
650 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
649 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
651 if not dry_run:
650 if not dry_run:
652 repo.add(add, wlock=wlock)
651 repo.add(add)
653 repo.remove(remove, wlock=wlock)
652 repo.remove(remove)
654 if similarity > 0:
653 if similarity > 0:
655 for old, new, score in findrenames(repo, add, remove, similarity):
654 for old, new, score in findrenames(repo, add, remove, similarity):
656 oldrel, oldexact = mapping[old]
655 oldrel, oldexact = mapping[old]
657 newrel, newexact = mapping[new]
656 newrel, newexact = mapping[new]
658 if repo.ui.verbose or not oldexact or not newexact:
657 if repo.ui.verbose or not oldexact or not newexact:
659 repo.ui.status(_('recording removal of %s as rename to %s '
658 repo.ui.status(_('recording removal of %s as rename to %s '
660 '(%d%% similar)\n') %
659 '(%d%% similar)\n') %
661 (oldrel, newrel, score * 100))
660 (oldrel, newrel, score * 100))
662 if not dry_run:
661 if not dry_run:
663 repo.copy(old, new, wlock=wlock)
662 repo.copy(old, new)
664
663
665 def service(opts, parentfn=None, initfn=None, runfn=None):
664 def service(opts, parentfn=None, initfn=None, runfn=None):
666 '''Run a command as a service.'''
665 '''Run a command as a service.'''
667
666
668 if opts['daemon'] and not opts['daemon_pipefds']:
667 if opts['daemon'] and not opts['daemon_pipefds']:
669 rfd, wfd = os.pipe()
668 rfd, wfd = os.pipe()
670 args = sys.argv[:]
669 args = sys.argv[:]
671 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
670 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
672 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
671 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
673 args[0], args)
672 args[0], args)
674 os.close(wfd)
673 os.close(wfd)
675 os.read(rfd, 1)
674 os.read(rfd, 1)
676 if parentfn:
675 if parentfn:
677 return parentfn(pid)
676 return parentfn(pid)
678 else:
677 else:
679 os._exit(0)
678 os._exit(0)
680
679
681 if initfn:
680 if initfn:
682 initfn()
681 initfn()
683
682
684 if opts['pid_file']:
683 if opts['pid_file']:
685 fp = open(opts['pid_file'], 'w')
684 fp = open(opts['pid_file'], 'w')
686 fp.write(str(os.getpid()) + '\n')
685 fp.write(str(os.getpid()) + '\n')
687 fp.close()
686 fp.close()
688
687
689 if opts['daemon_pipefds']:
688 if opts['daemon_pipefds']:
690 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
689 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
691 os.close(rfd)
690 os.close(rfd)
692 try:
691 try:
693 os.setsid()
692 os.setsid()
694 except AttributeError:
693 except AttributeError:
695 pass
694 pass
696 os.write(wfd, 'y')
695 os.write(wfd, 'y')
697 os.close(wfd)
696 os.close(wfd)
698 sys.stdout.flush()
697 sys.stdout.flush()
699 sys.stderr.flush()
698 sys.stderr.flush()
700 fd = os.open(util.nulldev, os.O_RDWR)
699 fd = os.open(util.nulldev, os.O_RDWR)
701 if fd != 0: os.dup2(fd, 0)
700 if fd != 0: os.dup2(fd, 0)
702 if fd != 1: os.dup2(fd, 1)
701 if fd != 1: os.dup2(fd, 1)
703 if fd != 2: os.dup2(fd, 2)
702 if fd != 2: os.dup2(fd, 2)
704 if fd not in (0, 1, 2): os.close(fd)
703 if fd not in (0, 1, 2): os.close(fd)
705
704
706 if runfn:
705 if runfn:
707 return runfn()
706 return runfn()
708
707
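
service() daemonizes by re-executing itself with --daemon-pipefds and blocking in the parent on a pipe read until the child has called setsid(), written a single byte, and redirected its descriptors to the null device. A minimal standalone sketch of the same handshake, assuming a POSIX system and using fork() where service() re-executes via os.spawnvp():

# Minimal sketch of the pipe-handshake daemonization pattern used above;
# names and the use of fork() are assumptions, not the service() code itself.
import os, sys

def daemonize_sketch(run):
    rfd, wfd = os.pipe()
    pid = os.fork()
    if pid:                      # parent: wait until the child signals readiness
        os.close(wfd)
        os.read(rfd, 1)          # blocks until the child writes one byte
        os.close(rfd)
        return pid
    os.close(rfd)
    os.setsid()                  # detach from the controlling terminal
    os.write(wfd, b'y')          # tell the parent we are ready
    os.close(wfd)
    sys.stdout.flush(); sys.stderr.flush()
    fd = os.open(os.devnull, os.O_RDWR)
    for std in (0, 1, 2):        # point stdin/stdout/stderr at the null device
        if fd != std:
            os.dup2(fd, std)
    if fd > 2:
        os.close(fd)
    run()
    os._exit(0)
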
709 class changeset_printer(object):
708 class changeset_printer(object):
710 '''show changeset information when templating not requested.'''
709 '''show changeset information when templating not requested.'''
711
710
712 def __init__(self, ui, repo, patch, buffered):
711 def __init__(self, ui, repo, patch, buffered):
713 self.ui = ui
712 self.ui = ui
714 self.repo = repo
713 self.repo = repo
715 self.buffered = buffered
714 self.buffered = buffered
716 self.patch = patch
715 self.patch = patch
717 self.header = {}
716 self.header = {}
718 self.hunk = {}
717 self.hunk = {}
719 self.lastheader = None
718 self.lastheader = None
720
719
721 def flush(self, rev):
720 def flush(self, rev):
722 if rev in self.header:
721 if rev in self.header:
723 h = self.header[rev]
722 h = self.header[rev]
724 if h != self.lastheader:
723 if h != self.lastheader:
725 self.lastheader = h
724 self.lastheader = h
726 self.ui.write(h)
725 self.ui.write(h)
727 del self.header[rev]
726 del self.header[rev]
728 if rev in self.hunk:
727 if rev in self.hunk:
729 self.ui.write(self.hunk[rev])
728 self.ui.write(self.hunk[rev])
730 del self.hunk[rev]
729 del self.hunk[rev]
731 return 1
730 return 1
732 return 0
731 return 0
733
732
734 def show(self, rev=0, changenode=None, copies=(), **props):
733 def show(self, rev=0, changenode=None, copies=(), **props):
735 if self.buffered:
734 if self.buffered:
736 self.ui.pushbuffer()
735 self.ui.pushbuffer()
737 self._show(rev, changenode, copies, props)
736 self._show(rev, changenode, copies, props)
738 self.hunk[rev] = self.ui.popbuffer()
737 self.hunk[rev] = self.ui.popbuffer()
739 else:
738 else:
740 self._show(rev, changenode, copies, props)
739 self._show(rev, changenode, copies, props)
741
740
742 def _show(self, rev, changenode, copies, props):
741 def _show(self, rev, changenode, copies, props):
743 '''show a single changeset or file revision'''
742 '''show a single changeset or file revision'''
744 log = self.repo.changelog
743 log = self.repo.changelog
745 if changenode is None:
744 if changenode is None:
746 changenode = log.node(rev)
745 changenode = log.node(rev)
747 elif not rev:
746 elif not rev:
748 rev = log.rev(changenode)
747 rev = log.rev(changenode)
749
748
750 if self.ui.quiet:
749 if self.ui.quiet:
751 self.ui.write("%d:%s\n" % (rev, short(changenode)))
750 self.ui.write("%d:%s\n" % (rev, short(changenode)))
752 return
751 return
753
752
754 changes = log.read(changenode)
753 changes = log.read(changenode)
755 date = util.datestr(changes[2])
754 date = util.datestr(changes[2])
756 extra = changes[5]
755 extra = changes[5]
757 branch = extra.get("branch")
756 branch = extra.get("branch")
758
757
759 hexfunc = self.ui.debugflag and hex or short
758 hexfunc = self.ui.debugflag and hex or short
760
759
761 parents = [(p, hexfunc(log.node(p)))
760 parents = [(p, hexfunc(log.node(p)))
762 for p in self._meaningful_parentrevs(log, rev)]
761 for p in self._meaningful_parentrevs(log, rev)]
763
762
764 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
763 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
765
764
766 # don't show the default branch name
765 # don't show the default branch name
767 if branch != 'default':
766 if branch != 'default':
768 branch = util.tolocal(branch)
767 branch = util.tolocal(branch)
769 self.ui.write(_("branch: %s\n") % branch)
768 self.ui.write(_("branch: %s\n") % branch)
770 for tag in self.repo.nodetags(changenode):
769 for tag in self.repo.nodetags(changenode):
771 self.ui.write(_("tag: %s\n") % tag)
770 self.ui.write(_("tag: %s\n") % tag)
772 for parent in parents:
771 for parent in parents:
773 self.ui.write(_("parent: %d:%s\n") % parent)
772 self.ui.write(_("parent: %d:%s\n") % parent)
774
773
775 if self.ui.debugflag:
774 if self.ui.debugflag:
776 self.ui.write(_("manifest: %d:%s\n") %
775 self.ui.write(_("manifest: %d:%s\n") %
777 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
776 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
778 self.ui.write(_("user: %s\n") % changes[1])
777 self.ui.write(_("user: %s\n") % changes[1])
779 self.ui.write(_("date: %s\n") % date)
778 self.ui.write(_("date: %s\n") % date)
780
779
781 if self.ui.debugflag:
780 if self.ui.debugflag:
782 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
781 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
783 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
782 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
784 files):
783 files):
785 if value:
784 if value:
786 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
785 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
787 elif changes[3] and self.ui.verbose:
786 elif changes[3] and self.ui.verbose:
788 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
787 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
789 if copies and self.ui.verbose:
788 if copies and self.ui.verbose:
790 copies = ['%s (%s)' % c for c in copies]
789 copies = ['%s (%s)' % c for c in copies]
791 self.ui.write(_("copies: %s\n") % ' '.join(copies))
790 self.ui.write(_("copies: %s\n") % ' '.join(copies))
792
791
793 if extra and self.ui.debugflag:
792 if extra and self.ui.debugflag:
794 extraitems = extra.items()
793 extraitems = extra.items()
795 extraitems.sort()
794 extraitems.sort()
796 for key, value in extraitems:
795 for key, value in extraitems:
797 self.ui.write(_("extra: %s=%s\n")
796 self.ui.write(_("extra: %s=%s\n")
798 % (key, value.encode('string_escape')))
797 % (key, value.encode('string_escape')))
799
798
800 description = changes[4].strip()
799 description = changes[4].strip()
801 if description:
800 if description:
802 if self.ui.verbose:
801 if self.ui.verbose:
803 self.ui.write(_("description:\n"))
802 self.ui.write(_("description:\n"))
804 self.ui.write(description)
803 self.ui.write(description)
805 self.ui.write("\n\n")
804 self.ui.write("\n\n")
806 else:
805 else:
807 self.ui.write(_("summary: %s\n") %
806 self.ui.write(_("summary: %s\n") %
808 description.splitlines()[0])
807 description.splitlines()[0])
809 self.ui.write("\n")
808 self.ui.write("\n")
810
809
811 self.showpatch(changenode)
810 self.showpatch(changenode)
812
811
813 def showpatch(self, node):
812 def showpatch(self, node):
814 if self.patch:
813 if self.patch:
815 prev = self.repo.changelog.parents(node)[0]
814 prev = self.repo.changelog.parents(node)[0]
816 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
815 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
817 opts=patch.diffopts(self.ui))
816 opts=patch.diffopts(self.ui))
818 self.ui.write("\n")
817 self.ui.write("\n")
819
818
820 def _meaningful_parentrevs(self, log, rev):
819 def _meaningful_parentrevs(self, log, rev):
821 """Return list of meaningful (or all if debug) parentrevs for rev.
820 """Return list of meaningful (or all if debug) parentrevs for rev.
822
821
823 For merges (two non-nullrev revisions) both parents are meaningful.
822 For merges (two non-nullrev revisions) both parents are meaningful.
824 Otherwise the first parent revision is considered meaningful if it
823 Otherwise the first parent revision is considered meaningful if it
825 is not the preceding revision.
824 is not the preceding revision.
826 """
825 """
827 parents = log.parentrevs(rev)
826 parents = log.parentrevs(rev)
828 if not self.ui.debugflag and parents[1] == nullrev:
827 if not self.ui.debugflag and parents[1] == nullrev:
829 if parents[0] >= rev - 1:
828 if parents[0] >= rev - 1:
830 parents = []
829 parents = []
831 else:
830 else:
832 parents = [parents[0]]
831 parents = [parents[0]]
833 return parents
832 return parents
834
833
835
834
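
changeset_printer either writes directly or, when buffered, captures each changeset's rendered output in self.hunk[rev] (and its header in self.header[rev]) so callers can emit revisions in a different order via flush(). A stripped-down sketch of that buffer-then-flush pattern, independent of the ui object (which the real class drives through ui.pushbuffer()/ui.popbuffer()):

# Sketch of the buffered show()/flush() pattern; names are illustrative.
class buffered_printer_sketch(object):
    def __init__(self, write):
        self.write = write
        self.hunk = {}           # rev -> rendered text, held until flush()

    def show(self, rev, text):
        self.hunk[rev] = text    # buffer instead of writing immediately

    def flush(self, rev):
        if rev in self.hunk:     # emit (and forget) one revision's output
            self.write(self.hunk.pop(rev))
            return 1
        return 0

out = []
p = buffered_printer_sketch(out.append)
for rev in (3, 1, 2):            # gather out of order...
    p.show(rev, 'changeset %d\n' % rev)
for rev in (1, 2, 3):            # ...then flush in display order
    p.flush(rev)
print(''.join(out))
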
836 class changeset_templater(changeset_printer):
835 class changeset_templater(changeset_printer):
837 '''format changeset information.'''
836 '''format changeset information.'''
838
837
839 def __init__(self, ui, repo, patch, mapfile, buffered):
838 def __init__(self, ui, repo, patch, mapfile, buffered):
840 changeset_printer.__init__(self, ui, repo, patch, buffered)
839 changeset_printer.__init__(self, ui, repo, patch, buffered)
841 filters = templater.common_filters.copy()
840 filters = templater.common_filters.copy()
842 filters['formatnode'] = (ui.debugflag and (lambda x: x)
841 filters['formatnode'] = (ui.debugflag and (lambda x: x)
843 or (lambda x: x[:12]))
842 or (lambda x: x[:12]))
844 self.t = templater.templater(mapfile, filters,
843 self.t = templater.templater(mapfile, filters,
845 cache={
844 cache={
846 'parent': '{rev}:{node|formatnode} ',
845 'parent': '{rev}:{node|formatnode} ',
847 'manifest': '{rev}:{node|formatnode}',
846 'manifest': '{rev}:{node|formatnode}',
848 'filecopy': '{name} ({source})'})
847 'filecopy': '{name} ({source})'})
849
848
850 def use_template(self, t):
849 def use_template(self, t):
851 '''set template string to use'''
850 '''set template string to use'''
852 self.t.cache['changeset'] = t
851 self.t.cache['changeset'] = t
853
852
854 def _show(self, rev, changenode, copies, props):
853 def _show(self, rev, changenode, copies, props):
855 '''show a single changeset or file revision'''
854 '''show a single changeset or file revision'''
856 log = self.repo.changelog
855 log = self.repo.changelog
857 if changenode is None:
856 if changenode is None:
858 changenode = log.node(rev)
857 changenode = log.node(rev)
859 elif not rev:
858 elif not rev:
860 rev = log.rev(changenode)
859 rev = log.rev(changenode)
861
860
862 changes = log.read(changenode)
861 changes = log.read(changenode)
863
862
864 def showlist(name, values, plural=None, **args):
863 def showlist(name, values, plural=None, **args):
865 '''expand set of values.
864 '''expand set of values.
866 name is name of key in template map.
865 name is name of key in template map.
867 values is list of strings or dicts.
866 values is list of strings or dicts.
868 plural is plural of name, if not simply name + 's'.
867 plural is plural of name, if not simply name + 's'.
869
868
870 expansion works like this, given name 'foo'.
869 expansion works like this, given name 'foo'.
871
870
872 if values is empty, expand 'no_foos'.
871 if values is empty, expand 'no_foos'.
873
872
874 if 'foo' not in template map, return values as a string,
873 if 'foo' not in template map, return values as a string,
875 joined by space.
874 joined by space.
876
875
877 expand 'start_foos'.
876 expand 'start_foos'.
878
877
879 for each value, expand 'foo'. if 'last_foo' in template
878 for each value, expand 'foo'. if 'last_foo' in template
880 map, expand it instead of 'foo' for last key.
879 map, expand it instead of 'foo' for last key.
881
880
882 expand 'end_foos'.
881 expand 'end_foos'.
883 '''
882 '''
884 if plural: names = plural
883 if plural: names = plural
885 else: names = name + 's'
884 else: names = name + 's'
886 if not values:
885 if not values:
887 noname = 'no_' + names
886 noname = 'no_' + names
888 if noname in self.t:
887 if noname in self.t:
889 yield self.t(noname, **args)
888 yield self.t(noname, **args)
890 return
889 return
891 if name not in self.t:
890 if name not in self.t:
892 if isinstance(values[0], str):
891 if isinstance(values[0], str):
893 yield ' '.join(values)
892 yield ' '.join(values)
894 else:
893 else:
895 for v in values:
894 for v in values:
896 yield dict(v, **args)
895 yield dict(v, **args)
897 return
896 return
898 startname = 'start_' + names
897 startname = 'start_' + names
899 if startname in self.t:
898 if startname in self.t:
900 yield self.t(startname, **args)
899 yield self.t(startname, **args)
901 vargs = args.copy()
900 vargs = args.copy()
902 def one(v, tag=name):
901 def one(v, tag=name):
903 try:
902 try:
904 vargs.update(v)
903 vargs.update(v)
905 except (AttributeError, ValueError):
904 except (AttributeError, ValueError):
906 try:
905 try:
907 for a, b in v:
906 for a, b in v:
908 vargs[a] = b
907 vargs[a] = b
909 except ValueError:
908 except ValueError:
910 vargs[name] = v
909 vargs[name] = v
911 return self.t(tag, **vargs)
910 return self.t(tag, **vargs)
912 lastname = 'last_' + name
911 lastname = 'last_' + name
913 if lastname in self.t:
912 if lastname in self.t:
914 last = values.pop()
913 last = values.pop()
915 else:
914 else:
916 last = None
915 last = None
917 for v in values:
916 for v in values:
918 yield one(v)
917 yield one(v)
919 if last is not None:
918 if last is not None:
920 yield one(last, tag=lastname)
919 yield one(last, tag=lastname)
921 endname = 'end_' + names
920 endname = 'end_' + names
922 if endname in self.t:
921 if endname in self.t:
923 yield self.t(endname, **args)
922 yield self.t(endname, **args)
924
923
925 def showbranches(**args):
924 def showbranches(**args):
926 branch = changes[5].get("branch")
925 branch = changes[5].get("branch")
927 if branch != 'default':
926 if branch != 'default':
928 branch = util.tolocal(branch)
927 branch = util.tolocal(branch)
929 return showlist('branch', [branch], plural='branches', **args)
928 return showlist('branch', [branch], plural='branches', **args)
930
929
931 def showparents(**args):
930 def showparents(**args):
932 parents = [[('rev', p), ('node', hex(log.node(p)))]
931 parents = [[('rev', p), ('node', hex(log.node(p)))]
933 for p in self._meaningful_parentrevs(log, rev)]
932 for p in self._meaningful_parentrevs(log, rev)]
934 return showlist('parent', parents, **args)
933 return showlist('parent', parents, **args)
935
934
936 def showtags(**args):
935 def showtags(**args):
937 return showlist('tag', self.repo.nodetags(changenode), **args)
936 return showlist('tag', self.repo.nodetags(changenode), **args)
938
937
939 def showextras(**args):
938 def showextras(**args):
940 extras = changes[5].items()
939 extras = changes[5].items()
941 extras.sort()
940 extras.sort()
942 for key, value in extras:
941 for key, value in extras:
943 args = args.copy()
942 args = args.copy()
944 args.update(dict(key=key, value=value))
943 args.update(dict(key=key, value=value))
945 yield self.t('extra', **args)
944 yield self.t('extra', **args)
946
945
947 def showcopies(**args):
946 def showcopies(**args):
948 c = [{'name': x[0], 'source': x[1]} for x in copies]
947 c = [{'name': x[0], 'source': x[1]} for x in copies]
949 return showlist('file_copy', c, plural='file_copies', **args)
948 return showlist('file_copy', c, plural='file_copies', **args)
950
949
951 if self.ui.debugflag:
950 if self.ui.debugflag:
952 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
951 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
953 def showfiles(**args):
952 def showfiles(**args):
954 return showlist('file', files[0], **args)
953 return showlist('file', files[0], **args)
955 def showadds(**args):
954 def showadds(**args):
956 return showlist('file_add', files[1], **args)
955 return showlist('file_add', files[1], **args)
957 def showdels(**args):
956 def showdels(**args):
958 return showlist('file_del', files[2], **args)
957 return showlist('file_del', files[2], **args)
959 def showmanifest(**args):
958 def showmanifest(**args):
960 args = args.copy()
959 args = args.copy()
961 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
960 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
962 node=hex(changes[0])))
961 node=hex(changes[0])))
963 return self.t('manifest', **args)
962 return self.t('manifest', **args)
964 else:
963 else:
965 def showfiles(**args):
964 def showfiles(**args):
966 return showlist('file', changes[3], **args)
965 return showlist('file', changes[3], **args)
967 showadds = ''
966 showadds = ''
968 showdels = ''
967 showdels = ''
969 showmanifest = ''
968 showmanifest = ''
970
969
971 defprops = {
970 defprops = {
972 'author': changes[1],
971 'author': changes[1],
973 'branches': showbranches,
972 'branches': showbranches,
974 'date': changes[2],
973 'date': changes[2],
975 'desc': changes[4].strip(),
974 'desc': changes[4].strip(),
976 'file_adds': showadds,
975 'file_adds': showadds,
977 'file_dels': showdels,
976 'file_dels': showdels,
978 'files': showfiles,
977 'files': showfiles,
979 'file_copies': showcopies,
978 'file_copies': showcopies,
980 'manifest': showmanifest,
979 'manifest': showmanifest,
981 'node': hex(changenode),
980 'node': hex(changenode),
982 'parents': showparents,
981 'parents': showparents,
983 'rev': rev,
982 'rev': rev,
984 'tags': showtags,
983 'tags': showtags,
985 'extras': showextras,
984 'extras': showextras,
986 }
985 }
987 props = props.copy()
986 props = props.copy()
988 props.update(defprops)
987 props.update(defprops)
989
988
990 try:
989 try:
991 if self.ui.debugflag and 'header_debug' in self.t:
990 if self.ui.debugflag and 'header_debug' in self.t:
992 key = 'header_debug'
991 key = 'header_debug'
993 elif self.ui.quiet and 'header_quiet' in self.t:
992 elif self.ui.quiet and 'header_quiet' in self.t:
994 key = 'header_quiet'
993 key = 'header_quiet'
995 elif self.ui.verbose and 'header_verbose' in self.t:
994 elif self.ui.verbose and 'header_verbose' in self.t:
996 key = 'header_verbose'
995 key = 'header_verbose'
997 elif 'header' in self.t:
996 elif 'header' in self.t:
998 key = 'header'
997 key = 'header'
999 else:
998 else:
1000 key = ''
999 key = ''
1001 if key:
1000 if key:
1002 h = templater.stringify(self.t(key, **props))
1001 h = templater.stringify(self.t(key, **props))
1003 if self.buffered:
1002 if self.buffered:
1004 self.header[rev] = h
1003 self.header[rev] = h
1005 else:
1004 else:
1006 self.ui.write(h)
1005 self.ui.write(h)
1007 if self.ui.debugflag and 'changeset_debug' in self.t:
1006 if self.ui.debugflag and 'changeset_debug' in self.t:
1008 key = 'changeset_debug'
1007 key = 'changeset_debug'
1009 elif self.ui.quiet and 'changeset_quiet' in self.t:
1008 elif self.ui.quiet and 'changeset_quiet' in self.t:
1010 key = 'changeset_quiet'
1009 key = 'changeset_quiet'
1011 elif self.ui.verbose and 'changeset_verbose' in self.t:
1010 elif self.ui.verbose and 'changeset_verbose' in self.t:
1012 key = 'changeset_verbose'
1011 key = 'changeset_verbose'
1013 else:
1012 else:
1014 key = 'changeset'
1013 key = 'changeset'
1015 self.ui.write(templater.stringify(self.t(key, **props)))
1014 self.ui.write(templater.stringify(self.t(key, **props)))
1016 self.showpatch(changenode)
1015 self.showpatch(changenode)
1017 except KeyError, inst:
1016 except KeyError, inst:
1018 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
1017 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
1019 inst.args[0]))
1018 inst.args[0]))
1020 except SyntaxError, inst:
1019 except SyntaxError, inst:
1021 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
1020 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
1022
1021
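
The showlist helper above expands a list of values through optional template keys: no_<names> when the list is empty, otherwise start_<names>, one <name> expansion per value (with last_<name> for the final value if present), and end_<names>. The following is a hypothetical standalone sketch of that expansion order, with a plain dict of format strings standing in for the templater, not a copy of any shipped style:

# Sketch of showlist()'s expansion order; the dict below is illustrative.
def showlist_sketch(t, name, values, plural=None):
    names = plural or name + 's'
    if not values:
        if 'no_' + names in t:
            yield t['no_' + names]
        return
    if 'start_' + names in t:
        yield t['start_' + names]
    last = values.pop() if 'last_' + name in t else None
    for v in values:
        yield t[name] % {name: v}
    if last is not None:
        yield t['last_' + name] % {name: last}
    if 'end_' + names in t:
        yield t['end_' + names]

tmpl = {'start_tags': 'tags: ', 'tag': '%(tag)s, ', 'last_tag': '%(tag)s',
        'end_tags': '\n', 'no_tags': '(no tags)\n'}
print(''.join(showlist_sketch(tmpl, 'tag', ['tip', 'v1.0'])))   # tags: tip, v1.0
print(''.join(showlist_sketch(tmpl, 'tag', [])))                # (no tags)
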
1023 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
1022 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
1024 """show one changeset using template or regular display.
1023 """show one changeset using template or regular display.
1025
1024
1026 Display format will be the first non-empty hit of:
1025 Display format will be the first non-empty hit of:
1027 1. option 'template'
1026 1. option 'template'
1028 2. option 'style'
1027 2. option 'style'
1029 3. [ui] setting 'logtemplate'
1028 3. [ui] setting 'logtemplate'
1030 4. [ui] setting 'style'
1029 4. [ui] setting 'style'
1031 If all of these values are either unset or the empty string,
1030 If all of these values are either unset or the empty string,
1032 regular display via changeset_printer() is done.
1031 regular display via changeset_printer() is done.
1033 """
1032 """
1034 # options
1033 # options
1035 patch = False
1034 patch = False
1036 if opts.get('patch'):
1035 if opts.get('patch'):
1037 patch = matchfn or util.always
1036 patch = matchfn or util.always
1038
1037
1039 tmpl = opts.get('template')
1038 tmpl = opts.get('template')
1040 mapfile = None
1039 mapfile = None
1041 if tmpl:
1040 if tmpl:
1042 tmpl = templater.parsestring(tmpl, quoted=False)
1041 tmpl = templater.parsestring(tmpl, quoted=False)
1043 else:
1042 else:
1044 mapfile = opts.get('style')
1043 mapfile = opts.get('style')
1045 # ui settings
1044 # ui settings
1046 if not mapfile:
1045 if not mapfile:
1047 tmpl = ui.config('ui', 'logtemplate')
1046 tmpl = ui.config('ui', 'logtemplate')
1048 if tmpl:
1047 if tmpl:
1049 tmpl = templater.parsestring(tmpl)
1048 tmpl = templater.parsestring(tmpl)
1050 else:
1049 else:
1051 mapfile = ui.config('ui', 'style')
1050 mapfile = ui.config('ui', 'style')
1052
1051
1053 if tmpl or mapfile:
1052 if tmpl or mapfile:
1054 if mapfile:
1053 if mapfile:
1055 if not os.path.split(mapfile)[0]:
1054 if not os.path.split(mapfile)[0]:
1056 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1055 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1057 or templater.templatepath(mapfile))
1056 or templater.templatepath(mapfile))
1058 if mapname: mapfile = mapname
1057 if mapname: mapfile = mapname
1059 try:
1058 try:
1060 t = changeset_templater(ui, repo, patch, mapfile, buffered)
1059 t = changeset_templater(ui, repo, patch, mapfile, buffered)
1061 except SyntaxError, inst:
1060 except SyntaxError, inst:
1062 raise util.Abort(inst.args[0])
1061 raise util.Abort(inst.args[0])
1063 if tmpl: t.use_template(tmpl)
1062 if tmpl: t.use_template(tmpl)
1064 return t
1063 return t
1065 return changeset_printer(ui, repo, patch, buffered)
1064 return changeset_printer(ui, repo, patch, buffered)
1066
1065
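
The resolution order documented above (--template, then --style, then [ui] logtemplate, then [ui] style) amounts to a small first-non-empty lookup. A hedged sketch of just that precedence, with a plain dict standing in for the options and config sources (the tuple keys are an assumption of this sketch, not Mercurial's config API):

# Sketch of show_changeset()'s precedence; empty or missing values fall through.
def pick_display(opts, config):
    tmpl, mapfile = opts.get('template'), None
    if not tmpl:
        mapfile = opts.get('style')
    if not tmpl and not mapfile:
        tmpl = config.get(('ui', 'logtemplate'))
    if not tmpl and not mapfile:
        mapfile = config.get(('ui', 'style'))
    if tmpl:
        return 'template', tmpl
    if mapfile:
        return 'style', mapfile
    return 'default', None

print(pick_display({'template': '{rev}\n'}, {}))          # ('template', '{rev}\n')
print(pick_display({}, {('ui', 'style'): 'compact'}))     # ('style', 'compact')
print(pick_display({}, {}))                               # ('default', None)
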
1067 def finddate(ui, repo, date):
1066 def finddate(ui, repo, date):
1068 """Find the tipmost changeset that matches the given date spec"""
1067 """Find the tipmost changeset that matches the given date spec"""
1069 df = util.matchdate(date + " to " + date)
1068 df = util.matchdate(date + " to " + date)
1070 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1069 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1071 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
1070 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
1072 results = {}
1071 results = {}
1073 for st, rev, fns in changeiter:
1072 for st, rev, fns in changeiter:
1074 if st == 'add':
1073 if st == 'add':
1075 d = get(rev)[2]
1074 d = get(rev)[2]
1076 if df(d[0]):
1075 if df(d[0]):
1077 results[rev] = d
1076 results[rev] = d
1078 elif st == 'iter':
1077 elif st == 'iter':
1079 if rev in results:
1078 if rev in results:
1080 ui.status("Found revision %s from %s\n" %
1079 ui.status("Found revision %s from %s\n" %
1081 (rev, util.datestr(results[rev])))
1080 (rev, util.datestr(results[rev])))
1082 return str(rev)
1081 return str(rev)
1083
1082
1084 raise util.Abort(_("revision matching date not found"))
1083 raise util.Abort(_("revision matching date not found"))
1085
1084
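
finddate leans on the walkchangerevs protocol: during the out-of-order 'add' pass it records every revision whose commit date matches, and the first hit seen on the in-order 'iter' pass is the tipmost one, which it returns. The same idea in a standalone sketch over plain (rev, date) pairs, without the windowed iterator:

# Sketch of "collect the matching revisions, then return the newest one".
def find_tipmost(revisions, matches):
    """revisions: list of (rev, date) pairs; matches(date) -> bool.
    Returns the highest matching rev, or None."""
    found = dict((rev, date) for rev, date in revisions if matches(date))
    for rev, _date in sorted(revisions, reverse=True):   # tip first
        if rev in found:
            return rev
    return None

revs = [(0, '2007-05-01'), (1, '2007-05-02'), (2, '2007-05-02'), (3, '2007-05-03')]
print(find_tipmost(revs, lambda d: d == '2007-05-02'))    # -> 2
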
1086 def walkchangerevs(ui, repo, pats, change, opts):
1085 def walkchangerevs(ui, repo, pats, change, opts):
1087 '''Iterate over files and the revs they changed in.
1086 '''Iterate over files and the revs they changed in.
1088
1087
1089 Callers most commonly need to iterate backwards over the history
1088 Callers most commonly need to iterate backwards over the history
1090 they are interested in. Doing so has awful (quadratic-looking)
1089 they are interested in. Doing so has awful (quadratic-looking)
1091 performance, so we use iterators in a "windowed" way.
1090 performance, so we use iterators in a "windowed" way.
1092
1091
1093 We walk a window of revisions in the desired order. Within the
1092 We walk a window of revisions in the desired order. Within the
1094 window, we first walk forwards to gather data, then in the desired
1093 window, we first walk forwards to gather data, then in the desired
1095 order (usually backwards) to display it.
1094 order (usually backwards) to display it.
1096
1095
1097 This function returns an (iterator, matchfn) tuple. The iterator
1096 This function returns an (iterator, matchfn) tuple. The iterator
1098 yields 3-tuples. They will be of one of the following forms:
1097 yields 3-tuples. They will be of one of the following forms:
1099
1098
1100 "window", incrementing, lastrev: stepping through a window,
1099 "window", incrementing, lastrev: stepping through a window,
1101 positive if walking forwards through revs, last rev in the
1100 positive if walking forwards through revs, last rev in the
1102 sequence iterated over - use to reset state for the current window
1101 sequence iterated over - use to reset state for the current window
1103
1102
1104 "add", rev, fns: out-of-order traversal of the given file names
1103 "add", rev, fns: out-of-order traversal of the given file names
1105 fns, which changed during revision rev - use to gather data for
1104 fns, which changed during revision rev - use to gather data for
1106 possible display
1105 possible display
1107
1106
1108 "iter", rev, None: in-order traversal of the revs earlier iterated
1107 "iter", rev, None: in-order traversal of the revs earlier iterated
1109 over with "add" - use to display data'''
1108 over with "add" - use to display data'''
1110
1109
1111 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1110 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1112 if start < end:
1111 if start < end:
1113 while start < end:
1112 while start < end:
1114 yield start, min(windowsize, end-start)
1113 yield start, min(windowsize, end-start)
1115 start += windowsize
1114 start += windowsize
1116 if windowsize < sizelimit:
1115 if windowsize < sizelimit:
1117 windowsize *= 2
1116 windowsize *= 2
1118 else:
1117 else:
1119 while start > end:
1118 while start > end:
1120 yield start, min(windowsize, start-end-1)
1119 yield start, min(windowsize, start-end-1)
1121 start -= windowsize
1120 start -= windowsize
1122 if windowsize < sizelimit:
1121 if windowsize < sizelimit:
1123 windowsize *= 2
1122 windowsize *= 2
1124
1123
1125 files, matchfn, anypats = matchpats(repo, pats, opts)
1124 files, matchfn, anypats = matchpats(repo, pats, opts)
1126 follow = opts.get('follow') or opts.get('follow_first')
1125 follow = opts.get('follow') or opts.get('follow_first')
1127
1126
1128 if repo.changelog.count() == 0:
1127 if repo.changelog.count() == 0:
1129 return [], matchfn
1128 return [], matchfn
1130
1129
1131 if follow:
1130 if follow:
1132 defrange = '%s:0' % repo.changectx().rev()
1131 defrange = '%s:0' % repo.changectx().rev()
1133 else:
1132 else:
1134 defrange = 'tip:0'
1133 defrange = 'tip:0'
1135 revs = revrange(repo, opts['rev'] or [defrange])
1134 revs = revrange(repo, opts['rev'] or [defrange])
1136 wanted = {}
1135 wanted = {}
1137 slowpath = anypats or opts.get('removed')
1136 slowpath = anypats or opts.get('removed')
1138 fncache = {}
1137 fncache = {}
1139
1138
1140 if not slowpath and not files:
1139 if not slowpath and not files:
1141 # No files, no patterns. Display all revs.
1140 # No files, no patterns. Display all revs.
1142 wanted = dict.fromkeys(revs)
1141 wanted = dict.fromkeys(revs)
1143 copies = []
1142 copies = []
1144 if not slowpath:
1143 if not slowpath:
1145 # Only files, no patterns. Check the history of each file.
1144 # Only files, no patterns. Check the history of each file.
1146 def filerevgen(filelog, node):
1145 def filerevgen(filelog, node):
1147 cl_count = repo.changelog.count()
1146 cl_count = repo.changelog.count()
1148 if node is None:
1147 if node is None:
1149 last = filelog.count() - 1
1148 last = filelog.count() - 1
1150 else:
1149 else:
1151 last = filelog.rev(node)
1150 last = filelog.rev(node)
1152 for i, window in increasing_windows(last, nullrev):
1151 for i, window in increasing_windows(last, nullrev):
1153 revs = []
1152 revs = []
1154 for j in xrange(i - window, i + 1):
1153 for j in xrange(i - window, i + 1):
1155 n = filelog.node(j)
1154 n = filelog.node(j)
1156 revs.append((filelog.linkrev(n),
1155 revs.append((filelog.linkrev(n),
1157 follow and filelog.renamed(n)))
1156 follow and filelog.renamed(n)))
1158 revs.reverse()
1157 revs.reverse()
1159 for rev in revs:
1158 for rev in revs:
1160 # only yield revs for which we have the changelog; this can
1159 # only yield revs for which we have the changelog; this can
1161 # happen while doing "hg log" during a pull or commit
1160 # happen while doing "hg log" during a pull or commit
1162 if rev[0] < cl_count:
1161 if rev[0] < cl_count:
1163 yield rev
1162 yield rev
1164 def iterfiles():
1163 def iterfiles():
1165 for filename in files:
1164 for filename in files:
1166 yield filename, None
1165 yield filename, None
1167 for filename_node in copies:
1166 for filename_node in copies:
1168 yield filename_node
1167 yield filename_node
1169 minrev, maxrev = min(revs), max(revs)
1168 minrev, maxrev = min(revs), max(revs)
1170 for file_, node in iterfiles():
1169 for file_, node in iterfiles():
1171 filelog = repo.file(file_)
1170 filelog = repo.file(file_)
1172 # A zero count may be a directory or deleted file, so
1171 # A zero count may be a directory or deleted file, so
1173 # try to find matching entries on the slow path.
1172 # try to find matching entries on the slow path.
1174 if filelog.count() == 0:
1173 if filelog.count() == 0:
1175 slowpath = True
1174 slowpath = True
1176 break
1175 break
1177 for rev, copied in filerevgen(filelog, node):
1176 for rev, copied in filerevgen(filelog, node):
1178 if rev <= maxrev:
1177 if rev <= maxrev:
1179 if rev < minrev:
1178 if rev < minrev:
1180 break
1179 break
1181 fncache.setdefault(rev, [])
1180 fncache.setdefault(rev, [])
1182 fncache[rev].append(file_)
1181 fncache[rev].append(file_)
1183 wanted[rev] = 1
1182 wanted[rev] = 1
1184 if follow and copied:
1183 if follow and copied:
1185 copies.append(copied)
1184 copies.append(copied)
1186 if slowpath:
1185 if slowpath:
1187 if follow:
1186 if follow:
1188 raise util.Abort(_('can only follow copies/renames for explicit '
1187 raise util.Abort(_('can only follow copies/renames for explicit '
1189 'file names'))
1188 'file names'))
1190
1189
1191 # The slow path checks files modified in every changeset.
1190 # The slow path checks files modified in every changeset.
1192 def changerevgen():
1191 def changerevgen():
1193 for i, window in increasing_windows(repo.changelog.count()-1,
1192 for i, window in increasing_windows(repo.changelog.count()-1,
1194 nullrev):
1193 nullrev):
1195 for j in xrange(i - window, i + 1):
1194 for j in xrange(i - window, i + 1):
1196 yield j, change(j)[3]
1195 yield j, change(j)[3]
1197
1196
1198 for rev, changefiles in changerevgen():
1197 for rev, changefiles in changerevgen():
1199 matches = filter(matchfn, changefiles)
1198 matches = filter(matchfn, changefiles)
1200 if matches:
1199 if matches:
1201 fncache[rev] = matches
1200 fncache[rev] = matches
1202 wanted[rev] = 1
1201 wanted[rev] = 1
1203
1202
1204 class followfilter:
1203 class followfilter:
1205 def __init__(self, onlyfirst=False):
1204 def __init__(self, onlyfirst=False):
1206 self.startrev = nullrev
1205 self.startrev = nullrev
1207 self.roots = []
1206 self.roots = []
1208 self.onlyfirst = onlyfirst
1207 self.onlyfirst = onlyfirst
1209
1208
1210 def match(self, rev):
1209 def match(self, rev):
1211 def realparents(rev):
1210 def realparents(rev):
1212 if self.onlyfirst:
1211 if self.onlyfirst:
1213 return repo.changelog.parentrevs(rev)[0:1]
1212 return repo.changelog.parentrevs(rev)[0:1]
1214 else:
1213 else:
1215 return filter(lambda x: x != nullrev,
1214 return filter(lambda x: x != nullrev,
1216 repo.changelog.parentrevs(rev))
1215 repo.changelog.parentrevs(rev))
1217
1216
1218 if self.startrev == nullrev:
1217 if self.startrev == nullrev:
1219 self.startrev = rev
1218 self.startrev = rev
1220 return True
1219 return True
1221
1220
1222 if rev > self.startrev:
1221 if rev > self.startrev:
1223 # forward: all descendants
1222 # forward: all descendants
1224 if not self.roots:
1223 if not self.roots:
1225 self.roots.append(self.startrev)
1224 self.roots.append(self.startrev)
1226 for parent in realparents(rev):
1225 for parent in realparents(rev):
1227 if parent in self.roots:
1226 if parent in self.roots:
1228 self.roots.append(rev)
1227 self.roots.append(rev)
1229 return True
1228 return True
1230 else:
1229 else:
1231 # backwards: all parents
1230 # backwards: all parents
1232 if not self.roots:
1231 if not self.roots:
1233 self.roots.extend(realparents(self.startrev))
1232 self.roots.extend(realparents(self.startrev))
1234 if rev in self.roots:
1233 if rev in self.roots:
1235 self.roots.remove(rev)
1234 self.roots.remove(rev)
1236 self.roots.extend(realparents(rev))
1235 self.roots.extend(realparents(rev))
1237 return True
1236 return True
1238
1237
1239 return False
1238 return False
1240
1239
1241 # it might be worthwhile to do this in the iterator if the rev range
1240 # it might be worthwhile to do this in the iterator if the rev range
1242 # is descending and the prune args are all within that range
1241 # is descending and the prune args are all within that range
1243 for rev in opts.get('prune', ()):
1242 for rev in opts.get('prune', ()):
1244 rev = repo.changelog.rev(repo.lookup(rev))
1243 rev = repo.changelog.rev(repo.lookup(rev))
1245 ff = followfilter()
1244 ff = followfilter()
1246 stop = min(revs[0], revs[-1])
1245 stop = min(revs[0], revs[-1])
1247 for x in xrange(rev, stop-1, -1):
1246 for x in xrange(rev, stop-1, -1):
1248 if ff.match(x) and x in wanted:
1247 if ff.match(x) and x in wanted:
1249 del wanted[x]
1248 del wanted[x]
1250
1249
1251 def iterate():
1250 def iterate():
1252 if follow and not files:
1251 if follow and not files:
1253 ff = followfilter(onlyfirst=opts.get('follow_first'))
1252 ff = followfilter(onlyfirst=opts.get('follow_first'))
1254 def want(rev):
1253 def want(rev):
1255 if ff.match(rev) and rev in wanted:
1254 if ff.match(rev) and rev in wanted:
1256 return True
1255 return True
1257 return False
1256 return False
1258 else:
1257 else:
1259 def want(rev):
1258 def want(rev):
1260 return rev in wanted
1259 return rev in wanted
1261
1260
1262 for i, window in increasing_windows(0, len(revs)):
1261 for i, window in increasing_windows(0, len(revs)):
1263 yield 'window', revs[0] < revs[-1], revs[-1]
1262 yield 'window', revs[0] < revs[-1], revs[-1]
1264 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1263 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1265 srevs = list(nrevs)
1264 srevs = list(nrevs)
1266 srevs.sort()
1265 srevs.sort()
1267 for rev in srevs:
1266 for rev in srevs:
1268 fns = fncache.get(rev)
1267 fns = fncache.get(rev)
1269 if not fns:
1268 if not fns:
1270 def fns_generator():
1269 def fns_generator():
1271 for f in change(rev)[3]:
1270 for f in change(rev)[3]:
1272 if matchfn(f):
1271 if matchfn(f):
1273 yield f
1272 yield f
1274 fns = fns_generator()
1273 fns = fns_generator()
1275 yield 'add', rev, fns
1274 yield 'add', rev, fns
1276 for rev in nrevs:
1275 for rev in nrevs:
1277 yield 'iter', rev, None
1276 yield 'iter', rev, None
1278 return iterate(), matchfn
1277 return iterate(), matchfn
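
A caller of walkchangerevs() drives the three-tuple protocol described in its docstring: reset per-window state on 'window', gather data on the out-of-order 'add' entries, and display on the in-order 'iter' entries. A standalone sketch of a typical consumer loop, with a hand-written event stream standing in for the real iterator (real callers such as hg log buffer through a changeset_printer as above):

# Hand-written stream of the ('window'|'add'|'iter', ...) tuples documented
# above; a real caller would obtain this iterator from walkchangerevs().
import sys

events = [
    ('window', False, 1),        # new window: walking backwards, last rev is 1
    ('add', 1, ['b.txt']),       # out-of-order pass: gather data per revision
    ('add', 2, ['a.txt']),
    ('iter', 2, None),           # in-order pass: display, newest first here
    ('iter', 1, None),
]

buffered = {}
for st, rev, fns in events:
    if st == 'window':
        buffered.clear()                      # reset per-window state
    elif st == 'add':
        buffered[rev] = 'rev %d touches %s\n' % (rev, ', '.join(fns))
    elif st == 'iter':
        sys.stdout.write(buffered.pop(rev))
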
@@ -1,3180 +1,3179
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, urllib, shlex, stat
11 import bisect, os, re, sys, urllib, shlex, stat
12 import ui, hg, util, revlog, bundlerepo, extensions
12 import ui, hg, util, revlog, bundlerepo, extensions
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import errno, version, socket
14 import errno, version, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
17 # Commands start here, listed alphabetically
17 # Commands start here, listed alphabetically
18
18
19 def add(ui, repo, *pats, **opts):
19 def add(ui, repo, *pats, **opts):
20 """add the specified files on the next commit
20 """add the specified files on the next commit
21
21
22 Schedule files to be version controlled and added to the repository.
22 Schedule files to be version controlled and added to the repository.
23
23
24 The files will be added to the repository at the next commit. To
24 The files will be added to the repository at the next commit. To
25 undo an add before that, see hg revert.
25 undo an add before that, see hg revert.
26
26
27 If no names are given, add all files in the repository.
27 If no names are given, add all files in the repository.
28 """
28 """
29
29
30 names = []
30 names = []
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
32 if exact:
32 if exact:
33 if ui.verbose:
33 if ui.verbose:
34 ui.status(_('adding %s\n') % rel)
34 ui.status(_('adding %s\n') % rel)
35 names.append(abs)
35 names.append(abs)
36 elif abs not in repo.dirstate:
36 elif abs not in repo.dirstate:
37 ui.status(_('adding %s\n') % rel)
37 ui.status(_('adding %s\n') % rel)
38 names.append(abs)
38 names.append(abs)
39 if not opts.get('dry_run'):
39 if not opts.get('dry_run'):
40 repo.add(names)
40 repo.add(names)
41
41
42 def addremove(ui, repo, *pats, **opts):
42 def addremove(ui, repo, *pats, **opts):
43 """add all new files, delete all missing files
43 """add all new files, delete all missing files
44
44
45 Add all new files and remove all missing files from the repository.
45 Add all new files and remove all missing files from the repository.
46
46
47 New files are ignored if they match any of the patterns in .hgignore. As
47 New files are ignored if they match any of the patterns in .hgignore. As
48 with add, these changes take effect at the next commit.
48 with add, these changes take effect at the next commit.
49
49
50 Use the -s option to detect renamed files. With a parameter > 0,
50 Use the -s option to detect renamed files. With a parameter > 0,
51 this compares every removed file with every added file and records
51 this compares every removed file with every added file and records
52 those similar enough as renames. This option takes a percentage
52 those similar enough as renames. This option takes a percentage
53 between 0 (disabled) and 100 (files must be identical) as its
53 between 0 (disabled) and 100 (files must be identical) as its
54 parameter. Detecting renamed files this way can be expensive.
54 parameter. Detecting renamed files this way can be expensive.
55 """
55 """
56 sim = float(opts.get('similarity') or 0)
56 sim = float(opts.get('similarity') or 0)
57 if sim < 0 or sim > 100:
57 if sim < 0 or sim > 100:
58 raise util.Abort(_('similarity must be between 0 and 100'))
58 raise util.Abort(_('similarity must be between 0 and 100'))
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
60
60
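
addremove validates -s/--similarity as a percentage and hands it on as a 0-1 fraction (sim/100). A tiny sketch of that conversion, using ValueError where the command raises util.Abort:

# Sketch of the -s handling: a 0-100 percentage becomes a 0.0-1.0 threshold.
def similarity_threshold(value):
    sim = float(value or 0)
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0

print(similarity_threshold('75'))   # 0.75
print(similarity_threshold(None))   # 0.0
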
61 def annotate(ui, repo, *pats, **opts):
61 def annotate(ui, repo, *pats, **opts):
62 """show changeset information per file line
62 """show changeset information per file line
63
63
64 List changes in files, showing the revision id responsible for each line.
64 List changes in files, showing the revision id responsible for each line.
65
65
66 This command is useful to discover who did a change or when a change took
66 This command is useful to discover who did a change or when a change took
67 place.
67 place.
68
68
69 Without the -a option, annotate will avoid processing files it
69 Without the -a option, annotate will avoid processing files it
70 detects as binary. With -a, annotate will generate an annotation
70 detects as binary. With -a, annotate will generate an annotation
71 anyway, probably with undesirable results.
71 anyway, probably with undesirable results.
72 """
72 """
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
74
74
75 if not pats:
75 if not pats:
76 raise util.Abort(_('at least one file name or pattern required'))
76 raise util.Abort(_('at least one file name or pattern required'))
77
77
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
79 ('number', lambda x: str(x[0].rev())),
79 ('number', lambda x: str(x[0].rev())),
80 ('changeset', lambda x: short(x[0].node())),
80 ('changeset', lambda x: short(x[0].node())),
81 ('date', getdate),
81 ('date', getdate),
82 ('follow', lambda x: x[0].path()),
82 ('follow', lambda x: x[0].path()),
83 ]
83 ]
84
84
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
86 and not opts['follow']):
86 and not opts['follow']):
87 opts['number'] = 1
87 opts['number'] = 1
88
88
89 linenumber = opts.get('line_number') is not None
89 linenumber = opts.get('line_number') is not None
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
92
92
93 funcmap = [func for op, func in opmap if opts.get(op)]
93 funcmap = [func for op, func in opmap if opts.get(op)]
94 if linenumber:
94 if linenumber:
95 lastfunc = funcmap[-1]
95 lastfunc = funcmap[-1]
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
97
97
98 ctx = repo.changectx(opts['rev'])
98 ctx = repo.changectx(opts['rev'])
99
99
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
101 node=ctx.node()):
101 node=ctx.node()):
102 fctx = ctx.filectx(abs)
102 fctx = ctx.filectx(abs)
103 if not opts['text'] and util.binary(fctx.data()):
103 if not opts['text'] and util.binary(fctx.data()):
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
105 continue
105 continue
106
106
107 lines = fctx.annotate(follow=opts.get('follow'),
107 lines = fctx.annotate(follow=opts.get('follow'),
108 linenumber=linenumber)
108 linenumber=linenumber)
109 pieces = []
109 pieces = []
110
110
111 for f in funcmap:
111 for f in funcmap:
112 l = [f(n) for n, dummy in lines]
112 l = [f(n) for n, dummy in lines]
113 if l:
113 if l:
114 m = max(map(len, l))
114 m = max(map(len, l))
115 pieces.append(["%*s" % (m, x) for x in l])
115 pieces.append(["%*s" % (m, x) for x in l])
116
116
117 if pieces:
117 if pieces:
118 for p, l in zip(zip(*pieces), lines):
118 for p, l in zip(zip(*pieces), lines):
119 ui.write("%s: %s" % (" ".join(p), l[1]))
119 ui.write("%s: %s" % (" ".join(p), l[1]))
120
120
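
The annotate loop builds one column per requested field (user, number, changeset, date, ...), pads each column to its widest entry with "%*s", then zips the columns back together in front of each line. A standalone sketch of just that alignment step, with made-up sample data:

# Sketch of annotate's column alignment: pad every column to its widest cell,
# then join the per-line pieces ahead of the line text.
def align_columns(columns, lines):
    pieces = []
    for col in columns:
        width = max(len(x) for x in col)          # widest cell in this column
        pieces.append(['%*s' % (width, x) for x in col])
    return ['%s: %s' % (' '.join(p), l) for p, l in zip(zip(*pieces), lines)]

users = ['mpm', 'vadim.gelfer', 'mpm']
revs = ['4917', '3502', '4917']
for out in align_columns([users, revs], ['line one', 'line two', 'line three']):
    print(out)
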
121 def archive(ui, repo, dest, **opts):
121 def archive(ui, repo, dest, **opts):
122 '''create unversioned archive of a repository revision
122 '''create unversioned archive of a repository revision
123
123
124 By default, the revision used is the parent of the working
124 By default, the revision used is the parent of the working
125 directory; use "-r" to specify a different revision.
125 directory; use "-r" to specify a different revision.
126
126
127 To specify the type of archive to create, use "-t". Valid
127 To specify the type of archive to create, use "-t". Valid
128 types are:
128 types are:
129
129
130 "files" (default): a directory full of files
130 "files" (default): a directory full of files
131 "tar": tar archive, uncompressed
131 "tar": tar archive, uncompressed
132 "tbz2": tar archive, compressed using bzip2
132 "tbz2": tar archive, compressed using bzip2
133 "tgz": tar archive, compressed using gzip
133 "tgz": tar archive, compressed using gzip
134 "uzip": zip archive, uncompressed
134 "uzip": zip archive, uncompressed
135 "zip": zip archive, compressed using deflate
135 "zip": zip archive, compressed using deflate
136
136
137 The exact name of the destination archive or directory is given
137 The exact name of the destination archive or directory is given
138 using a format string; see "hg help export" for details.
138 using a format string; see "hg help export" for details.
139
139
140 Each member added to an archive file has a directory prefix
140 Each member added to an archive file has a directory prefix
141 prepended. Use "-p" to specify a format string for the prefix.
141 prepended. Use "-p" to specify a format string for the prefix.
142 The default is the basename of the archive, with suffixes removed.
142 The default is the basename of the archive, with suffixes removed.
143 '''
143 '''
144
144
145 ctx = repo.changectx(opts['rev'])
145 ctx = repo.changectx(opts['rev'])
146 if not ctx:
146 if not ctx:
147 raise util.Abort(_('repository has no revisions'))
147 raise util.Abort(_('repository has no revisions'))
148 node = ctx.node()
148 node = ctx.node()
149 dest = cmdutil.make_filename(repo, dest, node)
149 dest = cmdutil.make_filename(repo, dest, node)
150 if os.path.realpath(dest) == repo.root:
150 if os.path.realpath(dest) == repo.root:
151 raise util.Abort(_('repository root cannot be destination'))
151 raise util.Abort(_('repository root cannot be destination'))
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
153 kind = opts.get('type') or 'files'
153 kind = opts.get('type') or 'files'
154 prefix = opts['prefix']
154 prefix = opts['prefix']
155 if dest == '-':
155 if dest == '-':
156 if kind == 'files':
156 if kind == 'files':
157 raise util.Abort(_('cannot archive plain files to stdout'))
157 raise util.Abort(_('cannot archive plain files to stdout'))
158 dest = sys.stdout
158 dest = sys.stdout
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
160 prefix = cmdutil.make_filename(repo, prefix, node)
160 prefix = cmdutil.make_filename(repo, prefix, node)
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
162 matchfn, prefix)
162 matchfn, prefix)
163
163
164 def backout(ui, repo, node=None, rev=None, **opts):
164 def backout(ui, repo, node=None, rev=None, **opts):
165 '''reverse effect of earlier changeset
165 '''reverse effect of earlier changeset
166
166
167 Commit the backed out changes as a new changeset. The new
167 Commit the backed out changes as a new changeset. The new
168 changeset is a child of the backed out changeset.
168 changeset is a child of the backed out changeset.
169
169
170 If you back out a changeset other than the tip, a new head is
170 If you back out a changeset other than the tip, a new head is
171 created. This head is the parent of the working directory. If
171 created. This head is the parent of the working directory. If
172 you back out an old changeset, your working directory will appear
172 you back out an old changeset, your working directory will appear
173 old after the backout. You should merge the backout changeset
173 old after the backout. You should merge the backout changeset
174 with another head.
174 with another head.
175
175
176 The --merge option remembers the parent of the working directory
176 The --merge option remembers the parent of the working directory
177 before starting the backout, then merges the new head with that
177 before starting the backout, then merges the new head with that
178 changeset afterwards. This saves you from doing the merge by
178 changeset afterwards. This saves you from doing the merge by
179 hand. The result of this merge is not committed, as for a normal
179 hand. The result of this merge is not committed, as for a normal
180 merge.'''
180 merge.'''
181 if rev and node:
181 if rev and node:
182 raise util.Abort(_("please specify just one revision"))
182 raise util.Abort(_("please specify just one revision"))
183
183
184 if not rev:
184 if not rev:
185 rev = node
185 rev = node
186
186
187 if not rev:
187 if not rev:
188 raise util.Abort(_("please specify a revision to backout"))
188 raise util.Abort(_("please specify a revision to backout"))
189
189
190 cmdutil.bail_if_changed(repo)
190 cmdutil.bail_if_changed(repo)
191 op1, op2 = repo.dirstate.parents()
191 op1, op2 = repo.dirstate.parents()
192 if op2 != nullid:
192 if op2 != nullid:
193 raise util.Abort(_('outstanding uncommitted merge'))
193 raise util.Abort(_('outstanding uncommitted merge'))
194 node = repo.lookup(rev)
194 node = repo.lookup(rev)
195 p1, p2 = repo.changelog.parents(node)
195 p1, p2 = repo.changelog.parents(node)
196 if p1 == nullid:
196 if p1 == nullid:
197 raise util.Abort(_('cannot back out a change with no parents'))
197 raise util.Abort(_('cannot back out a change with no parents'))
198 if p2 != nullid:
198 if p2 != nullid:
199 if not opts['parent']:
199 if not opts['parent']:
200 raise util.Abort(_('cannot back out a merge changeset without '
200 raise util.Abort(_('cannot back out a merge changeset without '
201 '--parent'))
201 '--parent'))
202 p = repo.lookup(opts['parent'])
202 p = repo.lookup(opts['parent'])
203 if p not in (p1, p2):
203 if p not in (p1, p2):
204 raise util.Abort(_('%s is not a parent of %s') %
204 raise util.Abort(_('%s is not a parent of %s') %
205 (short(p), short(node)))
205 (short(p), short(node)))
206 parent = p
206 parent = p
207 else:
207 else:
208 if opts['parent']:
208 if opts['parent']:
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
210 parent = p1
210 parent = p1
211 hg.clean(repo, node, show_stats=False)
211 hg.clean(repo, node, show_stats=False)
212 revert_opts = opts.copy()
212 revert_opts = opts.copy()
213 revert_opts['date'] = None
213 revert_opts['date'] = None
214 revert_opts['all'] = True
214 revert_opts['all'] = True
215 revert_opts['rev'] = hex(parent)
215 revert_opts['rev'] = hex(parent)
216 revert(ui, repo, **revert_opts)
216 revert(ui, repo, **revert_opts)
217 commit_opts = opts.copy()
217 commit_opts = opts.copy()
218 commit_opts['addremove'] = False
218 commit_opts['addremove'] = False
219 if not commit_opts['message'] and not commit_opts['logfile']:
219 if not commit_opts['message'] and not commit_opts['logfile']:
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
221 commit_opts['force_editor'] = True
221 commit_opts['force_editor'] = True
222 commit(ui, repo, **commit_opts)
222 commit(ui, repo, **commit_opts)
223 def nice(node):
223 def nice(node):
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
225 ui.status(_('changeset %s backs out changeset %s\n') %
225 ui.status(_('changeset %s backs out changeset %s\n') %
226 (nice(repo.changelog.tip()), nice(node)))
226 (nice(repo.changelog.tip()), nice(node)))
227 if op1 != node:
227 if op1 != node:
228 if opts['merge']:
228 if opts['merge']:
229 ui.status(_('merging with changeset %s\n') % nice(op1))
229 ui.status(_('merging with changeset %s\n') % nice(op1))
230 hg.merge(repo, hex(op1))
230 hg.merge(repo, hex(op1))
231 else:
231 else:
232 ui.status(_('the backout changeset is a new head - '
232 ui.status(_('the backout changeset is a new head - '
233 'do not forget to merge\n'))
233 'do not forget to merge\n'))
234 ui.status(_('(use "backout --merge" '
234 ui.status(_('(use "backout --merge" '
235 'if you want to auto-merge)\n'))
235 'if you want to auto-merge)\n'))
236
236
237 def branch(ui, repo, label=None, **opts):
237 def branch(ui, repo, label=None, **opts):
238 """set or show the current branch name
238 """set or show the current branch name
239
239
240 With no argument, show the current branch name. With one argument,
240 With no argument, show the current branch name. With one argument,
241 set the working directory branch name (the branch does not exist in
241 set the working directory branch name (the branch does not exist in
242 the repository until the next commit).
242 the repository until the next commit).
243
243
244 Unless --force is specified, branch will not let you set a
244 Unless --force is specified, branch will not let you set a
245 branch name that shadows an existing branch.
245 branch name that shadows an existing branch.
246 """
246 """
247
247
248 if label:
248 if label:
249 if not opts.get('force') and label in repo.branchtags():
249 if not opts.get('force') and label in repo.branchtags():
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
251 raise util.Abort(_('a branch of the same name already exists'
251 raise util.Abort(_('a branch of the same name already exists'
252 ' (use --force to override)'))
252 ' (use --force to override)'))
253 repo.dirstate.setbranch(util.fromlocal(label))
253 repo.dirstate.setbranch(util.fromlocal(label))
254 ui.status(_('marked working directory as branch %s\n') % label)
254 ui.status(_('marked working directory as branch %s\n') % label)
255 else:
255 else:
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
257
257
258 def branches(ui, repo, active=False):
258 def branches(ui, repo, active=False):
259 """list repository named branches
259 """list repository named branches
260
260
261 List the repository's named branches, indicating which ones are
261 List the repository's named branches, indicating which ones are
262 inactive. If active is specified, only show active branches.
262 inactive. If active is specified, only show active branches.
263
263
264 A branch is considered active if it contains unmerged heads.
264 A branch is considered active if it contains unmerged heads.
265 """
265 """
266 b = repo.branchtags()
266 b = repo.branchtags()
267 heads = dict.fromkeys(repo.heads(), 1)
267 heads = dict.fromkeys(repo.heads(), 1)
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
269 l.sort()
269 l.sort()
270 l.reverse()
270 l.reverse()
271 for ishead, r, n, t in l:
271 for ishead, r, n, t in l:
272 if active and not ishead:
272 if active and not ishead:
273 # If we're only displaying active branches, abort the loop on
273 # If we're only displaying active branches, abort the loop on
274 # encountering the first inactive head
274 # encountering the first inactive head
275 break
275 break
276 else:
276 else:
277 hexfunc = ui.debugflag and hex or short
277 hexfunc = ui.debugflag and hex or short
278 if ui.quiet:
278 if ui.quiet:
279 ui.write("%s\n" % t)
279 ui.write("%s\n" % t)
280 else:
280 else:
281 spaces = " " * (30 - util.locallen(t))
281 spaces = " " * (30 - util.locallen(t))
282 # The code only gets here if inactive branches are being
282 # The code only gets here if inactive branches are being
283 # displayed or the branch is active.
283 # displayed or the branch is active.
284 isinactive = ((not ishead) and " (inactive)") or ''
284 isinactive = ((not ishead) and " (inactive)") or ''
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
286
286
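
branches() marks a branch inactive when its tip is no longer a repository head, and sorts active branches (then higher revisions) first. A hedged standalone sketch of that ordering, with plain names and nodes in place of real changelog data:

# Sketch of the active/inactive ordering above: branches whose tip is still a
# head sort first; the rest are "(inactive)". Sample data is illustrative.
def order_branches(branchtips, heads):
    """branchtips: {name: (rev, node)}; heads: set of head nodes."""
    rows = [((node in heads), rev, node, name)
            for name, (rev, node) in branchtips.items()]
    rows.sort()
    rows.reverse()
    return [(name, rev, not ishead) for ishead, rev, node, name in rows]

tips = {'default': (10, 'n10'), 'stable': (7, 'n7'), 'old': (3, 'n3')}
for name, rev, inactive in order_branches(tips, set(['n10', 'n7'])):
    print('%s %d%s' % (name, rev, ' (inactive)' if inactive else ''))
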
287 def bundle(ui, repo, fname, dest=None, **opts):
287 def bundle(ui, repo, fname, dest=None, **opts):
288 """create a changegroup file
288 """create a changegroup file
289
289
290 Generate a compressed changegroup file collecting changesets not
290 Generate a compressed changegroup file collecting changesets not
291 found in the other repository.
291 found in the other repository.
292
292
293 If no destination repository is specified, the destination is assumed
293 If no destination repository is specified, the destination is assumed
294 to have all the nodes specified by one or more --base parameters.
294 to have all the nodes specified by one or more --base parameters.
295
295
296 The bundle file can then be transferred using conventional means and
296 The bundle file can then be transferred using conventional means and
297 applied to another repository with the unbundle or pull command.
297 applied to another repository with the unbundle or pull command.
298 This is useful when direct push and pull are not available or when
298 This is useful when direct push and pull are not available or when
299 exporting an entire repository is undesirable.
299 exporting an entire repository is undesirable.
300
300
301 Applying bundles preserves all changeset contents including
301 Applying bundles preserves all changeset contents including
302 permissions, copy/rename information, and revision history.
302 permissions, copy/rename information, and revision history.
303 """
303 """
304 revs = opts.get('rev') or None
304 revs = opts.get('rev') or None
305 if revs:
305 if revs:
306 revs = [repo.lookup(rev) for rev in revs]
306 revs = [repo.lookup(rev) for rev in revs]
307 base = opts.get('base')
307 base = opts.get('base')
308 if base:
308 if base:
309 if dest:
309 if dest:
310 raise util.Abort(_("--base is incompatible with specifying "
310 raise util.Abort(_("--base is incompatible with specifying "
311 "a destination"))
311 "a destination"))
312 base = [repo.lookup(rev) for rev in base]
312 base = [repo.lookup(rev) for rev in base]
313 # create the right base
313 # create the right base
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
315 o = []
315 o = []
316 has = {nullid: None}
316 has = {nullid: None}
317 for n in base:
317 for n in base:
318 has.update(repo.changelog.reachable(n))
318 has.update(repo.changelog.reachable(n))
319 if revs:
319 if revs:
320 visit = list(revs)
320 visit = list(revs)
321 else:
321 else:
322 visit = repo.changelog.heads()
322 visit = repo.changelog.heads()
323 seen = {}
323 seen = {}
324 while visit:
324 while visit:
325 n = visit.pop(0)
325 n = visit.pop(0)
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
327 if len(parents) == 0:
327 if len(parents) == 0:
328 o.insert(0, n)
328 o.insert(0, n)
329 else:
329 else:
330 for p in parents:
330 for p in parents:
331 if p not in seen:
331 if p not in seen:
332 seen[p] = 1
332 seen[p] = 1
333 visit.append(p)
333 visit.append(p)
334 else:
334 else:
335 cmdutil.setremoteconfig(ui, opts)
335 cmdutil.setremoteconfig(ui, opts)
336 dest, revs = cmdutil.parseurl(
336 dest, revs = cmdutil.parseurl(
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
338 other = hg.repository(ui, dest)
338 other = hg.repository(ui, dest)
339 o = repo.findoutgoing(other, force=opts['force'])
339 o = repo.findoutgoing(other, force=opts['force'])
340
340
341 if revs:
341 if revs:
342 cg = repo.changegroupsubset(o, revs, 'bundle')
342 cg = repo.changegroupsubset(o, revs, 'bundle')
343 else:
343 else:
344 cg = repo.changegroup(o, 'bundle')
344 cg = repo.changegroup(o, 'bundle')
345 changegroup.writebundle(cg, fname, "HG10BZ")
345 changegroup.writebundle(cg, fname, "HG10BZ")
346
346
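# Editor's sketch (not part of the changeset): the --base branch above picks
# the changegroup roots by walking back from the requested heads and stopping
# at anything already reachable from a --base revision. The same idea over a
# plain parent map (all node names below are invented):
def find_roots(parents, heads, has):
    # parents: node -> parent nodes; heads: nodes to bundle up to;
    # has: nodes the receiving repository is assumed to have already.
    roots, visit, seen = [], list(heads), set()
    while visit:
        n = visit.pop(0)
        missing = [p for p in parents.get(n, []) if p not in has]
        if not missing:
            roots.insert(0, n)   # all of n's parents are on the other side
        else:
            for p in missing:
                if p not in seen:
                    seen.add(p)
                    visit.append(p)
    return roots

if __name__ == '__main__':
    parents = {'d': ['c'], 'c': ['b'], 'b': ['a'], 'a': []}
    print(find_roots(parents, ['d'], set(['a', 'b'])))   # ['c']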
347 def cat(ui, repo, file1, *pats, **opts):
347 def cat(ui, repo, file1, *pats, **opts):
348 """output the current or given revision of files
348 """output the current or given revision of files
349
349
350 Print the specified files as they were at the given revision.
350 Print the specified files as they were at the given revision.
351 If no revision is given, the parent of the working directory is used,
351 If no revision is given, the parent of the working directory is used,
352 or tip if no revision is checked out.
352 or tip if no revision is checked out.
353
353
354 Output may be to a file, in which case the name of the file is
354 Output may be to a file, in which case the name of the file is
355 given using a format string. The formatting rules are the same as
355 given using a format string. The formatting rules are the same as
356 for the export command, with the following additions:
356 for the export command, with the following additions:
357
357
358 %s basename of file being printed
358 %s basename of file being printed
359 %d dirname of file being printed, or '.' if in repo root
359 %d dirname of file being printed, or '.' if in repo root
360 %p root-relative path name of file being printed
360 %p root-relative path name of file being printed
361 """
361 """
362 ctx = repo.changectx(opts['rev'])
362 ctx = repo.changectx(opts['rev'])
363 err = 1
363 err = 1
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
365 ctx.node()):
365 ctx.node()):
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
367 fp.write(ctx.filectx(abs).data())
367 fp.write(ctx.filectx(abs).data())
368 err = 0
368 err = 0
369 return err
369 return err
370
370
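# Editor's sketch (not part of the changeset): what the %s/%d/%p additions in
# the cat help text expand to. cmdutil.make_file performs the real
# substitution; expand_name is a simplified stand-in for illustration only.
import os

def expand_name(fmt, pathname):
    return (fmt.replace('%s', os.path.basename(pathname))
               .replace('%d', os.path.dirname(pathname) or '.')
               .replace('%p', pathname))

if __name__ == '__main__':
    print(expand_name('%s', 'mercurial/commands.py'))         # commands.py
    print(expand_name('%d', 'mercurial/commands.py'))         # mercurial
    print(expand_name('out/%d/%s', 'mercurial/commands.py'))  # out/mercurial/commands.py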
371 def clone(ui, source, dest=None, **opts):
371 def clone(ui, source, dest=None, **opts):
372 """make a copy of an existing repository
372 """make a copy of an existing repository
373
373
374 Create a copy of an existing repository in a new directory.
374 Create a copy of an existing repository in a new directory.
375
375
376 If no destination directory name is specified, it defaults to the
376 If no destination directory name is specified, it defaults to the
377 basename of the source.
377 basename of the source.
378
378
379 The location of the source is added to the new repository's
379 The location of the source is added to the new repository's
380 .hg/hgrc file, as the default to be used for future pulls.
380 .hg/hgrc file, as the default to be used for future pulls.
381
381
382 For efficiency, hardlinks are used for cloning whenever the source
382 For efficiency, hardlinks are used for cloning whenever the source
383 and destination are on the same filesystem (note this applies only
383 and destination are on the same filesystem (note this applies only
384 to the repository data, not to the checked out files). Some
384 to the repository data, not to the checked out files). Some
385 filesystems, such as AFS, implement hardlinking incorrectly, but
385 filesystems, such as AFS, implement hardlinking incorrectly, but
386 do not report errors. In these cases, use the --pull option to
386 do not report errors. In these cases, use the --pull option to
387 avoid hardlinking.
387 avoid hardlinking.
388
388
389 You can safely clone repositories and checked out files using full
389 You can safely clone repositories and checked out files using full
390 hardlinks with
390 hardlinks with
391
391
392 $ cp -al REPO REPOCLONE
392 $ cp -al REPO REPOCLONE
393
393
394 which is the fastest way to clone. However, the operation is not
394 which is the fastest way to clone. However, the operation is not
395 atomic (making sure REPO is not modified during the operation is
395 atomic (making sure REPO is not modified during the operation is
396 up to you) and you have to make sure your editor breaks hardlinks
396 up to you) and you have to make sure your editor breaks hardlinks
397 (Emacs and most Linux Kernel tools do so).
397 (Emacs and most Linux Kernel tools do so).
398
398
399 If you use the -r option to clone up to a specific revision, no
399 If you use the -r option to clone up to a specific revision, no
400 subsequent revisions will be present in the cloned repository.
400 subsequent revisions will be present in the cloned repository.
401 This option implies --pull, even on local repositories.
401 This option implies --pull, even on local repositories.
402
402
403 See pull for valid source format details.
403 See pull for valid source format details.
404
404
405 It is possible to specify an ssh:// URL as the destination, but no
405 It is possible to specify an ssh:// URL as the destination, but no
406 .hg/hgrc file or working directory will be created on the remote side.
406 .hg/hgrc file or working directory will be created on the remote side.
407 Look at the help text for the pull command for important details
407 Look at the help text for the pull command for important details
408 about ssh:// URLs.
408 about ssh:// URLs.
409 """
409 """
410 cmdutil.setremoteconfig(ui, opts)
410 cmdutil.setremoteconfig(ui, opts)
411 hg.clone(ui, source, dest,
411 hg.clone(ui, source, dest,
412 pull=opts['pull'],
412 pull=opts['pull'],
413 stream=opts['uncompressed'],
413 stream=opts['uncompressed'],
414 rev=opts['rev'],
414 rev=opts['rev'],
415 update=not opts['noupdate'])
415 update=not opts['noupdate'])
416
416
417 def commit(ui, repo, *pats, **opts):
417 def commit(ui, repo, *pats, **opts):
418 """commit the specified files or all outstanding changes
418 """commit the specified files or all outstanding changes
419
419
420 Commit changes to the given files into the repository.
420 Commit changes to the given files into the repository.
421
421
422 If a list of files is omitted, all changes reported by "hg status"
422 If a list of files is omitted, all changes reported by "hg status"
423 will be committed.
423 will be committed.
424
424
425 If no commit message is specified, the editor configured in your hgrc
425 If no commit message is specified, the editor configured in your hgrc
426 or in the EDITOR environment variable is started to enter a message.
426 or in the EDITOR environment variable is started to enter a message.
427 """
427 """
428 message = cmdutil.logmessage(opts)
428 message = cmdutil.logmessage(opts)
429
429
430 if opts['addremove']:
430 if opts['addremove']:
431 cmdutil.addremove(repo, pats, opts)
431 cmdutil.addremove(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
433 if pats:
433 if pats:
434 status = repo.status(files=fns, match=match)
434 status = repo.status(files=fns, match=match)
435 modified, added, removed, deleted, unknown = status[:5]
435 modified, added, removed, deleted, unknown = status[:5]
436 files = modified + added + removed
436 files = modified + added + removed
437 slist = None
437 slist = None
438 for f in fns:
438 for f in fns:
439 if f == '.':
439 if f == '.':
440 continue
440 continue
441 if f not in files:
441 if f not in files:
442 rf = repo.wjoin(f)
442 rf = repo.wjoin(f)
443 try:
443 try:
444 mode = os.lstat(rf)[stat.ST_MODE]
444 mode = os.lstat(rf)[stat.ST_MODE]
445 except OSError:
445 except OSError:
446 raise util.Abort(_("file %s not found!") % rf)
446 raise util.Abort(_("file %s not found!") % rf)
447 if stat.S_ISDIR(mode):
447 if stat.S_ISDIR(mode):
448 name = f + '/'
448 name = f + '/'
449 if slist is None:
449 if slist is None:
450 slist = list(files)
450 slist = list(files)
451 slist.sort()
451 slist.sort()
452 i = bisect.bisect(slist, name)
452 i = bisect.bisect(slist, name)
453 if i >= len(slist) or not slist[i].startswith(name):
453 if i >= len(slist) or not slist[i].startswith(name):
454 raise util.Abort(_("no match under directory %s!")
454 raise util.Abort(_("no match under directory %s!")
455 % rf)
455 % rf)
456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
457 raise util.Abort(_("can't commit %s: "
457 raise util.Abort(_("can't commit %s: "
458 "unsupported file type!") % rf)
458 "unsupported file type!") % rf)
459 elif f not in repo.dirstate:
459 elif f not in repo.dirstate:
460 raise util.Abort(_("file %s not tracked!") % rf)
460 raise util.Abort(_("file %s not tracked!") % rf)
461 else:
461 else:
462 files = []
462 files = []
463 try:
463 try:
464 repo.commit(files, message, opts['user'], opts['date'], match,
464 repo.commit(files, message, opts['user'], opts['date'], match,
465 force_editor=opts.get('force_editor'))
465 force_editor=opts.get('force_editor'))
466 except ValueError, inst:
466 except ValueError, inst:
467 raise util.Abort(str(inst))
467 raise util.Abort(str(inst))
468
468
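# Editor's sketch (not part of the changeset): the bisect trick used above to
# ask "does this sorted file list contain anything under directory d/?"
# without scanning the whole list.
import bisect

def has_file_under(sorted_files, dirname):
    name = dirname.rstrip('/') + '/'
    i = bisect.bisect(sorted_files, name)
    return i < len(sorted_files) and sorted_files[i].startswith(name)

if __name__ == '__main__':
    files = sorted(['a.txt', 'src/main.py', 'src/util.py', 'z.txt'])
    print(has_file_under(files, 'src'))    # True
    print(has_file_under(files, 'docs'))   # False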
469 def docopy(ui, repo, pats, opts, wlock):
469 def docopy(ui, repo, pats, opts):
470 # called with the working directory lock (wlock) held
470 # called with the working directory lock (wlock) held
471 #
471 #
472 # hgsep => pathname that uses "/" to separate directories
472 # hgsep => pathname that uses "/" to separate directories
473 # ossep => pathname that uses os.sep to separate directories
473 # ossep => pathname that uses os.sep to separate directories
474 cwd = repo.getcwd()
474 cwd = repo.getcwd()
475 errors = 0
475 errors = 0
476 copied = []
476 copied = []
477 targets = {}
477 targets = {}
478
478
479 # abs: hgsep
479 # abs: hgsep
480 # rel: ossep
480 # rel: ossep
481 # return: hgsep
481 # return: hgsep
482 def okaytocopy(abs, rel, exact):
482 def okaytocopy(abs, rel, exact):
483 reasons = {'?': _('is not managed'),
483 reasons = {'?': _('is not managed'),
484 'r': _('has been marked for remove')}
484 'r': _('has been marked for remove')}
485 state = repo.dirstate[abs]
485 state = repo.dirstate[abs]
486 reason = reasons.get(state)
486 reason = reasons.get(state)
487 if reason:
487 if reason:
488 if exact:
488 if exact:
489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 else:
490 else:
491 if state == 'a':
491 if state == 'a':
492 origsrc = repo.dirstate.copied(abs)
492 origsrc = repo.dirstate.copied(abs)
493 if origsrc is not None:
493 if origsrc is not None:
494 return origsrc
494 return origsrc
495 return abs
495 return abs
496
496
497 # origsrc: hgsep
497 # origsrc: hgsep
498 # abssrc: hgsep
498 # abssrc: hgsep
499 # relsrc: ossep
499 # relsrc: ossep
500 # otarget: ossep
500 # otarget: ossep
501 def copy(origsrc, abssrc, relsrc, otarget, exact):
501 def copy(origsrc, abssrc, relsrc, otarget, exact):
502 abstarget = util.canonpath(repo.root, cwd, otarget)
502 abstarget = util.canonpath(repo.root, cwd, otarget)
503 reltarget = repo.pathto(abstarget, cwd)
503 reltarget = repo.pathto(abstarget, cwd)
504 prevsrc = targets.get(abstarget)
504 prevsrc = targets.get(abstarget)
505 src = repo.wjoin(abssrc)
505 src = repo.wjoin(abssrc)
506 target = repo.wjoin(abstarget)
506 target = repo.wjoin(abstarget)
507 if prevsrc is not None:
507 if prevsrc is not None:
508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
509 (reltarget, repo.pathto(abssrc, cwd),
509 (reltarget, repo.pathto(abssrc, cwd),
510 repo.pathto(prevsrc, cwd)))
510 repo.pathto(prevsrc, cwd)))
511 return
511 return
512 if (not opts['after'] and os.path.exists(target) or
512 if (not opts['after'] and os.path.exists(target) or
513 opts['after'] and repo.dirstate[abstarget] in 'mn'):
513 opts['after'] and repo.dirstate[abstarget] in 'mn'):
514 if not opts['force']:
514 if not opts['force']:
515 ui.warn(_('%s: not overwriting - file exists\n') %
515 ui.warn(_('%s: not overwriting - file exists\n') %
516 reltarget)
516 reltarget)
517 return
517 return
518 if not opts['after'] and not opts.get('dry_run'):
518 if not opts['after'] and not opts.get('dry_run'):
519 os.unlink(target)
519 os.unlink(target)
520 if opts['after']:
520 if opts['after']:
521 if not os.path.exists(target):
521 if not os.path.exists(target):
522 return
522 return
523 else:
523 else:
524 targetdir = os.path.dirname(target) or '.'
524 targetdir = os.path.dirname(target) or '.'
525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
526 os.makedirs(targetdir)
526 os.makedirs(targetdir)
527 try:
527 try:
528 restore = repo.dirstate[abstarget] == 'r'
528 restore = repo.dirstate[abstarget] == 'r'
529 if restore and not opts.get('dry_run'):
529 if restore and not opts.get('dry_run'):
530 repo.undelete([abstarget], wlock)
530 repo.undelete([abstarget])
531 try:
531 try:
532 if not opts.get('dry_run'):
532 if not opts.get('dry_run'):
533 util.copyfile(src, target)
533 util.copyfile(src, target)
534 restore = False
534 restore = False
535 finally:
535 finally:
536 if restore:
536 if restore:
537 repo.remove([abstarget], wlock=wlock)
537 repo.remove([abstarget])
538 except IOError, inst:
538 except IOError, inst:
539 if inst.errno == errno.ENOENT:
539 if inst.errno == errno.ENOENT:
540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
541 else:
541 else:
542 ui.warn(_('%s: cannot copy - %s\n') %
542 ui.warn(_('%s: cannot copy - %s\n') %
543 (relsrc, inst.strerror))
543 (relsrc, inst.strerror))
544 errors += 1
544 errors += 1
545 return
545 return
546 if ui.verbose or not exact:
546 if ui.verbose or not exact:
547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
548 targets[abstarget] = abssrc
548 targets[abstarget] = abssrc
549 if abstarget != origsrc:
549 if abstarget != origsrc:
550 if repo.dirstate[origsrc] == 'a':
550 if repo.dirstate[origsrc] == 'a':
551 if not ui.quiet:
551 if not ui.quiet:
552 ui.warn(_("%s has not been committed yet, so no copy "
552 ui.warn(_("%s has not been committed yet, so no copy "
553 "data will be stored for %s.\n")
553 "data will be stored for %s.\n")
554 % (repo.pathto(origsrc, cwd), reltarget))
554 % (repo.pathto(origsrc, cwd), reltarget))
555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
556 repo.add([abstarget], wlock)
556 repo.add([abstarget])
557 elif not opts.get('dry_run'):
557 elif not opts.get('dry_run'):
558 repo.copy(origsrc, abstarget, wlock)
558 repo.copy(origsrc, abstarget)
559 copied.append((abssrc, relsrc, exact))
559 copied.append((abssrc, relsrc, exact))
560
560
561 # pat: ossep
561 # pat: ossep
562 # dest: ossep
562 # dest: ossep
563 # srcs: list of (hgsep, hgsep, ossep, bool)
563 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # return: function that takes hgsep and returns ossep
564 # return: function that takes hgsep and returns ossep
565 def targetpathfn(pat, dest, srcs):
565 def targetpathfn(pat, dest, srcs):
566 if os.path.isdir(pat):
566 if os.path.isdir(pat):
567 abspfx = util.canonpath(repo.root, cwd, pat)
567 abspfx = util.canonpath(repo.root, cwd, pat)
568 abspfx = util.localpath(abspfx)
568 abspfx = util.localpath(abspfx)
569 if destdirexists:
569 if destdirexists:
570 striplen = len(os.path.split(abspfx)[0])
570 striplen = len(os.path.split(abspfx)[0])
571 else:
571 else:
572 striplen = len(abspfx)
572 striplen = len(abspfx)
573 if striplen:
573 if striplen:
574 striplen += len(os.sep)
574 striplen += len(os.sep)
575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
576 elif destdirexists:
576 elif destdirexists:
577 res = lambda p: os.path.join(dest,
577 res = lambda p: os.path.join(dest,
578 os.path.basename(util.localpath(p)))
578 os.path.basename(util.localpath(p)))
579 else:
579 else:
580 res = lambda p: dest
580 res = lambda p: dest
581 return res
581 return res
582
582
583 # pat: ossep
583 # pat: ossep
584 # dest: ossep
584 # dest: ossep
585 # srcs: list of (hgsep, hgsep, ossep, bool)
585 # srcs: list of (hgsep, hgsep, ossep, bool)
586 # return: function that takes hgsep and returns ossep
586 # return: function that takes hgsep and returns ossep
587 def targetpathafterfn(pat, dest, srcs):
587 def targetpathafterfn(pat, dest, srcs):
588 if util.patkind(pat, None)[0]:
588 if util.patkind(pat, None)[0]:
589 # a mercurial pattern
589 # a mercurial pattern
590 res = lambda p: os.path.join(dest,
590 res = lambda p: os.path.join(dest,
591 os.path.basename(util.localpath(p)))
591 os.path.basename(util.localpath(p)))
592 else:
592 else:
593 abspfx = util.canonpath(repo.root, cwd, pat)
593 abspfx = util.canonpath(repo.root, cwd, pat)
594 if len(abspfx) < len(srcs[0][0]):
594 if len(abspfx) < len(srcs[0][0]):
595 # A directory. Either the target path contains the last
595 # A directory. Either the target path contains the last
596 # component of the source path or it does not.
596 # component of the source path or it does not.
597 def evalpath(striplen):
597 def evalpath(striplen):
598 score = 0
598 score = 0
599 for s in srcs:
599 for s in srcs:
600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
601 if os.path.exists(t):
601 if os.path.exists(t):
602 score += 1
602 score += 1
603 return score
603 return score
604
604
605 abspfx = util.localpath(abspfx)
605 abspfx = util.localpath(abspfx)
606 striplen = len(abspfx)
606 striplen = len(abspfx)
607 if striplen:
607 if striplen:
608 striplen += len(os.sep)
608 striplen += len(os.sep)
609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
610 score = evalpath(striplen)
610 score = evalpath(striplen)
611 striplen1 = len(os.path.split(abspfx)[0])
611 striplen1 = len(os.path.split(abspfx)[0])
612 if striplen1:
612 if striplen1:
613 striplen1 += len(os.sep)
613 striplen1 += len(os.sep)
614 if evalpath(striplen1) > score:
614 if evalpath(striplen1) > score:
615 striplen = striplen1
615 striplen = striplen1
616 res = lambda p: os.path.join(dest,
616 res = lambda p: os.path.join(dest,
617 util.localpath(p)[striplen:])
617 util.localpath(p)[striplen:])
618 else:
618 else:
619 # a file
619 # a file
620 if destdirexists:
620 if destdirexists:
621 res = lambda p: os.path.join(dest,
621 res = lambda p: os.path.join(dest,
622 os.path.basename(util.localpath(p)))
622 os.path.basename(util.localpath(p)))
623 else:
623 else:
624 res = lambda p: dest
624 res = lambda p: dest
625 return res
625 return res
626
626
627
627
628 pats = util.expand_glob(pats)
628 pats = util.expand_glob(pats)
629 if not pats:
629 if not pats:
630 raise util.Abort(_('no source or destination specified'))
630 raise util.Abort(_('no source or destination specified'))
631 if len(pats) == 1:
631 if len(pats) == 1:
632 raise util.Abort(_('no destination specified'))
632 raise util.Abort(_('no destination specified'))
633 dest = pats.pop()
633 dest = pats.pop()
634 destdirexists = os.path.isdir(dest)
634 destdirexists = os.path.isdir(dest)
635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
636 raise util.Abort(_('with multiple sources, destination must be an '
636 raise util.Abort(_('with multiple sources, destination must be an '
637 'existing directory'))
637 'existing directory'))
638 if opts['after']:
638 if opts['after']:
639 tfn = targetpathafterfn
639 tfn = targetpathafterfn
640 else:
640 else:
641 tfn = targetpathfn
641 tfn = targetpathfn
642 copylist = []
642 copylist = []
643 for pat in pats:
643 for pat in pats:
644 srcs = []
644 srcs = []
645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
646 globbed=True):
646 globbed=True):
647 origsrc = okaytocopy(abssrc, relsrc, exact)
647 origsrc = okaytocopy(abssrc, relsrc, exact)
648 if origsrc:
648 if origsrc:
649 srcs.append((origsrc, abssrc, relsrc, exact))
649 srcs.append((origsrc, abssrc, relsrc, exact))
650 if not srcs:
650 if not srcs:
651 continue
651 continue
652 copylist.append((tfn(pat, dest, srcs), srcs))
652 copylist.append((tfn(pat, dest, srcs), srcs))
653 if not copylist:
653 if not copylist:
654 raise util.Abort(_('no files to copy'))
654 raise util.Abort(_('no files to copy'))
655
655
656 for targetpath, srcs in copylist:
656 for targetpath, srcs in copylist:
657 for origsrc, abssrc, relsrc, exact in srcs:
657 for origsrc, abssrc, relsrc, exact in srcs:
658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
659
659
660 if errors:
660 if errors:
661 ui.warn(_('(consider using --after)\n'))
661 ui.warn(_('(consider using --after)\n'))
662 return errors, copied
662 return errors, copied
663
663
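# Editor's sketch (not part of the changeset): the striplen logic in
# targetpathfn above. When copying a directory, the prefix stripped from each
# source path depends on whether the destination directory already exists.
# Paths below are invented and assume '/' as os.sep.
import os

def map_target(srcdir, dest, destdirexists, path):
    if destdirexists:
        striplen = len(os.path.split(srcdir)[0])   # keep the last component
    else:
        striplen = len(srcdir)
    if striplen:
        striplen += len(os.sep)
    return os.path.join(dest, path[striplen:])

if __name__ == '__main__':
    print(map_target('pkg/src', 'dest', True, 'pkg/src/a/b.py'))   # dest/src/a/b.py
    print(map_target('pkg/src', 'dest', False, 'pkg/src/a/b.py'))  # dest/a/b.py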
664 def copy(ui, repo, *pats, **opts):
664 def copy(ui, repo, *pats, **opts):
665 """mark files as copied for the next commit
665 """mark files as copied for the next commit
666
666
667 Mark dest as having copies of source files. If dest is a
667 Mark dest as having copies of source files. If dest is a
668 directory, copies are put in that directory. If dest is a file,
668 directory, copies are put in that directory. If dest is a file,
669 there can only be one source.
669 there can only be one source.
670
670
671 By default, this command copies the contents of files as they
671 By default, this command copies the contents of files as they
672 stand in the working directory. If invoked with --after, the
672 stand in the working directory. If invoked with --after, the
673 operation is recorded, but no copying is performed.
673 operation is recorded, but no copying is performed.
674
674
675 This command takes effect in the next commit. To undo a copy
675 This command takes effect in the next commit. To undo a copy
676 before that, see hg revert.
676 before that, see hg revert.
677 """
677 """
678 wlock = repo.wlock(False)
678 wlock = repo.wlock(False)
679 try:
679 try:
680 errs, copied = docopy(ui, repo, pats, opts, wlock)
680 errs, copied = docopy(ui, repo, pats, opts)
681 finally:
681 finally:
682 del wlock
682 del wlock
683 return errs
683 return errs
684
684
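# Editor's sketch (not part of the changeset, and not Mercurial's actual lock
# class): the "wlock = repo.wlock(...); try: ...; finally: del wlock" idiom
# above works because locks are now recursive -- a callee that calls
# repo.wlock() again gets the lock the caller already holds, and the lock is
# only released when the last reference to it goes away (deterministic under
# CPython's reference counting).
import weakref

class toylock(object):
    def __init__(self):
        print('lock acquired')
    def __del__(self):
        print('lock released')

class toyrepo(object):
    _wlockref = None
    def wlock(self, wait=True):
        l = self._wlockref and self._wlockref()
        if l is not None:
            return l                       # someone up the stack holds it
        l = toylock()
        self._wlockref = weakref.ref(l)    # weak, so 'del' can still free it
        return l

def inner(repo):
    wlock = repo.wlock()                   # re-enters the outer lock
    try:
        print('inner work')
    finally:
        del wlock                          # outer still holds it; no release

def outer(repo):
    wlock = repo.wlock()
    try:
        inner(repo)
    finally:
        del wlock                          # last reference -> lock released

if __name__ == '__main__':
    outer(toyrepo())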
685 def debugancestor(ui, index, rev1, rev2):
685 def debugancestor(ui, index, rev1, rev2):
686 """find the ancestor revision of two revisions in a given index"""
686 """find the ancestor revision of two revisions in a given index"""
687 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
687 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
688 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
688 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
689 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
689 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
690
690
691 def debugcomplete(ui, cmd='', **opts):
691 def debugcomplete(ui, cmd='', **opts):
692 """returns the completion list associated with the given command"""
692 """returns the completion list associated with the given command"""
693
693
694 if opts['options']:
694 if opts['options']:
695 options = []
695 options = []
696 otables = [globalopts]
696 otables = [globalopts]
697 if cmd:
697 if cmd:
698 aliases, entry = cmdutil.findcmd(ui, cmd)
698 aliases, entry = cmdutil.findcmd(ui, cmd)
699 otables.append(entry[1])
699 otables.append(entry[1])
700 for t in otables:
700 for t in otables:
701 for o in t:
701 for o in t:
702 if o[0]:
702 if o[0]:
703 options.append('-%s' % o[0])
703 options.append('-%s' % o[0])
704 options.append('--%s' % o[1])
704 options.append('--%s' % o[1])
705 ui.write("%s\n" % "\n".join(options))
705 ui.write("%s\n" % "\n".join(options))
706 return
706 return
707
707
708 clist = cmdutil.findpossible(ui, cmd).keys()
708 clist = cmdutil.findpossible(ui, cmd).keys()
709 clist.sort()
709 clist.sort()
710 ui.write("%s\n" % "\n".join(clist))
710 ui.write("%s\n" % "\n".join(clist))
711
711
712 def debugrebuildstate(ui, repo, rev=""):
712 def debugrebuildstate(ui, repo, rev=""):
713 """rebuild the dirstate as it would look like for the given revision"""
713 """rebuild the dirstate as it would look like for the given revision"""
714 if rev == "":
714 if rev == "":
715 rev = repo.changelog.tip()
715 rev = repo.changelog.tip()
716 ctx = repo.changectx(rev)
716 ctx = repo.changectx(rev)
717 files = ctx.manifest()
717 files = ctx.manifest()
718 wlock = repo.wlock()
718 wlock = repo.wlock()
719 try:
719 try:
720 repo.dirstate.rebuild(rev, files)
720 repo.dirstate.rebuild(rev, files)
721 finally:
721 finally:
722 del wlock
722 del wlock
723
723
724 def debugcheckstate(ui, repo):
724 def debugcheckstate(ui, repo):
725 """validate the correctness of the current dirstate"""
725 """validate the correctness of the current dirstate"""
726 parent1, parent2 = repo.dirstate.parents()
726 parent1, parent2 = repo.dirstate.parents()
727 m1 = repo.changectx(parent1).manifest()
727 m1 = repo.changectx(parent1).manifest()
728 m2 = repo.changectx(parent2).manifest()
728 m2 = repo.changectx(parent2).manifest()
729 errors = 0
729 errors = 0
730 for f in repo.dirstate:
730 for f in repo.dirstate:
731 state = repo.dirstate[f]
731 state = repo.dirstate[f]
732 if state in "nr" and f not in m1:
732 if state in "nr" and f not in m1:
733 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
733 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
734 errors += 1
734 errors += 1
735 if state in "a" and f in m1:
735 if state in "a" and f in m1:
736 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
736 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
737 errors += 1
737 errors += 1
738 if state in "m" and f not in m1 and f not in m2:
738 if state in "m" and f not in m1 and f not in m2:
739 ui.warn(_("%s in state %s, but not in either manifest\n") %
739 ui.warn(_("%s in state %s, but not in either manifest\n") %
740 (f, state))
740 (f, state))
741 errors += 1
741 errors += 1
742 for f in m1:
742 for f in m1:
743 state = repo.dirstate[f]
743 state = repo.dirstate[f]
744 if state not in "nrm":
744 if state not in "nrm":
745 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
745 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
746 errors += 1
746 errors += 1
747 if errors:
747 if errors:
748 error = _(".hg/dirstate inconsistent with current parent's manifest")
748 error = _(".hg/dirstate inconsistent with current parent's manifest")
749 raise util.Abort(error)
749 raise util.Abort(error)
750
750
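# Editor's note (illustrative, not part of the changeset): the single-letter
# dirstate states tested above, and in okaytocopy()/docopy() earlier, are:
#   'n' - normal: tracked in the parent manifest
#   'a' - added: scheduled for addition at the next commit
#   'r' - removed: scheduled for removal at the next commit
#   'm' - merged: picked up by a merge, to be recorded at the next commit
#   '?' - unknown: present in the working directory but not tracked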
751 def showconfig(ui, repo, *values, **opts):
751 def showconfig(ui, repo, *values, **opts):
752 """show combined config settings from all hgrc files
752 """show combined config settings from all hgrc files
753
753
754 With no args, print names and values of all config items.
754 With no args, print names and values of all config items.
755
755
756 With one arg of the form section.name, print just the value of
756 With one arg of the form section.name, print just the value of
757 that config item.
757 that config item.
758
758
759 With multiple args, print names and values of all config items
759 With multiple args, print names and values of all config items
760 with matching section names."""
760 with matching section names."""
761
761
762 untrusted = bool(opts.get('untrusted'))
762 untrusted = bool(opts.get('untrusted'))
763 if values:
763 if values:
764 if len([v for v in values if '.' in v]) > 1:
764 if len([v for v in values if '.' in v]) > 1:
765 raise util.Abort(_('only one config item permitted'))
765 raise util.Abort(_('only one config item permitted'))
766 for section, name, value in ui.walkconfig(untrusted=untrusted):
766 for section, name, value in ui.walkconfig(untrusted=untrusted):
767 sectname = section + '.' + name
767 sectname = section + '.' + name
768 if values:
768 if values:
769 for v in values:
769 for v in values:
770 if v == section:
770 if v == section:
771 ui.write('%s=%s\n' % (sectname, value))
771 ui.write('%s=%s\n' % (sectname, value))
772 elif v == sectname:
772 elif v == sectname:
773 ui.write(value, '\n')
773 ui.write(value, '\n')
774 else:
774 else:
775 ui.write('%s=%s\n' % (sectname, value))
775 ui.write('%s=%s\n' % (sectname, value))
776
776
777 def debugsetparents(ui, repo, rev1, rev2=None):
777 def debugsetparents(ui, repo, rev1, rev2=None):
778 """manually set the parents of the current working directory
778 """manually set the parents of the current working directory
779
779
780 This is useful for writing repository conversion tools, but should
780 This is useful for writing repository conversion tools, but should
781 be used with care.
781 be used with care.
782 """
782 """
783
783
784 if not rev2:
784 if not rev2:
785 rev2 = hex(nullid)
785 rev2 = hex(nullid)
786
786
787 wlock = repo.wlock()
787 wlock = repo.wlock()
788 try:
788 try:
789 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
789 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
790 finally:
790 finally:
791 del wlock
791 del wlock
792
792
793 def debugstate(ui, repo):
793 def debugstate(ui, repo):
794 """show the contents of the current dirstate"""
794 """show the contents of the current dirstate"""
795 dc = repo.dirstate._map
795 dc = repo.dirstate._map
796 k = dc.keys()
796 k = dc.keys()
797 k.sort()
797 k.sort()
798 for file_ in k:
798 for file_ in k:
799 if dc[file_][3] == -1:
799 if dc[file_][3] == -1:
800 # Pad or slice to locale representation
800 # Pad or slice to locale representation
801 locale_len = len(time.strftime("%x %X", time.localtime(0)))
801 locale_len = len(time.strftime("%x %X", time.localtime(0)))
802 timestr = 'unset'
802 timestr = 'unset'
803 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
803 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
804 else:
804 else:
805 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
805 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
806 ui.write("%c %3o %10d %s %s\n"
806 ui.write("%c %3o %10d %s %s\n"
807 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
807 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
808 timestr, file_))
808 timestr, file_))
809 for f in repo.dirstate.copies():
809 for f in repo.dirstate.copies():
810 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
810 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
811
811
812 def debugdata(ui, file_, rev):
812 def debugdata(ui, file_, rev):
813 """dump the contents of a data file revision"""
813 """dump the contents of a data file revision"""
814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
815 try:
815 try:
816 ui.write(r.revision(r.lookup(rev)))
816 ui.write(r.revision(r.lookup(rev)))
817 except KeyError:
817 except KeyError:
818 raise util.Abort(_('invalid revision identifier %s') % rev)
818 raise util.Abort(_('invalid revision identifier %s') % rev)
819
819
820 def debugdate(ui, date, range=None, **opts):
820 def debugdate(ui, date, range=None, **opts):
821 """parse and display a date"""
821 """parse and display a date"""
822 if opts["extended"]:
822 if opts["extended"]:
823 d = util.parsedate(date, util.extendeddateformats)
823 d = util.parsedate(date, util.extendeddateformats)
824 else:
824 else:
825 d = util.parsedate(date)
825 d = util.parsedate(date)
826 ui.write("internal: %s %s\n" % d)
826 ui.write("internal: %s %s\n" % d)
827 ui.write("standard: %s\n" % util.datestr(d))
827 ui.write("standard: %s\n" % util.datestr(d))
828 if range:
828 if range:
829 m = util.matchdate(range)
829 m = util.matchdate(range)
830 ui.write("match: %s\n" % m(d[0]))
830 ui.write("match: %s\n" % m(d[0]))
831
831
832 def debugindex(ui, file_):
832 def debugindex(ui, file_):
833 """dump the contents of an index file"""
833 """dump the contents of an index file"""
834 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
834 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
835 ui.write(" rev offset length base linkrev" +
835 ui.write(" rev offset length base linkrev" +
836 " nodeid p1 p2\n")
836 " nodeid p1 p2\n")
837 for i in xrange(r.count()):
837 for i in xrange(r.count()):
838 node = r.node(i)
838 node = r.node(i)
839 pp = r.parents(node)
839 pp = r.parents(node)
840 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
840 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
841 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
841 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
842 short(node), short(pp[0]), short(pp[1])))
842 short(node), short(pp[0]), short(pp[1])))
843
843
844 def debugindexdot(ui, file_):
844 def debugindexdot(ui, file_):
845 """dump an index DAG as a .dot file"""
845 """dump an index DAG as a .dot file"""
846 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
846 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
847 ui.write("digraph G {\n")
847 ui.write("digraph G {\n")
848 for i in xrange(r.count()):
848 for i in xrange(r.count()):
849 node = r.node(i)
849 node = r.node(i)
850 pp = r.parents(node)
850 pp = r.parents(node)
851 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
851 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
852 if pp[1] != nullid:
852 if pp[1] != nullid:
853 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
853 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
854 ui.write("}\n")
854 ui.write("}\n")
855
855
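# Editor's note (illustrative, not part of the changeset): for a hypothetical
# linear history of three revisions, debugindexdot would emit
#
#   digraph G {
#           -1 -> 0
#           0 -> 1
#           1 -> 2
#   }
#
# where -1 is the null revision; a second "p -> i" edge appears only for
# merges, whose second parent is not null.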
856 def debuginstall(ui):
856 def debuginstall(ui):
857 '''test Mercurial installation'''
857 '''test Mercurial installation'''
858
858
859 def writetemp(contents):
859 def writetemp(contents):
860 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
860 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
861 f = os.fdopen(fd, "wb")
861 f = os.fdopen(fd, "wb")
862 f.write(contents)
862 f.write(contents)
863 f.close()
863 f.close()
864 return name
864 return name
865
865
866 problems = 0
866 problems = 0
867
867
868 # encoding
868 # encoding
869 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
869 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
870 try:
870 try:
871 util.fromlocal("test")
871 util.fromlocal("test")
872 except util.Abort, inst:
872 except util.Abort, inst:
873 ui.write(" %s\n" % inst)
873 ui.write(" %s\n" % inst)
874 ui.write(_(" (check that your locale is properly set)\n"))
874 ui.write(_(" (check that your locale is properly set)\n"))
875 problems += 1
875 problems += 1
876
876
877 # compiled modules
877 # compiled modules
878 ui.status(_("Checking extensions...\n"))
878 ui.status(_("Checking extensions...\n"))
879 try:
879 try:
880 import bdiff, mpatch, base85
880 import bdiff, mpatch, base85
881 except Exception, inst:
881 except Exception, inst:
882 ui.write(" %s\n" % inst)
882 ui.write(" %s\n" % inst)
883 ui.write(_(" One or more extensions could not be found"))
883 ui.write(_(" One or more extensions could not be found"))
884 ui.write(_(" (check that you compiled the extensions)\n"))
884 ui.write(_(" (check that you compiled the extensions)\n"))
885 problems += 1
885 problems += 1
886
886
887 # templates
887 # templates
888 ui.status(_("Checking templates...\n"))
888 ui.status(_("Checking templates...\n"))
889 try:
889 try:
890 import templater
890 import templater
891 t = templater.templater(templater.templatepath("map-cmdline.default"))
891 t = templater.templater(templater.templatepath("map-cmdline.default"))
892 except Exception, inst:
892 except Exception, inst:
893 ui.write(" %s\n" % inst)
893 ui.write(" %s\n" % inst)
894 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
894 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
895 problems += 1
895 problems += 1
896
896
897 # patch
897 # patch
898 ui.status(_("Checking patch...\n"))
898 ui.status(_("Checking patch...\n"))
899 patcher = ui.config('ui', 'patch')
899 patcher = ui.config('ui', 'patch')
900 patcher = ((patcher and util.find_exe(patcher)) or
900 patcher = ((patcher and util.find_exe(patcher)) or
901 util.find_exe('gpatch') or
901 util.find_exe('gpatch') or
902 util.find_exe('patch'))
902 util.find_exe('patch'))
903 if not patcher:
903 if not patcher:
904 ui.write(_(" Can't find patch or gpatch in PATH\n"))
904 ui.write(_(" Can't find patch or gpatch in PATH\n"))
905 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
905 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
906 problems += 1
906 problems += 1
907 else:
907 else:
908 # actually attempt a patch here
908 # actually attempt a patch here
909 a = "1\n2\n3\n4\n"
909 a = "1\n2\n3\n4\n"
910 b = "1\n2\n3\ninsert\n4\n"
910 b = "1\n2\n3\ninsert\n4\n"
911 fa = writetemp(a)
911 fa = writetemp(a)
912 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
912 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
913 fd = writetemp(d)
913 fd = writetemp(d)
914
914
915 files = {}
915 files = {}
916 try:
916 try:
917 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
917 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
918 except util.Abort, e:
918 except util.Abort, e:
919 ui.write(_(" patch call failed:\n"))
919 ui.write(_(" patch call failed:\n"))
920 ui.write(" " + str(e) + "\n")
920 ui.write(" " + str(e) + "\n")
921 problems += 1
921 problems += 1
922 else:
922 else:
923 if list(files) != [os.path.basename(fa)]:
923 if list(files) != [os.path.basename(fa)]:
924 ui.write(_(" unexpected patch output!"))
924 ui.write(_(" unexpected patch output!"))
925 ui.write(_(" (you may have an incompatible version of patch)\n"))
925 ui.write(_(" (you may have an incompatible version of patch)\n"))
926 problems += 1
926 problems += 1
927 a = file(fa).read()
927 a = file(fa).read()
928 if a != b:
928 if a != b:
929 ui.write(_(" patch test failed!"))
929 ui.write(_(" patch test failed!"))
930 ui.write(_(" (you may have an incompatible version of patch)\n"))
930 ui.write(_(" (you may have an incompatible version of patch)\n"))
931 problems += 1
931 problems += 1
932
932
933 os.unlink(fa)
933 os.unlink(fa)
934 os.unlink(fd)
934 os.unlink(fd)
935
935
936 # merge helper
936 # merge helper
937 ui.status(_("Checking merge helper...\n"))
937 ui.status(_("Checking merge helper...\n"))
938 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
938 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
939 or "hgmerge")
939 or "hgmerge")
940 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
940 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
941 if not cmdpath:
941 if not cmdpath:
942 if cmd == 'hgmerge':
942 if cmd == 'hgmerge':
943 ui.write(_(" No merge helper set and can't find default"
943 ui.write(_(" No merge helper set and can't find default"
944 " hgmerge script in PATH\n"))
944 " hgmerge script in PATH\n"))
945 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
945 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
946 else:
946 else:
947 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
947 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
948 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
948 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
949 problems += 1
949 problems += 1
950 else:
950 else:
951 # actually attempt a patch here
951 # actually attempt a patch here
952 fa = writetemp("1\n2\n3\n4\n")
952 fa = writetemp("1\n2\n3\n4\n")
953 fl = writetemp("1\n2\n3\ninsert\n4\n")
953 fl = writetemp("1\n2\n3\ninsert\n4\n")
954 fr = writetemp("begin\n1\n2\n3\n4\n")
954 fr = writetemp("begin\n1\n2\n3\n4\n")
955 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
955 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
956 if r:
956 if r:
957 ui.write(_(" Got unexpected merge error %d!\n") % r)
957 ui.write(_(" Got unexpected merge error %d!\n") % r)
958 problems += 1
958 problems += 1
959 m = file(fl).read()
959 m = file(fl).read()
960 if m != "begin\n1\n2\n3\ninsert\n4\n":
960 if m != "begin\n1\n2\n3\ninsert\n4\n":
961 ui.write(_(" Got unexpected merge results!\n"))
961 ui.write(_(" Got unexpected merge results!\n"))
962 ui.write(_(" (your merge helper may have the"
962 ui.write(_(" (your merge helper may have the"
963 " wrong argument order)\n"))
963 " wrong argument order)\n"))
964 ui.write(_(" Result: %r\n") % m)
964 ui.write(_(" Result: %r\n") % m)
965 problems += 1
965 problems += 1
966 os.unlink(fa)
966 os.unlink(fa)
967 os.unlink(fl)
967 os.unlink(fl)
968 os.unlink(fr)
968 os.unlink(fr)
969
969
970 # editor
970 # editor
971 ui.status(_("Checking commit editor...\n"))
971 ui.status(_("Checking commit editor...\n"))
972 editor = (os.environ.get("HGEDITOR") or
972 editor = (os.environ.get("HGEDITOR") or
973 ui.config("ui", "editor") or
973 ui.config("ui", "editor") or
974 os.environ.get("EDITOR", "vi"))
974 os.environ.get("EDITOR", "vi"))
975 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
975 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
976 if not cmdpath:
976 if not cmdpath:
977 if editor == 'vi':
977 if editor == 'vi':
978 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
978 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
979 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
979 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
980 else:
980 else:
981 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
981 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
982 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
982 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
983 problems += 1
983 problems += 1
984
984
985 # check username
985 # check username
986 ui.status(_("Checking username...\n"))
986 ui.status(_("Checking username...\n"))
987 user = os.environ.get("HGUSER")
987 user = os.environ.get("HGUSER")
988 if user is None:
988 if user is None:
989 user = ui.config("ui", "username")
989 user = ui.config("ui", "username")
990 if user is None:
990 if user is None:
991 user = os.environ.get("EMAIL")
991 user = os.environ.get("EMAIL")
992 if not user:
992 if not user:
993 ui.warn(" ")
993 ui.warn(" ")
994 ui.username()
994 ui.username()
995 ui.write(_(" (specify a username in your .hgrc file)\n"))
995 ui.write(_(" (specify a username in your .hgrc file)\n"))
996
996
997 if not problems:
997 if not problems:
998 ui.status(_("No problems detected\n"))
998 ui.status(_("No problems detected\n"))
999 else:
999 else:
1000 ui.write(_("%s problems detected,"
1000 ui.write(_("%s problems detected,"
1001 " please check your install!\n") % problems)
1001 " please check your install!\n") % problems)
1002
1002
1003 return problems
1003 return problems
1004
1004
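# Editor's sketch (not part of the changeset): the patch check above is a
# round trip -- write file "a", diff it against "b", apply the diff with the
# external patch tool, and require the result to equal "b". The same kind of
# diff text can be produced with the standard library (mdiff.unidiff above is
# Mercurial's own diff generator):
import difflib

def make_unified_diff(a, b, name):
    lines = difflib.unified_diff(a.splitlines(True), b.splitlines(True),
                                 fromfile=name, tofile=name)
    return ''.join(lines)

if __name__ == '__main__':
    print(make_unified_diff("1\n2\n3\n4\n", "1\n2\n3\ninsert\n4\n",
                            'hg-debuginstall-example'))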
1005 def debugrename(ui, repo, file1, *pats, **opts):
1005 def debugrename(ui, repo, file1, *pats, **opts):
1006 """dump rename information"""
1006 """dump rename information"""
1007
1007
1008 ctx = repo.changectx(opts.get('rev', 'tip'))
1008 ctx = repo.changectx(opts.get('rev', 'tip'))
1009 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1009 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1010 ctx.node()):
1010 ctx.node()):
1011 m = ctx.filectx(abs).renamed()
1011 m = ctx.filectx(abs).renamed()
1012 if m:
1012 if m:
1013 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1013 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1014 else:
1014 else:
1015 ui.write(_("%s not renamed\n") % rel)
1015 ui.write(_("%s not renamed\n") % rel)
1016
1016
1017 def debugwalk(ui, repo, *pats, **opts):
1017 def debugwalk(ui, repo, *pats, **opts):
1018 """show how files match on given patterns"""
1018 """show how files match on given patterns"""
1019 items = list(cmdutil.walk(repo, pats, opts))
1019 items = list(cmdutil.walk(repo, pats, opts))
1020 if not items:
1020 if not items:
1021 return
1021 return
1022 fmt = '%%s %%-%ds %%-%ds %%s' % (
1022 fmt = '%%s %%-%ds %%-%ds %%s' % (
1023 max([len(abs) for (src, abs, rel, exact) in items]),
1023 max([len(abs) for (src, abs, rel, exact) in items]),
1024 max([len(rel) for (src, abs, rel, exact) in items]))
1024 max([len(rel) for (src, abs, rel, exact) in items]))
1025 for src, abs, rel, exact in items:
1025 for src, abs, rel, exact in items:
1026 line = fmt % (src, abs, rel, exact and 'exact' or '')
1026 line = fmt % (src, abs, rel, exact and 'exact' or '')
1027 ui.write("%s\n" % line.rstrip())
1027 ui.write("%s\n" % line.rstrip())
1028
1028
1029 def diff(ui, repo, *pats, **opts):
1029 def diff(ui, repo, *pats, **opts):
1030 """diff repository (or selected files)
1030 """diff repository (or selected files)
1031
1031
1032 Show differences between revisions for the specified files.
1032 Show differences between revisions for the specified files.
1033
1033
1034 Differences between files are shown using the unified diff format.
1034 Differences between files are shown using the unified diff format.
1035
1035
1036 NOTE: diff may generate unexpected results for merges, as it will
1036 NOTE: diff may generate unexpected results for merges, as it will
1037 default to comparing against the working directory's first parent
1037 default to comparing against the working directory's first parent
1038 changeset if no revisions are specified.
1038 changeset if no revisions are specified.
1039
1039
1040 When two revision arguments are given, then changes are shown
1040 When two revision arguments are given, then changes are shown
1041 between those revisions. If only one revision is specified then
1041 between those revisions. If only one revision is specified then
1042 that revision is compared to the working directory, and, when no
1042 that revision is compared to the working directory, and, when no
1043 revisions are specified, the working directory files are compared
1043 revisions are specified, the working directory files are compared
1044 to its parent.
1044 to its parent.
1045
1045
1046 Without the -a option, diff will avoid generating diffs of files
1046 Without the -a option, diff will avoid generating diffs of files
1047 it detects as binary. With -a, diff will generate a diff anyway,
1047 it detects as binary. With -a, diff will generate a diff anyway,
1048 probably with undesirable results.
1048 probably with undesirable results.
1049 """
1049 """
1050 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1050 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1051
1051
1052 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1052 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1053
1053
1054 patch.diff(repo, node1, node2, fns, match=matchfn,
1054 patch.diff(repo, node1, node2, fns, match=matchfn,
1055 opts=patch.diffopts(ui, opts))
1055 opts=patch.diffopts(ui, opts))
1056
1056
1057 def export(ui, repo, *changesets, **opts):
1057 def export(ui, repo, *changesets, **opts):
1058 """dump the header and diffs for one or more changesets
1058 """dump the header and diffs for one or more changesets
1059
1059
1060 Print the changeset header and diffs for one or more revisions.
1060 Print the changeset header and diffs for one or more revisions.
1061
1061
1062 The information shown in the changeset header is: author,
1062 The information shown in the changeset header is: author,
1063 changeset hash, parent(s) and commit comment.
1063 changeset hash, parent(s) and commit comment.
1064
1064
1065 NOTE: export may generate unexpected diff output for merge changesets,
1065 NOTE: export may generate unexpected diff output for merge changesets,
1066 as it will compare the merge changeset against its first parent only.
1066 as it will compare the merge changeset against its first parent only.
1067
1067
1068 Output may be to a file, in which case the name of the file is
1068 Output may be to a file, in which case the name of the file is
1069 given using a format string. The formatting rules are as follows:
1069 given using a format string. The formatting rules are as follows:
1070
1070
1071 %% literal "%" character
1071 %% literal "%" character
1072 %H changeset hash (40 bytes of hexadecimal)
1072 %H changeset hash (40 bytes of hexadecimal)
1073 %N number of patches being generated
1073 %N number of patches being generated
1074 %R changeset revision number
1074 %R changeset revision number
1075 %b basename of the exporting repository
1075 %b basename of the exporting repository
1076 %h short-form changeset hash (12 bytes of hexadecimal)
1076 %h short-form changeset hash (12 bytes of hexadecimal)
1077 %n zero-padded sequence number, starting at 1
1077 %n zero-padded sequence number, starting at 1
1078 %r zero-padded changeset revision number
1078 %r zero-padded changeset revision number
1079
1079
1080 Without the -a option, export will avoid generating diffs of files
1080 Without the -a option, export will avoid generating diffs of files
1081 it detects as binary. With -a, export will generate a diff anyway,
1081 it detects as binary. With -a, export will generate a diff anyway,
1082 probably with undesirable results.
1082 probably with undesirable results.
1083
1083
1084 With the --switch-parent option, the diff will be against the second
1084 With the --switch-parent option, the diff will be against the second
1085 parent. This can be useful for reviewing a merge.
1085 parent. This can be useful for reviewing a merge.
1086 """
1086 """
1087 if not changesets:
1087 if not changesets:
1088 raise util.Abort(_("export requires at least one changeset"))
1088 raise util.Abort(_("export requires at least one changeset"))
1089 revs = cmdutil.revrange(repo, changesets)
1089 revs = cmdutil.revrange(repo, changesets)
1090 if len(revs) > 1:
1090 if len(revs) > 1:
1091 ui.note(_('exporting patches:\n'))
1091 ui.note(_('exporting patches:\n'))
1092 else:
1092 else:
1093 ui.note(_('exporting patch:\n'))
1093 ui.note(_('exporting patch:\n'))
1094 patch.export(repo, revs, template=opts['output'],
1094 patch.export(repo, revs, template=opts['output'],
1095 switch_parent=opts['switch_parent'],
1095 switch_parent=opts['switch_parent'],
1096 opts=patch.diffopts(ui, opts))
1096 opts=patch.diffopts(ui, opts))
1097
1097
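# Editor's sketch (not part of the changeset): how an -o/--output template
# such as "%b-%R-%h.patch" gets its keys filled in. patch.export and cmdutil
# do the real substitution (including the zero padding for %r and %n); the
# simplified expander and the values below are invented for illustration.
import re

def expand_template(fmt, values):
    return re.sub('%(.)', lambda m: values.get(m.group(1), m.group(0)), fmt)

if __name__ == '__main__':
    node = '1234567890abcdef1234567890abcdef12345678'
    values = {'%': '%', 'H': node, 'h': node[:12], 'R': '7',
              'N': '1', 'b': 'hg-repo'}
    print(expand_template('%b-%R-%h.patch', values))
    # -> hg-repo-7-1234567890ab.patch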
1098 def grep(ui, repo, pattern, *pats, **opts):
1098 def grep(ui, repo, pattern, *pats, **opts):
1099 """search for a pattern in specified files and revisions
1099 """search for a pattern in specified files and revisions
1100
1100
1101 Search revisions of files for a regular expression.
1101 Search revisions of files for a regular expression.
1102
1102
1103 This command behaves differently than Unix grep. It only accepts
1103 This command behaves differently than Unix grep. It only accepts
1104 Python/Perl regexps. It searches repository history, not the
1104 Python/Perl regexps. It searches repository history, not the
1105 working directory. It always prints the revision number in which
1105 working directory. It always prints the revision number in which
1106 a match appears.
1106 a match appears.
1107
1107
1108 By default, grep only prints output for the first revision of a
1108 By default, grep only prints output for the first revision of a
1109 file in which it finds a match. To get it to print every revision
1109 file in which it finds a match. To get it to print every revision
1110 that contains a change in match status ("-" for a match that
1110 that contains a change in match status ("-" for a match that
1111 becomes a non-match, or "+" for a non-match that becomes a match),
1111 becomes a non-match, or "+" for a non-match that becomes a match),
1112 use the --all flag.
1112 use the --all flag.
1113 """
1113 """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
        return None
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
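    # Editor's note: an illustrative aside, not part of the original change.
    # difflinestates() reduces a SequenceMatcher opcode stream to '+'/'-'
    # pairs; for example, with a = [1, 2, 3] and b = [1, 3, 4] it yields
    # ('-', 2) followed by ('+', 4), which display() below turns into the
    # per-line change markers shown with --all.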

    prev = {}
    def display(fn, rev, states, prevstates):
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0

def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revision names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking for repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)

def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = cmdutil.findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = None
        if not ct:
            ui.status(_('no commands defined\n'))
            return

        ui.status(_('list of commands:\n\n'))
        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands:\n\n'))
        else:
            ui.status(_('list of commands:\n\n'))

        helplist()

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags)
    output = []

    if source:
        source, revs = cmdutil.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))

def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of the email
    message are used as default committer and commit message. All
    text/plain body parts before the first diff are added to the
    commit message.

    If the imported patch was generated by hg export, user and description
    from the patch override values from message headers and body. Values
    given on the command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                data = patch.extract(ui, file(pf, 'rb'))

            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    files = patch.updatedir(ui, repo, files)
                n = repo.commit(files, message, user, date)
                if opts.get('exact'):
                    if hex(n) != nodeid:
                        repo.rollback()
                        raise util.Abort(_('patch is damaged' +
                                           ' or loses information'))
            finally:
                os.unlink(tmpname)
    finally:
        del wlock, lock

def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repositories, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    if revs:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)

def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)

def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    ret = 1
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=util.always,
                                             default='relglob'):
        if src == 'b':
            continue
        if not node and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)
        ret = 0

    return ret

def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}
    ncache = {}
    dcache = []
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1

def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no
    revision is given, the first parent of the working directory is used,
    or tip if no revision is checked out.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    m = repo.changectx(rev).manifest()
    files = m.keys()
    files.sort()

    for f in files:
        if ui.debugflag:
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            ui.write("%3s " % (m.execf(f) and "755" or "644"))
        ui.write("%s\n" % f)

def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    if not node:
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == heads[0] and heads[-1] or heads[0]
    return hg.merge(repo, node, force=force)

def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)

def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, the revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if file_:
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        ctx = repo.filectx(files[0], changeid=rev)
    elif rev:
        ctx = repo.changectx(rev)
    else:
        ctx = repo.workingctx()
    p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))

def postincoming(ui, repo, modheads, optupdate, wasempty):
    if modheads == 0:
        return
    if optupdate:
        if wasempty:
            return hg.update(repo, repo.lookup('default'))
        elif modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

2059 def pull(ui, repo, source="default", **opts):
2058 def pull(ui, repo, source="default", **opts):
2060 """pull changes from the specified source
2059 """pull changes from the specified source
2061
2060
2062 Pull changes from a remote repository to a local one.
2061 Pull changes from a remote repository to a local one.
2063
2062
2064 This finds all changes from the repository at the specified path
2063 This finds all changes from the repository at the specified path
2065 or URL and adds them to the local repository. By default, this
2064 or URL and adds them to the local repository. By default, this
2066 does not update the copy of the project in the working directory.
2065 does not update the copy of the project in the working directory.
2067
2066
2068 Valid URLs are of the form:
2067 Valid URLs are of the form:
2069
2068
2070 local/filesystem/path (or file://local/filesystem/path)
2069 local/filesystem/path (or file://local/filesystem/path)
2071 http://[user@]host[:port]/[path]
2070 http://[user@]host[:port]/[path]
2072 https://[user@]host[:port]/[path]
2071 https://[user@]host[:port]/[path]
2073 ssh://[user@]host[:port]/[path]
2072 ssh://[user@]host[:port]/[path]
2074 static-http://host[:port]/[path]
2073 static-http://host[:port]/[path]
2075
2074
2076 Paths in the local filesystem can either point to Mercurial
2075 Paths in the local filesystem can either point to Mercurial
2077 repositories or to bundle files (as created by 'hg bundle' or
2076 repositories or to bundle files (as created by 'hg bundle' or
2078 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2077 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2079 allows access to a Mercurial repository where you simply use a web
2078 allows access to a Mercurial repository where you simply use a web
2080 server to publish the .hg directory as static content.
2079 server to publish the .hg directory as static content.
2081
2080
2082 An optional identifier after # indicates a particular branch, tag,
2081 An optional identifier after # indicates a particular branch, tag,
2083 or changeset to pull.
2082 or changeset to pull.
2084
2083
2085 Some notes about using SSH with Mercurial:
2084 Some notes about using SSH with Mercurial:
2086 - SSH requires an accessible shell account on the destination machine
2085 - SSH requires an accessible shell account on the destination machine
2087 and a copy of hg in the remote path or specified with remotecmd.
2086 and a copy of hg in the remote path or specified with remotecmd.
2088 - path is relative to the remote user's home directory by default.
2087 - path is relative to the remote user's home directory by default.
2089 Use an extra slash at the start of a path to specify an absolute path:
2088 Use an extra slash at the start of a path to specify an absolute path:
2090 ssh://example.com//tmp/repository
2089 ssh://example.com//tmp/repository
2091 - Mercurial doesn't use its own compression via SSH; the right thing
2090 - Mercurial doesn't use its own compression via SSH; the right thing
2092 to do is to configure it in your ~/.ssh/config, e.g.:
2091 to do is to configure it in your ~/.ssh/config, e.g.:
2093 Host *.mylocalnetwork.example.com
2092 Host *.mylocalnetwork.example.com
2094 Compression no
2093 Compression no
2095 Host *
2094 Host *
2096 Compression yes
2095 Compression yes
2097 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2096 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2098 with the --ssh command line option.
2097 with the --ssh command line option.
2099 """
2098 """
2100 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2099 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2101 cmdutil.setremoteconfig(ui, opts)
2100 cmdutil.setremoteconfig(ui, opts)
2102
2101
2103 other = hg.repository(ui, source)
2102 other = hg.repository(ui, source)
2104 ui.status(_('pulling from %s\n') % (source))
2103 ui.status(_('pulling from %s\n') % (source))
2105 if revs:
2104 if revs:
2106 if 'lookup' in other.capabilities:
2105 if 'lookup' in other.capabilities:
2107 revs = [other.lookup(rev) for rev in revs]
2106 revs = [other.lookup(rev) for rev in revs]
2108 else:
2107 else:
2109 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2108 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2110 raise util.Abort(error)
2109 raise util.Abort(error)
2111
2110
2112 wasempty = repo.changelog.count() == 0
2111 wasempty = repo.changelog.count() == 0
2113 modheads = repo.pull(other, heads=revs, force=opts['force'])
2112 modheads = repo.pull(other, heads=revs, force=opts['force'])
2114 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2113 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2115
2114
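# Editorial sketch, not part of this changeset: the pull-then-update flow of
# the command above driven through the same internal calls. Assumes a
# Mercurial installation of this vintage; the 'default' path and the local
# repository location '.' are illustrative.
from mercurial import ui, hg, cmdutil, commands

u = ui.ui()
repo = hg.repository(u, '.')
source, revs = cmdutil.parseurl(u.expandpath('default'), [])
other = hg.repository(u, source)
wasempty = repo.changelog.count() == 0
modheads = repo.pull(other, heads=None, force=False)
commands.postincoming(u, repo, modheads, True, wasempty)   # optupdate=True, like 'hg pull -u'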
2116 def push(ui, repo, dest=None, **opts):
2115 def push(ui, repo, dest=None, **opts):
2117 """push changes to the specified destination
2116 """push changes to the specified destination
2118
2117
2119 Push changes from the local repository to the given destination.
2118 Push changes from the local repository to the given destination.
2120
2119
2121 This is the symmetrical operation for pull. It helps to move
2120 This is the symmetrical operation for pull. It helps to move
2122 changes from the current repository to a different one. If the
2121 changes from the current repository to a different one. If the
2123 destination is local this is identical to a pull in that directory
2122 destination is local this is identical to a pull in that directory
2124 from the current one.
2123 from the current one.
2125
2124
2126 By default, push will refuse to run if it detects the result would
2125 By default, push will refuse to run if it detects the result would
2127 increase the number of remote heads. This generally indicates
2126 increase the number of remote heads. This generally indicates
2128 the client has forgotten to sync and merge before pushing.
2127 the client has forgotten to sync and merge before pushing.
2129
2128
2130 Valid URLs are of the form:
2129 Valid URLs are of the form:
2131
2130
2132 local/filesystem/path (or file://local/filesystem/path)
2131 local/filesystem/path (or file://local/filesystem/path)
2133 ssh://[user@]host[:port]/[path]
2132 ssh://[user@]host[:port]/[path]
2134 http://[user@]host[:port]/[path]
2133 http://[user@]host[:port]/[path]
2135 https://[user@]host[:port]/[path]
2134 https://[user@]host[:port]/[path]
2136
2135
2137 An optional identifier after # indicates a particular branch, tag,
2136 An optional identifier after # indicates a particular branch, tag,
2138 or changeset to push.
2137 or changeset to push.
2139
2138
2140 Look at the help text for the pull command for important details
2139 Look at the help text for the pull command for important details
2141 about ssh:// URLs.
2140 about ssh:// URLs.
2142
2141
2143 Pushing to http:// and https:// URLs is only possible if this
2142 Pushing to http:// and https:// URLs is only possible if this
2144 feature is explicitly enabled on the remote Mercurial server.
2143 feature is explicitly enabled on the remote Mercurial server.
2145 """
2144 """
2146 dest, revs = cmdutil.parseurl(
2145 dest, revs = cmdutil.parseurl(
2147 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2146 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2148 cmdutil.setremoteconfig(ui, opts)
2147 cmdutil.setremoteconfig(ui, opts)
2149
2148
2150 other = hg.repository(ui, dest)
2149 other = hg.repository(ui, dest)
2151 ui.status('pushing to %s\n' % (dest))
2150 ui.status('pushing to %s\n' % (dest))
2152 if revs:
2151 if revs:
2153 revs = [repo.lookup(rev) for rev in revs]
2152 revs = [repo.lookup(rev) for rev in revs]
2154 r = repo.push(other, opts['force'], revs=revs)
2153 r = repo.push(other, opts['force'], revs=revs)
2155 return r == 0
2154 return r == 0
2156
2155
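# Editorial sketch, not part of this changeset: pushing a subset of revisions
# through the same calls as the command above. Assumes a Mercurial
# installation of this vintage; 'default-push' and 'tip' are illustrative.
from mercurial import ui, hg, cmdutil

u = ui.ui()
repo = hg.repository(u, '.')
dest, revs = cmdutil.parseurl(
    u.expandpath('default-push', 'default'), ['tip'])
other = hg.repository(u, dest)
nodes = [repo.lookup(r) for r in revs]    # names resolve locally, as in push()
r = repo.push(other, False, revs=nodes)   # force=False: refuse to add remote heads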
2157 def rawcommit(ui, repo, *pats, **opts):
2156 def rawcommit(ui, repo, *pats, **opts):
2158 """raw commit interface (DEPRECATED)
2157 """raw commit interface (DEPRECATED)
2159
2158
2160 (DEPRECATED)
2159 (DEPRECATED)
2161 Lowlevel commit, for use in helper scripts.
2160 Lowlevel commit, for use in helper scripts.
2162
2161
2163 This command is not intended to be used by normal users, as it is
2162 This command is not intended to be used by normal users, as it is
2164 primarily useful for importing from other SCMs.
2163 primarily useful for importing from other SCMs.
2165
2164
2166 This command is now deprecated and will be removed in a future
2165 This command is now deprecated and will be removed in a future
2167 release; please use debugsetparents and commit instead.
2166 release; please use debugsetparents and commit instead.
2168 """
2167 """
2169
2168
2170 ui.warn(_("(the rawcommit command is deprecated)\n"))
2169 ui.warn(_("(the rawcommit command is deprecated)\n"))
2171
2170
2172 message = cmdutil.logmessage(opts)
2171 message = cmdutil.logmessage(opts)
2173
2172
2174 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2173 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2175 if opts['files']:
2174 if opts['files']:
2176 files += open(opts['files']).read().splitlines()
2175 files += open(opts['files']).read().splitlines()
2177
2176
2178 parents = [repo.lookup(p) for p in opts['parent']]
2177 parents = [repo.lookup(p) for p in opts['parent']]
2179
2178
2180 try:
2179 try:
2181 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2180 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2182 except ValueError, inst:
2181 except ValueError, inst:
2183 raise util.Abort(str(inst))
2182 raise util.Abort(str(inst))
2184
2183
2185 def recover(ui, repo):
2184 def recover(ui, repo):
2186 """roll back an interrupted transaction
2185 """roll back an interrupted transaction
2187
2186
2188 Recover from an interrupted commit or pull.
2187 Recover from an interrupted commit or pull.
2189
2188
2190 This command tries to fix the repository status after an interrupted
2189 This command tries to fix the repository status after an interrupted
2191 operation. It should only be necessary when Mercurial suggests it.
2190 operation. It should only be necessary when Mercurial suggests it.
2192 """
2191 """
2193 if repo.recover():
2192 if repo.recover():
2194 return hg.verify(repo)
2193 return hg.verify(repo)
2195 return 1
2194 return 1
2196
2195
2197 def remove(ui, repo, *pats, **opts):
2196 def remove(ui, repo, *pats, **opts):
2198 """remove the specified files on the next commit
2197 """remove the specified files on the next commit
2199
2198
2200 Schedule the indicated files for removal from the repository.
2199 Schedule the indicated files for removal from the repository.
2201
2200
2202 This only removes files from the current branch, not from the
2201 This only removes files from the current branch, not from the
2203 entire project history. If the files still exist in the working
2202 entire project history. If the files still exist in the working
2204 directory, they will be deleted from it. If invoked with --after,
2203 directory, they will be deleted from it. If invoked with --after,
2205 files are marked as removed, but not actually unlinked unless --force
2204 files are marked as removed, but not actually unlinked unless --force
2206 is also given. Without exact file names, --after will only mark
2205 is also given. Without exact file names, --after will only mark
2207 files as removed if they are no longer in the working directory.
2206 files as removed if they are no longer in the working directory.
2208
2207
2209 This command schedules the files to be removed at the next commit.
2208 This command schedules the files to be removed at the next commit.
2210 To undo a remove before that, see hg revert.
2209 To undo a remove before that, see hg revert.
2211
2210
2212 Modified files and added files are not removed by default. To
2211 Modified files and added files are not removed by default. To
2213 remove them, use the -f/--force option.
2212 remove them, use the -f/--force option.
2214 """
2213 """
2215 names = []
2214 names = []
2216 if not opts['after'] and not pats:
2215 if not opts['after'] and not pats:
2217 raise util.Abort(_('no files specified'))
2216 raise util.Abort(_('no files specified'))
2218 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2217 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2219 exact = dict.fromkeys(files)
2218 exact = dict.fromkeys(files)
2220 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2219 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2221 modified, added, removed, deleted, unknown = mardu
2220 modified, added, removed, deleted, unknown = mardu
2222 remove, forget = [], []
2221 remove, forget = [], []
2223 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2222 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2224 reason = None
2223 reason = None
2225 if abs in modified and not opts['force']:
2224 if abs in modified and not opts['force']:
2226 reason = _('is modified (use -f to force removal)')
2225 reason = _('is modified (use -f to force removal)')
2227 elif abs in added:
2226 elif abs in added:
2228 if opts['force']:
2227 if opts['force']:
2229 forget.append(abs)
2228 forget.append(abs)
2230 continue
2229 continue
2231 reason = _('has been marked for add (use -f to force removal)')
2230 reason = _('has been marked for add (use -f to force removal)')
2232 elif abs not in repo.dirstate:
2231 elif abs not in repo.dirstate:
2233 reason = _('is not managed')
2232 reason = _('is not managed')
2234 elif opts['after'] and not exact and abs not in deleted:
2233 elif opts['after'] and not exact and abs not in deleted:
2235 continue
2234 continue
2236 elif abs in removed:
2235 elif abs in removed:
2237 continue
2236 continue
2238 if reason:
2237 if reason:
2239 if exact:
2238 if exact:
2240 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2239 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2241 else:
2240 else:
2242 if ui.verbose or not exact:
2241 if ui.verbose or not exact:
2243 ui.status(_('removing %s\n') % rel)
2242 ui.status(_('removing %s\n') % rel)
2244 remove.append(abs)
2243 remove.append(abs)
2245 repo.forget(forget)
2244 repo.forget(forget)
2246 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2245 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2247
2246
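# Editorial sketch, not part of this changeset: the gating applied above in
# miniature -- modified files need --force, unmanaged files are refused, the
# rest are scheduled and unlinked. The candidate file names are illustrative.
from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
modified, added, removed, deleted, unknown = repo.status()[:5]
for f in ['README', 'scratch.txt']:
    if f in modified:
        u.warn("not removing %s: file is modified (use -f to force removal)\n" % f)
    elif f not in repo.dirstate:
        u.warn("not removing %s: file is not managed\n" % f)
    else:
        repo.remove([f], unlink=True)     # schedule for the next commit and delete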
2248 def rename(ui, repo, *pats, **opts):
2247 def rename(ui, repo, *pats, **opts):
2249 """rename files; equivalent of copy + remove
2248 """rename files; equivalent of copy + remove
2250
2249
2251 Mark dest as copies of sources; mark sources for deletion. If
2250 Mark dest as copies of sources; mark sources for deletion. If
2252 dest is a directory, copies are put in that directory. If dest is
2251 dest is a directory, copies are put in that directory. If dest is
2253 a file, there can only be one source.
2252 a file, there can only be one source.
2254
2253
2255 By default, this command copies the contents of files as they
2254 By default, this command copies the contents of files as they
2256 stand in the working directory. If invoked with --after, the
2255 stand in the working directory. If invoked with --after, the
2257 operation is recorded, but no copying is performed.
2256 operation is recorded, but no copying is performed.
2258
2257
2259 This command takes effect in the next commit. To undo a rename
2258 This command takes effect in the next commit. To undo a rename
2260 before that, see hg revert.
2259 before that, see hg revert.
2261 """
2260 """
2262 wlock = repo.wlock(False)
2261 wlock = repo.wlock(False)
2263 try:
2262 try:
2264 errs, copied = docopy(ui, repo, pats, opts, wlock)
2263 errs, copied = docopy(ui, repo, pats, opts)
2265 names = []
2264 names = []
2266 for abs, rel, exact in copied:
2265 for abs, rel, exact in copied:
2267 if ui.verbose or not exact:
2266 if ui.verbose or not exact:
2268 ui.status(_('removing %s\n') % rel)
2267 ui.status(_('removing %s\n') % rel)
2269 names.append(abs)
2268 names.append(abs)
2270 if not opts.get('dry_run'):
2269 if not opts.get('dry_run'):
2271 repo.remove(names, True, wlock=wlock)
2270 repo.remove(names, True)
2272 return errs
2271 return errs
2273 finally:
2272 finally:
2274 del wlock
2273 del wlock
2275
2274
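# Editorial sketch, not part of this changeset: the locking idiom rename()
# now uses throughout this diff -- take the working-directory lock once,
# call helpers without passing it along, and release it by dropping the
# reference in a finally block. The file name is illustrative.
from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
wlock = repo.wlock(False)            # False: fail instead of waiting if locked
try:
    repo.remove(['obsolete.txt'], True)   # no wlock= keyword needed any more
finally:
    del wlock                        # dropping the reference releases the lock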
2276 def revert(ui, repo, *pats, **opts):
2275 def revert(ui, repo, *pats, **opts):
2277 """revert files or dirs to their states as of some revision
2276 """revert files or dirs to their states as of some revision
2278
2277
2279 With no revision specified, revert the named files or directories
2278 With no revision specified, revert the named files or directories
2280 to the contents they had in the parent of the working directory.
2279 to the contents they had in the parent of the working directory.
2281 This restores the contents of the affected files to an unmodified
2280 This restores the contents of the affected files to an unmodified
2282 state and unschedules adds, removes, copies, and renames. If the
2281 state and unschedules adds, removes, copies, and renames. If the
2283 working directory has two parents, you must explicitly specify the
2282 working directory has two parents, you must explicitly specify the
2284 revision to revert to.
2283 revision to revert to.
2285
2284
2286 Modified files are saved with a .orig suffix before reverting.
2285 Modified files are saved with a .orig suffix before reverting.
2287 To disable these backups, use --no-backup.
2286 To disable these backups, use --no-backup.
2288
2287
2289 Using the -r option, revert the given files or directories to their
2288 Using the -r option, revert the given files or directories to their
2290 contents as of a specific revision. This can be helpful to "roll
2289 contents as of a specific revision. This can be helpful to "roll
2291 back" some or all of a change that should not have been committed.
2290 back" some or all of a change that should not have been committed.
2292
2291
2293 Revert modifies the working directory. It does not commit any
2292 Revert modifies the working directory. It does not commit any
2294 changes, or change the parent of the working directory. If you
2293 changes, or change the parent of the working directory. If you
2295 revert to a revision other than the parent of the working
2294 revert to a revision other than the parent of the working
2296 directory, the reverted files will thus appear modified
2295 directory, the reverted files will thus appear modified
2297 afterwards.
2296 afterwards.
2298
2297
2299 If a file has been deleted, it is restored. If the executable
2298 If a file has been deleted, it is restored. If the executable
2300 mode of a file was changed, it is reset.
2299 mode of a file was changed, it is reset.
2301
2300
2302 If names are given, all files matching the names are reverted.
2301 If names are given, all files matching the names are reverted.
2303
2302
2304 If no arguments are given, no files are reverted.
2303 If no arguments are given, no files are reverted.
2305 """
2304 """
2306
2305
2307 if opts["date"]:
2306 if opts["date"]:
2308 if opts["rev"]:
2307 if opts["rev"]:
2309 raise util.Abort(_("you can't specify a revision and a date"))
2308 raise util.Abort(_("you can't specify a revision and a date"))
2310 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2309 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2311
2310
2312 if not pats and not opts['all']:
2311 if not pats and not opts['all']:
2313 raise util.Abort(_('no files or directories specified; '
2312 raise util.Abort(_('no files or directories specified; '
2314 'use --all to revert the whole repo'))
2313 'use --all to revert the whole repo'))
2315
2314
2316 parent, p2 = repo.dirstate.parents()
2315 parent, p2 = repo.dirstate.parents()
2317 if not opts['rev'] and p2 != nullid:
2316 if not opts['rev'] and p2 != nullid:
2318 raise util.Abort(_('uncommitted merge - please provide a '
2317 raise util.Abort(_('uncommitted merge - please provide a '
2319 'specific revision'))
2318 'specific revision'))
2320 ctx = repo.changectx(opts['rev'])
2319 ctx = repo.changectx(opts['rev'])
2321 node = ctx.node()
2320 node = ctx.node()
2322 mf = ctx.manifest()
2321 mf = ctx.manifest()
2323 if node == parent:
2322 if node == parent:
2324 pmf = mf
2323 pmf = mf
2325 else:
2324 else:
2326 pmf = None
2325 pmf = None
2327
2326
2328 # need all matching names in dirstate and manifest of target rev,
2327 # need all matching names in dirstate and manifest of target rev,
2329 # so have to walk both. do not print errors if files exist in one
2328 # so have to walk both. do not print errors if files exist in one
2330 # but not other.
2329 # but not other.
2331
2330
2332 names = {}
2331 names = {}
2333 target_only = {}
2332 target_only = {}
2334
2333
2335 wlock = repo.wlock()
2334 wlock = repo.wlock()
2336 try:
2335 try:
2337 # walk dirstate.
2336 # walk dirstate.
2338 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2337 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2339 badmatch=mf.has_key):
2338 badmatch=mf.has_key):
2340 names[abs] = (rel, exact)
2339 names[abs] = (rel, exact)
2341 if src == 'b':
2340 if src == 'b':
2342 target_only[abs] = True
2341 target_only[abs] = True
2343
2342
2344 # walk target manifest.
2343 # walk target manifest.
2345
2344
2346 def badmatch(path):
2345 def badmatch(path):
2347 if path in names:
2346 if path in names:
2348 return True
2347 return True
2349 path_ = path + '/'
2348 path_ = path + '/'
2350 for f in names:
2349 for f in names:
2351 if f.startswith(path_):
2350 if f.startswith(path_):
2352 return True
2351 return True
2353 return False
2352 return False
2354
2353
2355 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2354 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2356 badmatch=badmatch):
2355 badmatch=badmatch):
2357 if abs in names or src == 'b':
2356 if abs in names or src == 'b':
2358 continue
2357 continue
2359 names[abs] = (rel, exact)
2358 names[abs] = (rel, exact)
2360 target_only[abs] = True
2359 target_only[abs] = True
2361
2360
2362 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2361 changes = repo.status(match=names.has_key)[:5]
2363 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2362 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2364
2363
2365 revert = ([], _('reverting %s\n'))
2364 revert = ([], _('reverting %s\n'))
2366 add = ([], _('adding %s\n'))
2365 add = ([], _('adding %s\n'))
2367 remove = ([], _('removing %s\n'))
2366 remove = ([], _('removing %s\n'))
2368 forget = ([], _('forgetting %s\n'))
2367 forget = ([], _('forgetting %s\n'))
2369 undelete = ([], _('undeleting %s\n'))
2368 undelete = ([], _('undeleting %s\n'))
2370 update = {}
2369 update = {}
2371
2370
2372 disptable = (
2371 disptable = (
2373 # dispatch table:
2372 # dispatch table:
2374 # file state
2373 # file state
2375 # action if in target manifest
2374 # action if in target manifest
2376 # action if not in target manifest
2375 # action if not in target manifest
2377 # make backup if in target manifest
2376 # make backup if in target manifest
2378 # make backup if not in target manifest
2377 # make backup if not in target manifest
2379 (modified, revert, remove, True, True),
2378 (modified, revert, remove, True, True),
2380 (added, revert, forget, True, False),
2379 (added, revert, forget, True, False),
2381 (removed, undelete, None, False, False),
2380 (removed, undelete, None, False, False),
2382 (deleted, revert, remove, False, False),
2381 (deleted, revert, remove, False, False),
2383 (unknown, add, None, True, False),
2382 (unknown, add, None, True, False),
2384 (target_only, add, None, False, False),
2383 (target_only, add, None, False, False),
2385 )
2384 )
2386
2385
2387 entries = names.items()
2386 entries = names.items()
2388 entries.sort()
2387 entries.sort()
2389
2388
2390 for abs, (rel, exact) in entries:
2389 for abs, (rel, exact) in entries:
2391 mfentry = mf.get(abs)
2390 mfentry = mf.get(abs)
2392 target = repo.wjoin(abs)
2391 target = repo.wjoin(abs)
2393 def handle(xlist, dobackup):
2392 def handle(xlist, dobackup):
2394 xlist[0].append(abs)
2393 xlist[0].append(abs)
2395 update[abs] = 1
2394 update[abs] = 1
2396 if dobackup and not opts['no_backup'] and util.lexists(target):
2395 if dobackup and not opts['no_backup'] and util.lexists(target):
2397 bakname = "%s.orig" % rel
2396 bakname = "%s.orig" % rel
2398 ui.note(_('saving current version of %s as %s\n') %
2397 ui.note(_('saving current version of %s as %s\n') %
2399 (rel, bakname))
2398 (rel, bakname))
2400 if not opts.get('dry_run'):
2399 if not opts.get('dry_run'):
2401 util.copyfile(target, bakname)
2400 util.copyfile(target, bakname)
2402 if ui.verbose or not exact:
2401 if ui.verbose or not exact:
2403 ui.status(xlist[1] % rel)
2402 ui.status(xlist[1] % rel)
2404 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2403 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2405 if abs not in table: continue
2404 if abs not in table: continue
2406 # file has changed in dirstate
2405 # file has changed in dirstate
2407 if mfentry:
2406 if mfentry:
2408 handle(hitlist, backuphit)
2407 handle(hitlist, backuphit)
2409 elif misslist is not None:
2408 elif misslist is not None:
2410 handle(misslist, backupmiss)
2409 handle(misslist, backupmiss)
2411 else:
2410 else:
2412 if exact: ui.warn(_('file not managed: %s\n') % rel)
2411 if exact: ui.warn(_('file not managed: %s\n') % rel)
2413 break
2412 break
2414 else:
2413 else:
2415 # file has not changed in dirstate
2414 # file has not changed in dirstate
2416 if node == parent:
2415 if node == parent:
2417 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2416 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2418 continue
2417 continue
2419 if pmf is None:
2418 if pmf is None:
2420 # only need parent manifest in this unlikely case,
2419 # only need parent manifest in this unlikely case,
2421 # so do not read by default
2420 # so do not read by default
2422 pmf = repo.changectx(parent).manifest()
2421 pmf = repo.changectx(parent).manifest()
2423 if abs in pmf:
2422 if abs in pmf:
2424 if mfentry:
2423 if mfentry:
2425 # if version of file is same in parent and target
2424 # if version of file is same in parent and target
2426 # manifests, do nothing
2425 # manifests, do nothing
2427 if pmf[abs] != mfentry:
2426 if pmf[abs] != mfentry:
2428 handle(revert, False)
2427 handle(revert, False)
2429 else:
2428 else:
2430 handle(remove, False)
2429 handle(remove, False)
2431
2430
2432 if not opts.get('dry_run'):
2431 if not opts.get('dry_run'):
2433 for f in forget[0]:
2432 for f in forget[0]:
2434 repo.dirstate.forget(f)
2433 repo.dirstate.forget(f)
2435 r = hg.revert(repo, node, update.has_key, wlock)
2434 r = hg.revert(repo, node, update.has_key)
2436 for f in add[0]:
2435 for f in add[0]:
2437 repo.dirstate.add(f)
2436 repo.dirstate.add(f)
2438 for f in undelete[0]:
2437 for f in undelete[0]:
2439 repo.dirstate.normal(f)
2438 repo.dirstate.normal(f)
2440 for f in remove[0]:
2439 for f in remove[0]:
2441 repo.dirstate.remove(f)
2440 repo.dirstate.remove(f)
2442 return r
2441 return r
2443 finally:
2442 finally:
2444 del wlock
2443 del wlock
2445
2444
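# Editorial sketch, not part of this changeset: the dispatch-table shape that
# revert() uses above, reduced to plain Python so the hit/miss routing is
# visible. All names and contents here are illustrative.
modified = {'a.txt': 1}
added    = {'b.txt': 1}
revert_list, remove_list, forget_list = [], [], []

disptable = (
    # (file-state set, action if in target manifest, action if not)
    (modified, revert_list, remove_list),
    (added,    revert_list, forget_list),
)

mf = {'a.txt': 'deadbeef'}            # stand-in for the target manifest
for name in ('a.txt', 'b.txt'):
    for table, hitlist, misslist in disptable:
        if name not in table:
            continue
        if name in mf:
            hitlist.append(name)
        else:
            misslist.append(name)
        break

print revert_list, remove_list, forget_list   # ['a.txt'] [] ['b.txt']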
2446 def rollback(ui, repo):
2445 def rollback(ui, repo):
2447 """roll back the last transaction in this repository
2446 """roll back the last transaction in this repository
2448
2447
2449 Roll back the last transaction in this repository, restoring the
2448 Roll back the last transaction in this repository, restoring the
2450 project to its state prior to the transaction.
2449 project to its state prior to the transaction.
2451
2450
2452 Transactions are used to encapsulate the effects of all commands
2451 Transactions are used to encapsulate the effects of all commands
2453 that create new changesets or propagate existing changesets into a
2452 that create new changesets or propagate existing changesets into a
2454 repository. For example, the following commands are transactional,
2453 repository. For example, the following commands are transactional,
2455 and their effects can be rolled back:
2454 and their effects can be rolled back:
2456
2455
2457 commit
2456 commit
2458 import
2457 import
2459 pull
2458 pull
2460 push (with this repository as destination)
2459 push (with this repository as destination)
2461 unbundle
2460 unbundle
2462
2461
2463 This command should be used with care. There is only one level of
2462 This command should be used with care. There is only one level of
2464 rollback, and there is no way to undo a rollback. It will also
2463 rollback, and there is no way to undo a rollback. It will also
2465 restore the dirstate at the time of the last transaction, which
2464 restore the dirstate at the time of the last transaction, which
2466 may lose subsequent dirstate changes.
2465 may lose subsequent dirstate changes.
2467
2466
2468 This command is not intended for use on public repositories. Once
2467 This command is not intended for use on public repositories. Once
2469 changes are visible for pull by other users, rolling a transaction
2468 changes are visible for pull by other users, rolling a transaction
2470 back locally is ineffective (someone else may already have pulled
2469 back locally is ineffective (someone else may already have pulled
2471 the changes). Furthermore, a race is possible with readers of the
2470 the changes). Furthermore, a race is possible with readers of the
2472 repository; for example an in-progress pull from the repository
2471 repository; for example an in-progress pull from the repository
2473 may fail if a rollback is performed.
2472 may fail if a rollback is performed.
2474 """
2473 """
2475 repo.rollback()
2474 repo.rollback()
2476
2475
2477 def root(ui, repo):
2476 def root(ui, repo):
2478 """print the root (top) of the current working dir
2477 """print the root (top) of the current working dir
2479
2478
2480 Print the root directory of the current repository.
2479 Print the root directory of the current repository.
2481 """
2480 """
2482 ui.write(repo.root + "\n")
2481 ui.write(repo.root + "\n")
2483
2482
2484 def serve(ui, repo, **opts):
2483 def serve(ui, repo, **opts):
2485 """export the repository via HTTP
2484 """export the repository via HTTP
2486
2485
2487 Start a local HTTP repository browser and pull server.
2486 Start a local HTTP repository browser and pull server.
2488
2487
2489 By default, the server logs accesses to stdout and errors to
2488 By default, the server logs accesses to stdout and errors to
2490 stderr. Use the "-A" and "-E" options to log to files.
2489 stderr. Use the "-A" and "-E" options to log to files.
2491 """
2490 """
2492
2491
2493 if opts["stdio"]:
2492 if opts["stdio"]:
2494 if repo is None:
2493 if repo is None:
2495 raise hg.RepoError(_("There is no Mercurial repository here"
2494 raise hg.RepoError(_("There is no Mercurial repository here"
2496 " (.hg not found)"))
2495 " (.hg not found)"))
2497 s = sshserver.sshserver(ui, repo)
2496 s = sshserver.sshserver(ui, repo)
2498 s.serve_forever()
2497 s.serve_forever()
2499
2498
2500 parentui = ui.parentui or ui
2499 parentui = ui.parentui or ui
2501 optlist = ("name templates style address port ipv6"
2500 optlist = ("name templates style address port ipv6"
2502 " accesslog errorlog webdir_conf certificate")
2501 " accesslog errorlog webdir_conf certificate")
2503 for o in optlist.split():
2502 for o in optlist.split():
2504 if opts[o]:
2503 if opts[o]:
2505 parentui.setconfig("web", o, str(opts[o]))
2504 parentui.setconfig("web", o, str(opts[o]))
2506 if repo.ui != parentui:
2505 if repo.ui != parentui:
2507 repo.ui.setconfig("web", o, str(opts[o]))
2506 repo.ui.setconfig("web", o, str(opts[o]))
2508
2507
2509 if repo is None and not ui.config("web", "webdir_conf"):
2508 if repo is None and not ui.config("web", "webdir_conf"):
2510 raise hg.RepoError(_("There is no Mercurial repository here"
2509 raise hg.RepoError(_("There is no Mercurial repository here"
2511 " (.hg not found)"))
2510 " (.hg not found)"))
2512
2511
2513 class service:
2512 class service:
2514 def init(self):
2513 def init(self):
2515 util.set_signal_handler()
2514 util.set_signal_handler()
2516 try:
2515 try:
2517 self.httpd = hgweb.server.create_server(parentui, repo)
2516 self.httpd = hgweb.server.create_server(parentui, repo)
2518 except socket.error, inst:
2517 except socket.error, inst:
2519 raise util.Abort(_('cannot start server: ') + inst.args[1])
2518 raise util.Abort(_('cannot start server: ') + inst.args[1])
2520
2519
2521 if not ui.verbose: return
2520 if not ui.verbose: return
2522
2521
2523 if self.httpd.port != 80:
2522 if self.httpd.port != 80:
2524 ui.status(_('listening at http://%s:%d/\n') %
2523 ui.status(_('listening at http://%s:%d/\n') %
2525 (self.httpd.addr, self.httpd.port))
2524 (self.httpd.addr, self.httpd.port))
2526 else:
2525 else:
2527 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2526 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2528
2527
2529 def run(self):
2528 def run(self):
2530 self.httpd.serve_forever()
2529 self.httpd.serve_forever()
2531
2530
2532 service = service()
2531 service = service()
2533
2532
2534 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2533 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2535
2534
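# Editorial sketch, not part of this changeset: the init/run split that
# cmdutil.service() expects, shown with a stdlib stand-in for hgweb so the
# shape is visible on its own. The address and port are illustrative.
import SocketServer, SimpleHTTPServer

class toyservice:
    def init(self):
        # bind while still in the foreground so bind errors are reported
        self.httpd = SocketServer.TCPServer(
            ('127.0.0.1', 8000), SimpleHTTPServer.SimpleHTTPRequestHandler)
    def run(self):
        self.httpd.serve_forever()

svc = toyservice()
# cmdutil.service(opts, initfn=svc.init, runfn=svc.run) would daemonize here;
# run in the foreground instead:
svc.init()
svc.run()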
2536 def status(ui, repo, *pats, **opts):
2535 def status(ui, repo, *pats, **opts):
2537 """show changed files in the working directory
2536 """show changed files in the working directory
2538
2537
2539 Show status of files in the repository. If names are given, only
2538 Show status of files in the repository. If names are given, only
2540 files that match are shown. Files that are clean or ignored are
2539 files that match are shown. Files that are clean or ignored are
2541 not listed unless -c (clean), -i (ignored) or -A is given.
2540 not listed unless -c (clean), -i (ignored) or -A is given.
2542
2541
2543 NOTE: status may appear to disagree with diff if permissions have
2542 NOTE: status may appear to disagree with diff if permissions have
2544 changed or a merge has occurred. The standard diff format does not
2543 changed or a merge has occurred. The standard diff format does not
2545 report permission changes and diff only reports changes relative
2544 report permission changes and diff only reports changes relative
2546 to one merge parent.
2545 to one merge parent.
2547
2546
2548 If one revision is given, it is used as the base revision.
2547 If one revision is given, it is used as the base revision.
2549 If two revisions are given, the difference between them is shown.
2548 If two revisions are given, the difference between them is shown.
2550
2549
2551 The codes used to show the status of files are:
2550 The codes used to show the status of files are:
2552 M = modified
2551 M = modified
2553 A = added
2552 A = added
2554 R = removed
2553 R = removed
2555 C = clean
2554 C = clean
2556 ! = deleted, but still tracked
2555 ! = deleted, but still tracked
2557 ? = not tracked
2556 ? = not tracked
2558 I = ignored (not shown by default)
2557 I = ignored (not shown by default)
2559 = the previous added file was copied from here
2558 = the previous added file was copied from here
2560 """
2559 """
2561
2560
2562 all = opts['all']
2561 all = opts['all']
2563 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2562 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2564
2563
2565 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2564 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2566 cwd = (pats and repo.getcwd()) or ''
2565 cwd = (pats and repo.getcwd()) or ''
2567 modified, added, removed, deleted, unknown, ignored, clean = [
2566 modified, added, removed, deleted, unknown, ignored, clean = [
2568 n for n in repo.status(node1=node1, node2=node2, files=files,
2567 n for n in repo.status(node1=node1, node2=node2, files=files,
2569 match=matchfn,
2568 match=matchfn,
2570 list_ignored=all or opts['ignored'],
2569 list_ignored=all or opts['ignored'],
2571 list_clean=all or opts['clean'])]
2570 list_clean=all or opts['clean'])]
2572
2571
2573 changetypes = (('modified', 'M', modified),
2572 changetypes = (('modified', 'M', modified),
2574 ('added', 'A', added),
2573 ('added', 'A', added),
2575 ('removed', 'R', removed),
2574 ('removed', 'R', removed),
2576 ('deleted', '!', deleted),
2575 ('deleted', '!', deleted),
2577 ('unknown', '?', unknown),
2576 ('unknown', '?', unknown),
2578 ('ignored', 'I', ignored))
2577 ('ignored', 'I', ignored))
2579
2578
2580 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2579 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2581
2580
2582 end = opts['print0'] and '\0' or '\n'
2581 end = opts['print0'] and '\0' or '\n'
2583
2582
2584 for opt, char, changes in ([ct for ct in explicit_changetypes
2583 for opt, char, changes in ([ct for ct in explicit_changetypes
2585 if all or opts[ct[0]]]
2584 if all or opts[ct[0]]]
2586 or changetypes):
2585 or changetypes):
2587 if opts['no_status']:
2586 if opts['no_status']:
2588 format = "%%s%s" % end
2587 format = "%%s%s" % end
2589 else:
2588 else:
2590 format = "%s %%s%s" % (char, end)
2589 format = "%s %%s%s" % (char, end)
2591
2590
2592 for f in changes:
2591 for f in changes:
2593 ui.write(format % repo.pathto(f, cwd))
2592 ui.write(format % repo.pathto(f, cwd))
2594 if ((all or opts.get('copies')) and not opts.get('no_status')):
2593 if ((all or opts.get('copies')) and not opts.get('no_status')):
2595 copied = repo.dirstate.copied(f)
2594 copied = repo.dirstate.copied(f)
2596 if copied:
2595 if copied:
2597 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2596 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2598
2597
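# Editorial sketch, not part of this changeset: emitting the one-letter codes
# documented above from the same status tuple. Assumes a repository in the
# current directory; clean and ignored files are requested explicitly.
from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
modified, added, removed, deleted, unknown, ignored, clean = \
    repo.status(list_ignored=True, list_clean=True)
for char, files in (('M', modified), ('A', added), ('R', removed),
                    ('!', deleted), ('?', unknown), ('I', ignored),
                    ('C', clean)):
    for f in files:
        print "%s %s" % (char, f)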
2599 def tag(ui, repo, name, rev_=None, **opts):
2598 def tag(ui, repo, name, rev_=None, **opts):
2600 """add a tag for the current or given revision
2599 """add a tag for the current or given revision
2601
2600
2602 Name a particular revision using <name>.
2601 Name a particular revision using <name>.
2603
2602
2604 Tags are used to name particular revisions of the repository and are
2603 Tags are used to name particular revisions of the repository and are
2605 very useful to compare different revisions, to go back to significant
2604 very useful to compare different revisions, to go back to significant
2606 earlier versions or to mark branch points as releases, etc.
2605 earlier versions or to mark branch points as releases, etc.
2607
2606
2608 If no revision is given, the parent of the working directory is used,
2607 If no revision is given, the parent of the working directory is used,
2609 or tip if no revision is checked out.
2608 or tip if no revision is checked out.
2610
2609
2611 To facilitate version control, distribution, and merging of tags,
2610 To facilitate version control, distribution, and merging of tags,
2612 they are stored as a file named ".hgtags" which is managed
2611 they are stored as a file named ".hgtags" which is managed
2613 similarly to other project files and can be hand-edited if
2612 similarly to other project files and can be hand-edited if
2614 necessary. The file '.hg/localtags' is used for local tags (not
2613 necessary. The file '.hg/localtags' is used for local tags (not
2615 shared among repositories).
2614 shared among repositories).
2616 """
2615 """
2617 if name in ['tip', '.', 'null']:
2616 if name in ['tip', '.', 'null']:
2618 raise util.Abort(_("the name '%s' is reserved") % name)
2617 raise util.Abort(_("the name '%s' is reserved") % name)
2619 if rev_ is not None:
2618 if rev_ is not None:
2620 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2619 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2621 "please use 'hg tag [-r REV] NAME' instead\n"))
2620 "please use 'hg tag [-r REV] NAME' instead\n"))
2622 if opts['rev']:
2621 if opts['rev']:
2623 raise util.Abort(_("use only one form to specify the revision"))
2622 raise util.Abort(_("use only one form to specify the revision"))
2624 if opts['rev'] and opts['remove']:
2623 if opts['rev'] and opts['remove']:
2625 raise util.Abort(_("--rev and --remove are incompatible"))
2624 raise util.Abort(_("--rev and --remove are incompatible"))
2626 if opts['rev']:
2625 if opts['rev']:
2627 rev_ = opts['rev']
2626 rev_ = opts['rev']
2628 message = opts['message']
2627 message = opts['message']
2629 if opts['remove']:
2628 if opts['remove']:
2630 if not name in repo.tags():
2629 if not name in repo.tags():
2631 raise util.Abort(_('tag %s does not exist') % name)
2630 raise util.Abort(_('tag %s does not exist') % name)
2632 rev_ = nullid
2631 rev_ = nullid
2633 if not message:
2632 if not message:
2634 message = _('Removed tag %s') % name
2633 message = _('Removed tag %s') % name
2635 elif name in repo.tags() and not opts['force']:
2634 elif name in repo.tags() and not opts['force']:
2636 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2635 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2637 % name)
2636 % name)
2638 if not rev_ and repo.dirstate.parents()[1] != nullid:
2637 if not rev_ and repo.dirstate.parents()[1] != nullid:
2639 raise util.Abort(_('uncommitted merge - please provide a '
2638 raise util.Abort(_('uncommitted merge - please provide a '
2640 'specific revision'))
2639 'specific revision'))
2641 r = repo.changectx(rev_).node()
2640 r = repo.changectx(rev_).node()
2642
2641
2643 if not message:
2642 if not message:
2644 message = _('Added tag %s for changeset %s') % (name, short(r))
2643 message = _('Added tag %s for changeset %s') % (name, short(r))
2645
2644
2646 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2645 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2647
2646
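# Editorial sketch, not part of this changeset: tagging the working
# directory's parent through the same repo.tag() call used above. The tag
# name and message are illustrative.
from mercurial import ui, hg
from mercurial.node import short

u = ui.ui()
repo = hg.repository(u, '.')
r = repo.changectx(None).node()              # parent of the working directory
message = 'Added tag v1.0 for changeset %s' % short(r)
repo.tag('v1.0', r, message, False, None, None)   # local=False, default user/date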
2648 def tags(ui, repo):
2647 def tags(ui, repo):
2649 """list repository tags
2648 """list repository tags
2650
2649
2651 List the repository tags.
2650 List the repository tags.
2652
2651
2653 This lists both regular and local tags.
2652 This lists both regular and local tags.
2654 """
2653 """
2655
2654
2656 l = repo.tagslist()
2655 l = repo.tagslist()
2657 l.reverse()
2656 l.reverse()
2658 hexfunc = ui.debugflag and hex or short
2657 hexfunc = ui.debugflag and hex or short
2659 for t, n in l:
2658 for t, n in l:
2660 try:
2659 try:
2661 hn = hexfunc(n)
2660 hn = hexfunc(n)
2662 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2661 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2663 except revlog.LookupError:
2662 except revlog.LookupError:
2664 r = " ?:%s" % hn
2663 r = " ?:%s" % hn
2665 if ui.quiet:
2664 if ui.quiet:
2666 ui.write("%s\n" % t)
2665 ui.write("%s\n" % t)
2667 else:
2666 else:
2668 spaces = " " * (30 - util.locallen(t))
2667 spaces = " " * (30 - util.locallen(t))
2669 ui.write("%s%s %s\n" % (t, spaces, r))
2668 ui.write("%s%s %s\n" % (t, spaces, r))
2670
2669
2671 def tip(ui, repo, **opts):
2670 def tip(ui, repo, **opts):
2672 """show the tip revision
2671 """show the tip revision
2673
2672
2674 Show the tip revision.
2673 Show the tip revision.
2675 """
2674 """
2676 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2675 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2677
2676
2678 def unbundle(ui, repo, fname1, *fnames, **opts):
2677 def unbundle(ui, repo, fname1, *fnames, **opts):
2679 """apply one or more changegroup files
2678 """apply one or more changegroup files
2680
2679
2681 Apply one or more compressed changegroup files generated by the
2680 Apply one or more compressed changegroup files generated by the
2682 bundle command.
2681 bundle command.
2683 """
2682 """
2684 fnames = (fname1,) + fnames
2683 fnames = (fname1,) + fnames
2685 result = None
2684 result = None
2686 wasempty = repo.changelog.count() == 0
2685 wasempty = repo.changelog.count() == 0
2687 for fname in fnames:
2686 for fname in fnames:
2688 if os.path.exists(fname):
2687 if os.path.exists(fname):
2689 f = open(fname, "rb")
2688 f = open(fname, "rb")
2690 else:
2689 else:
2691 f = urllib.urlopen(fname)
2690 f = urllib.urlopen(fname)
2692 gen = changegroup.readbundle(f, fname)
2691 gen = changegroup.readbundle(f, fname)
2693 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2692 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2694
2693
2695 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2694 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2696
2695
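# Editorial sketch, not part of this changeset: applying a single bundle file
# through the same readbundle/addchangegroup pair used above. The bundle path
# is illustrative.
from mercurial import ui, hg, changegroup

u = ui.ui()
repo = hg.repository(u, '.')
fname = 'incoming.hg'
f = open(fname, "rb")
gen = changegroup.readbundle(f, fname)
modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)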
2697 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2696 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2698 """update working directory
2697 """update working directory
2699
2698
2700 Update the working directory to the specified revision, or the
2699 Update the working directory to the specified revision, or the
2701 tip of the current branch if none is specified.
2700 tip of the current branch if none is specified.
2702
2701
2703 If there are no outstanding changes in the working directory and
2702 If there are no outstanding changes in the working directory and
2704 there is a linear relationship between the current version and the
2703 there is a linear relationship between the current version and the
2705 requested version, the result is the requested version.
2704 requested version, the result is the requested version.
2706
2705
2707 To merge the working directory with another revision, use the
2706 To merge the working directory with another revision, use the
2708 merge command.
2707 merge command.
2709
2708
2710 By default, update will refuse to run if doing so would require
2709 By default, update will refuse to run if doing so would require
2711 discarding local changes.
2710 discarding local changes.
2712 """
2711 """
2713 if rev and node:
2712 if rev and node:
2714 raise util.Abort(_("please specify just one revision"))
2713 raise util.Abort(_("please specify just one revision"))
2715
2714
2716 if not rev:
2715 if not rev:
2717 rev = node
2716 rev = node
2718
2717
2719 if date:
2718 if date:
2720 if rev:
2719 if rev:
2721 raise util.Abort(_("you can't specify a revision and a date"))
2720 raise util.Abort(_("you can't specify a revision and a date"))
2722 rev = cmdutil.finddate(ui, repo, date)
2721 rev = cmdutil.finddate(ui, repo, date)
2723
2722
2724 if clean:
2723 if clean:
2725 return hg.clean(repo, rev)
2724 return hg.clean(repo, rev)
2726 else:
2725 else:
2727 return hg.update(repo, rev)
2726 return hg.update(repo, rev)
2728
2727
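# Editorial sketch, not part of this changeset: the two exits of update()
# above. hg.update() refuses to discard uncommitted changes; hg.clean() is
# the --clean variant that overwrites them. 'tip' is an illustrative target.
from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
hg.update(repo, 'tip')     # plain update, keeps local modifications safe
# hg.clean(repo, 'tip')    # forcible checkout used by 'hg update --clean'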
2729 def verify(ui, repo):
2728 def verify(ui, repo):
2730 """verify the integrity of the repository
2729 """verify the integrity of the repository
2731
2730
2732 Verify the integrity of the current repository.
2731 Verify the integrity of the current repository.
2733
2732
2734 This will perform an extensive check of the repository's
2733 This will perform an extensive check of the repository's
2735 integrity, validating the hashes and checksums of each entry in
2734 integrity, validating the hashes and checksums of each entry in
2736 the changelog, manifest, and tracked files, as well as the
2735 the changelog, manifest, and tracked files, as well as the
2737 integrity of their crosslinks and indices.
2736 integrity of their crosslinks and indices.
2738 """
2737 """
2739 return hg.verify(repo)
2738 return hg.verify(repo)
2740
2739
2741 def version_(ui):
2740 def version_(ui):
2742 """output version and copyright information"""
2741 """output version and copyright information"""
2743 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2742 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2744 % version.get_version())
2743 % version.get_version())
2745 ui.status(_(
2744 ui.status(_(
2746 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2745 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2747 "This is free software; see the source for copying conditions. "
2746 "This is free software; see the source for copying conditions. "
2748 "There is NO\nwarranty; "
2747 "There is NO\nwarranty; "
2749 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2748 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2750 ))
2749 ))
2751
2750
2752 # Command options and aliases are listed here, alphabetically
2751 # Command options and aliases are listed here, alphabetically
2753
2752
2754 globalopts = [
2753 globalopts = [
2755 ('R', 'repository', '',
2754 ('R', 'repository', '',
2756 _('repository root directory or symbolic path name')),
2755 _('repository root directory or symbolic path name')),
2757 ('', 'cwd', '', _('change working directory')),
2756 ('', 'cwd', '', _('change working directory')),
2758 ('y', 'noninteractive', None,
2757 ('y', 'noninteractive', None,
2759 _('do not prompt, assume \'yes\' for any required answers')),
2758 _('do not prompt, assume \'yes\' for any required answers')),
2760 ('q', 'quiet', None, _('suppress output')),
2759 ('q', 'quiet', None, _('suppress output')),
2761 ('v', 'verbose', None, _('enable additional output')),
2760 ('v', 'verbose', None, _('enable additional output')),
2762 ('', 'config', [], _('set/override config option')),
2761 ('', 'config', [], _('set/override config option')),
2763 ('', 'debug', None, _('enable debugging output')),
2762 ('', 'debug', None, _('enable debugging output')),
2764 ('', 'debugger', None, _('start debugger')),
2763 ('', 'debugger', None, _('start debugger')),
2765 ('', 'encoding', util._encoding, _('set the charset encoding')),
2764 ('', 'encoding', util._encoding, _('set the charset encoding')),
2766 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2765 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2767 ('', 'lsprof', None, _('print improved command execution profile')),
2766 ('', 'lsprof', None, _('print improved command execution profile')),
2768 ('', 'traceback', None, _('print traceback on exception')),
2767 ('', 'traceback', None, _('print traceback on exception')),
2769 ('', 'time', None, _('time how long the command takes')),
2768 ('', 'time', None, _('time how long the command takes')),
2770 ('', 'profile', None, _('print command execution profile')),
2769 ('', 'profile', None, _('print command execution profile')),
2771 ('', 'version', None, _('output version information and exit')),
2770 ('', 'version', None, _('output version information and exit')),
2772 ('h', 'help', None, _('display help and exit')),
2771 ('h', 'help', None, _('display help and exit')),
2773 ]
2772 ]
2774
2773
2775 dryrunopts = [('n', 'dry-run', None,
2774 dryrunopts = [('n', 'dry-run', None,
2776 _('do not perform actions, just print output'))]
2775 _('do not perform actions, just print output'))]
2777
2776
2778 remoteopts = [
2777 remoteopts = [
2779 ('e', 'ssh', '', _('specify ssh command to use')),
2778 ('e', 'ssh', '', _('specify ssh command to use')),
2780 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2779 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2781 ]
2780 ]
2782
2781
2783 walkopts = [
2782 walkopts = [
2784 ('I', 'include', [], _('include names matching the given patterns')),
2783 ('I', 'include', [], _('include names matching the given patterns')),
2785 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2784 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2786 ]
2785 ]
2787
2786
2788 commitopts = [
2787 commitopts = [
2789 ('m', 'message', '', _('use <text> as commit message')),
2788 ('m', 'message', '', _('use <text> as commit message')),
2790 ('l', 'logfile', '', _('read commit message from <file>')),
2789 ('l', 'logfile', '', _('read commit message from <file>')),
2791 ]
2790 ]
2792
2791
2793 table = {
2792 table = {
2794 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2793 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2795 "addremove":
2794 "addremove":
2796 (addremove,
2795 (addremove,
2797 [('s', 'similarity', '',
2796 [('s', 'similarity', '',
2798 _('guess renamed files by similarity (0<=s<=100)')),
2797 _('guess renamed files by similarity (0<=s<=100)')),
2799 ] + walkopts + dryrunopts,
2798 ] + walkopts + dryrunopts,
2800 _('hg addremove [OPTION]... [FILE]...')),
2799 _('hg addremove [OPTION]... [FILE]...')),
2801 "^annotate":
2800 "^annotate":
2802 (annotate,
2801 (annotate,
2803 [('r', 'rev', '', _('annotate the specified revision')),
2802 [('r', 'rev', '', _('annotate the specified revision')),
2804 ('f', 'follow', None, _('follow file copies and renames')),
2803 ('f', 'follow', None, _('follow file copies and renames')),
2805 ('a', 'text', None, _('treat all files as text')),
2804 ('a', 'text', None, _('treat all files as text')),
2806 ('u', 'user', None, _('list the author')),
2805 ('u', 'user', None, _('list the author')),
2807 ('d', 'date', None, _('list the date')),
2806 ('d', 'date', None, _('list the date')),
2808 ('n', 'number', None, _('list the revision number (default)')),
2807 ('n', 'number', None, _('list the revision number (default)')),
2809 ('c', 'changeset', None, _('list the changeset')),
2808 ('c', 'changeset', None, _('list the changeset')),
2810 ('l', 'line-number', None,
2809 ('l', 'line-number', None,
2811 _('show line number at the first appearance'))
2810 _('show line number at the first appearance'))
2812 ] + walkopts,
2811 ] + walkopts,
2813 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2812 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2814 "archive":
2813 "archive":
2815 (archive,
2814 (archive,
2816 [('', 'no-decode', None, _('do not pass files through decoders')),
2815 [('', 'no-decode', None, _('do not pass files through decoders')),
2817 ('p', 'prefix', '', _('directory prefix for files in archive')),
2816 ('p', 'prefix', '', _('directory prefix for files in archive')),
2818 ('r', 'rev', '', _('revision to distribute')),
2817 ('r', 'rev', '', _('revision to distribute')),
2819 ('t', 'type', '', _('type of distribution to create')),
2818 ('t', 'type', '', _('type of distribution to create')),
2820 ] + walkopts,
2819 ] + walkopts,
2821 _('hg archive [OPTION]... DEST')),
2820 _('hg archive [OPTION]... DEST')),
2822 "backout":
2821 "backout":
2823 (backout,
2822 (backout,
2824 [('', 'merge', None,
2823 [('', 'merge', None,
2825 _('merge with old dirstate parent after backout')),
2824 _('merge with old dirstate parent after backout')),
2826 ('d', 'date', '', _('record datecode as commit date')),
2825 ('d', 'date', '', _('record datecode as commit date')),
2827 ('', 'parent', '', _('parent to choose when backing out merge')),
2826 ('', 'parent', '', _('parent to choose when backing out merge')),
2828 ('u', 'user', '', _('record user as committer')),
2827 ('u', 'user', '', _('record user as committer')),
2829 ('r', 'rev', '', _('revision to backout')),
2828 ('r', 'rev', '', _('revision to backout')),
2830 ] + walkopts + commitopts,
2829 ] + walkopts + commitopts,
2831 _('hg backout [OPTION]... [-r] REV')),
2830 _('hg backout [OPTION]... [-r] REV')),
2832 "branch":
2831 "branch":
2833 (branch,
2832 (branch,
2834 [('f', 'force', None,
2833 [('f', 'force', None,
2835 _('set branch name even if it shadows an existing branch'))],
2834 _('set branch name even if it shadows an existing branch'))],
2836 _('hg branch [NAME]')),
2835 _('hg branch [NAME]')),
2837 "branches":
2836 "branches":
2838 (branches,
2837 (branches,
2839 [('a', 'active', False,
2838 [('a', 'active', False,
2840 _('show only branches that have unmerged heads'))],
2839 _('show only branches that have unmerged heads'))],
2841 _('hg branches [-a]')),
2840 _('hg branches [-a]')),
2842 "bundle":
2841 "bundle":
2843 (bundle,
2842 (bundle,
2844 [('f', 'force', None,
2843 [('f', 'force', None,
2845 _('run even when remote repository is unrelated')),
2844 _('run even when remote repository is unrelated')),
2846 ('r', 'rev', [],
2845 ('r', 'rev', [],
2847 _('a changeset you would like to bundle')),
2846 _('a changeset you would like to bundle')),
2848 ('', 'base', [],
2847 ('', 'base', [],
2849 _('a base changeset to specify instead of a destination')),
2848 _('a base changeset to specify instead of a destination')),
2850 ] + remoteopts,
2849 ] + remoteopts,
2851 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2850 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2852 "cat":
2851 "cat":
2853 (cat,
2852 (cat,
2854 [('o', 'output', '', _('print output to file with formatted name')),
2853 [('o', 'output', '', _('print output to file with formatted name')),
2855 ('r', 'rev', '', _('print the given revision')),
2854 ('r', 'rev', '', _('print the given revision')),
2856 ] + walkopts,
2855 ] + walkopts,
2857 _('hg cat [OPTION]... FILE...')),
2856 _('hg cat [OPTION]... FILE...')),
2858 "^clone":
2857 "^clone":
2859 (clone,
2858 (clone,
2860 [('U', 'noupdate', None, _('do not update the new working directory')),
2859 [('U', 'noupdate', None, _('do not update the new working directory')),
2861 ('r', 'rev', [],
2860 ('r', 'rev', [],
2862 _('a changeset you would like to have after cloning')),
2861 _('a changeset you would like to have after cloning')),
2863 ('', 'pull', None, _('use pull protocol to copy metadata')),
2862 ('', 'pull', None, _('use pull protocol to copy metadata')),
2864 ('', 'uncompressed', None,
2863 ('', 'uncompressed', None,
2865 _('use uncompressed transfer (fast over LAN)')),
2864 _('use uncompressed transfer (fast over LAN)')),
2866 ] + remoteopts,
2865 ] + remoteopts,
2867 _('hg clone [OPTION]... SOURCE [DEST]')),
2866 _('hg clone [OPTION]... SOURCE [DEST]')),
2868 "^commit|ci":
2867 "^commit|ci":
2869 (commit,
2868 (commit,
2870 [('A', 'addremove', None,
2869 [('A', 'addremove', None,
2871 _('mark new/missing files as added/removed before committing')),
2870 _('mark new/missing files as added/removed before committing')),
2872 ('d', 'date', '', _('record datecode as commit date')),
2871 ('d', 'date', '', _('record datecode as commit date')),
2873 ('u', 'user', '', _('record user as committer')),
2872 ('u', 'user', '', _('record user as committer')),
2874 ] + walkopts + commitopts,
2873 ] + walkopts + commitopts,
2875 _('hg commit [OPTION]... [FILE]...')),
2874 _('hg commit [OPTION]... [FILE]...')),
2876 "copy|cp":
2875 "copy|cp":
2877 (copy,
2876 (copy,
2878 [('A', 'after', None, _('record a copy that has already occurred')),
2877 [('A', 'after', None, _('record a copy that has already occurred')),
2879 ('f', 'force', None,
2878 ('f', 'force', None,
2880 _('forcibly copy over an existing managed file')),
2879 _('forcibly copy over an existing managed file')),
2881 ] + walkopts + dryrunopts,
2880 ] + walkopts + dryrunopts,
2882 _('hg copy [OPTION]... [SOURCE]... DEST')),
2881 _('hg copy [OPTION]... [SOURCE]... DEST')),
2883 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2882 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2884 "debugcomplete":
2883 "debugcomplete":
2885 (debugcomplete,
2884 (debugcomplete,
2886 [('o', 'options', None, _('show the command options'))],
2885 [('o', 'options', None, _('show the command options'))],
2887 _('debugcomplete [-o] CMD')),
2886 _('debugcomplete [-o] CMD')),
2888 "debuginstall": (debuginstall, [], _('debuginstall')),
2887 "debuginstall": (debuginstall, [], _('debuginstall')),
2889 "debugrebuildstate":
2888 "debugrebuildstate":
2890 (debugrebuildstate,
2889 (debugrebuildstate,
2891 [('r', 'rev', '', _('revision to rebuild to'))],
2890 [('r', 'rev', '', _('revision to rebuild to'))],
2892 _('debugrebuildstate [-r REV] [REV]')),
2891 _('debugrebuildstate [-r REV] [REV]')),
2893 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2892 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2894 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2893 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2895 "debugstate": (debugstate, [], _('debugstate')),
2894 "debugstate": (debugstate, [], _('debugstate')),
2896 "debugdate":
2895 "debugdate":
2897 (debugdate,
2896 (debugdate,
2898 [('e', 'extended', None, _('try extended date formats'))],
2897 [('e', 'extended', None, _('try extended date formats'))],
2899 _('debugdate [-e] DATE [RANGE]')),
2898 _('debugdate [-e] DATE [RANGE]')),
2900 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2899 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2901 "debugindex": (debugindex, [], _('debugindex FILE')),
2900 "debugindex": (debugindex, [], _('debugindex FILE')),
2902 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2901 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2903 "debugrename":
2902 "debugrename":
2904 (debugrename,
2903 (debugrename,
2905 [('r', 'rev', '', _('revision to debug'))],
2904 [('r', 'rev', '', _('revision to debug'))],
2906 _('debugrename [-r REV] FILE')),
2905 _('debugrename [-r REV] FILE')),
2907 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2906 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2908 "^diff":
2907 "^diff":
2909 (diff,
2908 (diff,
2910 [('r', 'rev', [], _('revision')),
2909 [('r', 'rev', [], _('revision')),
2911 ('a', 'text', None, _('treat all files as text')),
2910 ('a', 'text', None, _('treat all files as text')),
2912 ('p', 'show-function', None,
2911 ('p', 'show-function', None,
2913 _('show which function each change is in')),
2912 _('show which function each change is in')),
2914 ('g', 'git', None, _('use git extended diff format')),
2913 ('g', 'git', None, _('use git extended diff format')),
2915 ('', 'nodates', None, _("don't include dates in diff headers")),
2914 ('', 'nodates', None, _("don't include dates in diff headers")),
2916 ('w', 'ignore-all-space', None,
2915 ('w', 'ignore-all-space', None,
2917 _('ignore white space when comparing lines')),
2916 _('ignore white space when comparing lines')),
2918 ('b', 'ignore-space-change', None,
2917 ('b', 'ignore-space-change', None,
2919 _('ignore changes in the amount of white space')),
2918 _('ignore changes in the amount of white space')),
2920 ('B', 'ignore-blank-lines', None,
2919 ('B', 'ignore-blank-lines', None,
2921 _('ignore changes whose lines are all blank')),
2920 _('ignore changes whose lines are all blank')),
2922 ] + walkopts,
2921 ] + walkopts,
2923 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2922 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2924 "^export":
2923 "^export":
2925 (export,
2924 (export,
2926 [('o', 'output', '', _('print output to file with formatted name')),
2925 [('o', 'output', '', _('print output to file with formatted name')),
2927 ('a', 'text', None, _('treat all files as text')),
2926 ('a', 'text', None, _('treat all files as text')),
2928 ('g', 'git', None, _('use git extended diff format')),
2927 ('g', 'git', None, _('use git extended diff format')),
2929 ('', 'nodates', None, _("don't include dates in diff headers")),
2928 ('', 'nodates', None, _("don't include dates in diff headers")),
2930 ('', 'switch-parent', None, _('diff against the second parent'))],
2929 ('', 'switch-parent', None, _('diff against the second parent'))],
2931 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2930 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2932 "grep":
2931 "grep":
2933 (grep,
2932 (grep,
2934 [('0', 'print0', None, _('end fields with NUL')),
2933 [('0', 'print0', None, _('end fields with NUL')),
2935 ('', 'all', None, _('print all revisions that match')),
2934 ('', 'all', None, _('print all revisions that match')),
2936 ('f', 'follow', None,
2935 ('f', 'follow', None,
2937 _('follow changeset history, or file history across copies and renames')),
2936 _('follow changeset history, or file history across copies and renames')),
2938 ('i', 'ignore-case', None, _('ignore case when matching')),
2937 ('i', 'ignore-case', None, _('ignore case when matching')),
2939 ('l', 'files-with-matches', None,
2938 ('l', 'files-with-matches', None,
2940 _('print only filenames and revs that match')),
2939 _('print only filenames and revs that match')),
2941 ('n', 'line-number', None, _('print matching line numbers')),
2940 ('n', 'line-number', None, _('print matching line numbers')),
2942 ('r', 'rev', [], _('search in given revision range')),
2941 ('r', 'rev', [], _('search in given revision range')),
2943 ('u', 'user', None, _('print user who committed change')),
2942 ('u', 'user', None, _('print user who committed change')),
2944 ] + walkopts,
2943 ] + walkopts,
2945 _('hg grep [OPTION]... PATTERN [FILE]...')),
2944 _('hg grep [OPTION]... PATTERN [FILE]...')),
2946 "heads":
2945 "heads":
2947 (heads,
2946 (heads,
2948 [('', 'style', '', _('display using template map file')),
2947 [('', 'style', '', _('display using template map file')),
2949 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2948 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2950 ('', 'template', '', _('display with template'))],
2949 ('', 'template', '', _('display with template'))],
2951 _('hg heads [-r REV] [REV]...')),
2950 _('hg heads [-r REV] [REV]...')),
2952 "help": (help_, [], _('hg help [COMMAND]')),
2951 "help": (help_, [], _('hg help [COMMAND]')),
2953 "identify|id":
2952 "identify|id":
2954 (identify,
2953 (identify,
2955 [('r', 'rev', '', _('identify the specified rev')),
2954 [('r', 'rev', '', _('identify the specified rev')),
2956 ('n', 'num', None, _('show local revision number')),
2955 ('n', 'num', None, _('show local revision number')),
2957 ('i', 'id', None, _('show global revision id')),
2956 ('i', 'id', None, _('show global revision id')),
2958 ('b', 'branch', None, _('show branch')),
2957 ('b', 'branch', None, _('show branch')),
2959 ('t', 'tags', None, _('show tags'))],
2958 ('t', 'tags', None, _('show tags'))],
2960 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2959 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2961 "import|patch":
2960 "import|patch":
2962 (import_,
2961 (import_,
2963 [('p', 'strip', 1,
2962 [('p', 'strip', 1,
2964 _('directory strip option for patch. This has the same\n'
2963 _('directory strip option for patch. This has the same\n'
2965 'meaning as the corresponding patch option')),
2964 'meaning as the corresponding patch option')),
2966 ('b', 'base', '', _('base path')),
2965 ('b', 'base', '', _('base path')),
2967 ('f', 'force', None,
2966 ('f', 'force', None,
2968 _('skip check for outstanding uncommitted changes')),
2967 _('skip check for outstanding uncommitted changes')),
2969 ('', 'exact', None,
2968 ('', 'exact', None,
2970 _('apply patch to the nodes from which it was generated')),
2969 _('apply patch to the nodes from which it was generated')),
2971 ('', 'import-branch', None,
2970 ('', 'import-branch', None,
2972 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2971 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2973 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2972 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2974 "incoming|in": (incoming,
2973 "incoming|in": (incoming,
2975 [('M', 'no-merges', None, _('do not show merges')),
2974 [('M', 'no-merges', None, _('do not show merges')),
2976 ('f', 'force', None,
2975 ('f', 'force', None,
2977 _('run even when remote repository is unrelated')),
2976 _('run even when remote repository is unrelated')),
2978 ('', 'style', '', _('display using template map file')),
2977 ('', 'style', '', _('display using template map file')),
2979 ('n', 'newest-first', None, _('show newest record first')),
2978 ('n', 'newest-first', None, _('show newest record first')),
2980 ('', 'bundle', '', _('file to store the bundles into')),
2979 ('', 'bundle', '', _('file to store the bundles into')),
2981 ('p', 'patch', None, _('show patch')),
2980 ('p', 'patch', None, _('show patch')),
2982 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2981 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2983 ('', 'template', '', _('display with template')),
2982 ('', 'template', '', _('display with template')),
2984 ] + remoteopts,
2983 ] + remoteopts,
2985 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2984 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2986 ' [--bundle FILENAME] [SOURCE]')),
2985 ' [--bundle FILENAME] [SOURCE]')),
2987 "^init":
2986 "^init":
2988 (init,
2987 (init,
2989 remoteopts,
2988 remoteopts,
2990 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2989 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2991 "locate":
2990 "locate":
2992 (locate,
2991 (locate,
2993 [('r', 'rev', '', _('search the repository as it stood at rev')),
2992 [('r', 'rev', '', _('search the repository as it stood at rev')),
2994 ('0', 'print0', None,
2993 ('0', 'print0', None,
2995 _('end filenames with NUL, for use with xargs')),
2994 _('end filenames with NUL, for use with xargs')),
2996 ('f', 'fullpath', None,
2995 ('f', 'fullpath', None,
2997 _('print complete paths from the filesystem root')),
2996 _('print complete paths from the filesystem root')),
2998 ] + walkopts,
2997 ] + walkopts,
2999 _('hg locate [OPTION]... [PATTERN]...')),
2998 _('hg locate [OPTION]... [PATTERN]...')),
3000 "^log|history":
2999 "^log|history":
3001 (log,
3000 (log,
3002 [('f', 'follow', None,
3001 [('f', 'follow', None,
3003 _('follow changeset history, or file history across copies and renames')),
3002 _('follow changeset history, or file history across copies and renames')),
3004 ('', 'follow-first', None,
3003 ('', 'follow-first', None,
3005 _('only follow the first parent of merge changesets')),
3004 _('only follow the first parent of merge changesets')),
3006 ('d', 'date', '', _('show revs matching date spec')),
3005 ('d', 'date', '', _('show revs matching date spec')),
3007 ('C', 'copies', None, _('show copied files')),
3006 ('C', 'copies', None, _('show copied files')),
3008 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3007 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3009 ('l', 'limit', '', _('limit number of changes displayed')),
3008 ('l', 'limit', '', _('limit number of changes displayed')),
3010 ('r', 'rev', [], _('show the specified revision or range')),
3009 ('r', 'rev', [], _('show the specified revision or range')),
3011 ('', 'removed', None, _('include revs where files were removed')),
3010 ('', 'removed', None, _('include revs where files were removed')),
3012 ('M', 'no-merges', None, _('do not show merges')),
3011 ('M', 'no-merges', None, _('do not show merges')),
3013 ('', 'style', '', _('display using template map file')),
3012 ('', 'style', '', _('display using template map file')),
3014 ('m', 'only-merges', None, _('show only merges')),
3013 ('m', 'only-merges', None, _('show only merges')),
3015 ('p', 'patch', None, _('show patch')),
3014 ('p', 'patch', None, _('show patch')),
3016 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3015 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3017 ('', 'template', '', _('display with template')),
3016 ('', 'template', '', _('display with template')),
3018 ] + walkopts,
3017 ] + walkopts,
3019 _('hg log [OPTION]... [FILE]')),
3018 _('hg log [OPTION]... [FILE]')),
3020 "manifest": (manifest, [], _('hg manifest [REV]')),
3019 "manifest": (manifest, [], _('hg manifest [REV]')),
3021 "^merge":
3020 "^merge":
3022 (merge,
3021 (merge,
3023 [('f', 'force', None, _('force a merge with outstanding changes')),
3022 [('f', 'force', None, _('force a merge with outstanding changes')),
3024 ('r', 'rev', '', _('revision to merge')),
3023 ('r', 'rev', '', _('revision to merge')),
3025 ],
3024 ],
3026 _('hg merge [-f] [[-r] REV]')),
3025 _('hg merge [-f] [[-r] REV]')),
3027 "outgoing|out": (outgoing,
3026 "outgoing|out": (outgoing,
3028 [('M', 'no-merges', None, _('do not show merges')),
3027 [('M', 'no-merges', None, _('do not show merges')),
3029 ('f', 'force', None,
3028 ('f', 'force', None,
3030 _('run even when remote repository is unrelated')),
3029 _('run even when remote repository is unrelated')),
3031 ('p', 'patch', None, _('show patch')),
3030 ('p', 'patch', None, _('show patch')),
3032 ('', 'style', '', _('display using template map file')),
3031 ('', 'style', '', _('display using template map file')),
3033 ('r', 'rev', [], _('a specific revision you would like to push')),
3032 ('r', 'rev', [], _('a specific revision you would like to push')),
3034 ('n', 'newest-first', None, _('show newest record first')),
3033 ('n', 'newest-first', None, _('show newest record first')),
3035 ('', 'template', '', _('display with template')),
3034 ('', 'template', '', _('display with template')),
3036 ] + remoteopts,
3035 ] + remoteopts,
3037 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3036 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3038 "^parents":
3037 "^parents":
3039 (parents,
3038 (parents,
3040 [('r', 'rev', '', _('show parents from the specified rev')),
3039 [('r', 'rev', '', _('show parents from the specified rev')),
3041 ('', 'style', '', _('display using template map file')),
3040 ('', 'style', '', _('display using template map file')),
3042 ('', 'template', '', _('display with template'))],
3041 ('', 'template', '', _('display with template'))],
3043 _('hg parents [-r REV] [FILE]')),
3042 _('hg parents [-r REV] [FILE]')),
3044 "paths": (paths, [], _('hg paths [NAME]')),
3043 "paths": (paths, [], _('hg paths [NAME]')),
3045 "^pull":
3044 "^pull":
3046 (pull,
3045 (pull,
3047 [('u', 'update', None,
3046 [('u', 'update', None,
3048 _('update to new tip if changesets were pulled')),
3047 _('update to new tip if changesets were pulled')),
3049 ('f', 'force', None,
3048 ('f', 'force', None,
3050 _('run even when remote repository is unrelated')),
3049 _('run even when remote repository is unrelated')),
3051 ('r', 'rev', [],
3050 ('r', 'rev', [],
3052 _('a specific revision up to which you would like to pull')),
3051 _('a specific revision up to which you would like to pull')),
3053 ] + remoteopts,
3052 ] + remoteopts,
3054 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3053 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3055 "^push":
3054 "^push":
3056 (push,
3055 (push,
3057 [('f', 'force', None, _('force push')),
3056 [('f', 'force', None, _('force push')),
3058 ('r', 'rev', [], _('a specific revision you would like to push')),
3057 ('r', 'rev', [], _('a specific revision you would like to push')),
3059 ] + remoteopts,
3058 ] + remoteopts,
3060 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3059 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3061 "debugrawcommit|rawcommit":
3060 "debugrawcommit|rawcommit":
3062 (rawcommit,
3061 (rawcommit,
3063 [('p', 'parent', [], _('parent')),
3062 [('p', 'parent', [], _('parent')),
3064 ('d', 'date', '', _('date code')),
3063 ('d', 'date', '', _('date code')),
3065 ('u', 'user', '', _('user')),
3064 ('u', 'user', '', _('user')),
3066 ('F', 'files', '', _('file list'))
3065 ('F', 'files', '', _('file list'))
3067 ] + commitopts,
3066 ] + commitopts,
3068 _('hg debugrawcommit [OPTION]... [FILE]...')),
3067 _('hg debugrawcommit [OPTION]... [FILE]...')),
3069 "recover": (recover, [], _('hg recover')),
3068 "recover": (recover, [], _('hg recover')),
3070 "^remove|rm":
3069 "^remove|rm":
3071 (remove,
3070 (remove,
3072 [('A', 'after', None, _('record remove that has already occurred')),
3071 [('A', 'after', None, _('record remove that has already occurred')),
3073 ('f', 'force', None, _('remove file even if modified')),
3072 ('f', 'force', None, _('remove file even if modified')),
3074 ] + walkopts,
3073 ] + walkopts,
3075 _('hg remove [OPTION]... FILE...')),
3074 _('hg remove [OPTION]... FILE...')),
3076 "rename|mv":
3075 "rename|mv":
3077 (rename,
3076 (rename,
3078 [('A', 'after', None, _('record a rename that has already occurred')),
3077 [('A', 'after', None, _('record a rename that has already occurred')),
3079 ('f', 'force', None,
3078 ('f', 'force', None,
3080 _('forcibly copy over an existing managed file')),
3079 _('forcibly copy over an existing managed file')),
3081 ] + walkopts + dryrunopts,
3080 ] + walkopts + dryrunopts,
3082 _('hg rename [OPTION]... SOURCE... DEST')),
3081 _('hg rename [OPTION]... SOURCE... DEST')),
3083 "^revert":
3082 "^revert":
3084 (revert,
3083 (revert,
3085 [('a', 'all', None, _('revert all changes when no arguments given')),
3084 [('a', 'all', None, _('revert all changes when no arguments given')),
3086 ('d', 'date', '', _('tipmost revision matching date')),
3085 ('d', 'date', '', _('tipmost revision matching date')),
3087 ('r', 'rev', '', _('revision to revert to')),
3086 ('r', 'rev', '', _('revision to revert to')),
3088 ('', 'no-backup', None, _('do not save backup copies of files')),
3087 ('', 'no-backup', None, _('do not save backup copies of files')),
3089 ] + walkopts + dryrunopts,
3088 ] + walkopts + dryrunopts,
3090 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3089 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3091 "rollback": (rollback, [], _('hg rollback')),
3090 "rollback": (rollback, [], _('hg rollback')),
3092 "root": (root, [], _('hg root')),
3091 "root": (root, [], _('hg root')),
3093 "showconfig|debugconfig":
3092 "showconfig|debugconfig":
3094 (showconfig,
3093 (showconfig,
3095 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3094 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3096 _('showconfig [-u] [NAME]...')),
3095 _('showconfig [-u] [NAME]...')),
3097 "^serve":
3096 "^serve":
3098 (serve,
3097 (serve,
3099 [('A', 'accesslog', '', _('name of access log file to write to')),
3098 [('A', 'accesslog', '', _('name of access log file to write to')),
3100 ('d', 'daemon', None, _('run server in background')),
3099 ('d', 'daemon', None, _('run server in background')),
3101 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3100 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3102 ('E', 'errorlog', '', _('name of error log file to write to')),
3101 ('E', 'errorlog', '', _('name of error log file to write to')),
3103 ('p', 'port', 0, _('port to use (default: 8000)')),
3102 ('p', 'port', 0, _('port to use (default: 8000)')),
3104 ('a', 'address', '', _('address to use')),
3103 ('a', 'address', '', _('address to use')),
3105 ('n', 'name', '',
3104 ('n', 'name', '',
3106 _('name to show in web pages (default: working dir)')),
3105 _('name to show in web pages (default: working dir)')),
3107 ('', 'webdir-conf', '', _('name of the webdir config file'
3106 ('', 'webdir-conf', '', _('name of the webdir config file'
3108 ' (serve more than one repo)')),
3107 ' (serve more than one repo)')),
3109 ('', 'pid-file', '', _('name of file to write process ID to')),
3108 ('', 'pid-file', '', _('name of file to write process ID to')),
3110 ('', 'stdio', None, _('for remote clients')),
3109 ('', 'stdio', None, _('for remote clients')),
3111 ('t', 'templates', '', _('web templates to use')),
3110 ('t', 'templates', '', _('web templates to use')),
3112 ('', 'style', '', _('template style to use')),
3111 ('', 'style', '', _('template style to use')),
3113 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3112 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3114 ('', 'certificate', '', _('SSL certificate file'))],
3113 ('', 'certificate', '', _('SSL certificate file'))],
3115 _('hg serve [OPTION]...')),
3114 _('hg serve [OPTION]...')),
3116 "^status|st":
3115 "^status|st":
3117 (status,
3116 (status,
3118 [('A', 'all', None, _('show status of all files')),
3117 [('A', 'all', None, _('show status of all files')),
3119 ('m', 'modified', None, _('show only modified files')),
3118 ('m', 'modified', None, _('show only modified files')),
3120 ('a', 'added', None, _('show only added files')),
3119 ('a', 'added', None, _('show only added files')),
3121 ('r', 'removed', None, _('show only removed files')),
3120 ('r', 'removed', None, _('show only removed files')),
3122 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3121 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3123 ('c', 'clean', None, _('show only files without changes')),
3122 ('c', 'clean', None, _('show only files without changes')),
3124 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3123 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3125 ('i', 'ignored', None, _('show only ignored files')),
3124 ('i', 'ignored', None, _('show only ignored files')),
3126 ('n', 'no-status', None, _('hide status prefix')),
3125 ('n', 'no-status', None, _('hide status prefix')),
3127 ('C', 'copies', None, _('show source of copied files')),
3126 ('C', 'copies', None, _('show source of copied files')),
3128 ('0', 'print0', None,
3127 ('0', 'print0', None,
3129 _('end filenames with NUL, for use with xargs')),
3128 _('end filenames with NUL, for use with xargs')),
3130 ('', 'rev', [], _('show difference from revision')),
3129 ('', 'rev', [], _('show difference from revision')),
3131 ] + walkopts,
3130 ] + walkopts,
3132 _('hg status [OPTION]... [FILE]...')),
3131 _('hg status [OPTION]... [FILE]...')),
3133 "tag":
3132 "tag":
3134 (tag,
3133 (tag,
3135 [('f', 'force', None, _('replace existing tag')),
3134 [('f', 'force', None, _('replace existing tag')),
3136 ('l', 'local', None, _('make the tag local')),
3135 ('l', 'local', None, _('make the tag local')),
3137 ('m', 'message', '', _('message for tag commit log entry')),
3136 ('m', 'message', '', _('message for tag commit log entry')),
3138 ('d', 'date', '', _('record datecode as commit date')),
3137 ('d', 'date', '', _('record datecode as commit date')),
3139 ('u', 'user', '', _('record user as committer')),
3138 ('u', 'user', '', _('record user as committer')),
3140 ('r', 'rev', '', _('revision to tag')),
3139 ('r', 'rev', '', _('revision to tag')),
3141 ('', 'remove', None, _('remove a tag'))],
3140 ('', 'remove', None, _('remove a tag'))],
3142 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3141 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3143 "tags": (tags, [], _('hg tags')),
3142 "tags": (tags, [], _('hg tags')),
3144 "tip":
3143 "tip":
3145 (tip,
3144 (tip,
3146 [('', 'style', '', _('display using template map file')),
3145 [('', 'style', '', _('display using template map file')),
3147 ('p', 'patch', None, _('show patch')),
3146 ('p', 'patch', None, _('show patch')),
3148 ('', 'template', '', _('display with template'))],
3147 ('', 'template', '', _('display with template'))],
3149 _('hg tip [-p]')),
3148 _('hg tip [-p]')),
3150 "unbundle":
3149 "unbundle":
3151 (unbundle,
3150 (unbundle,
3152 [('u', 'update', None,
3151 [('u', 'update', None,
3153 _('update to new tip if changesets were unbundled'))],
3152 _('update to new tip if changesets were unbundled'))],
3154 _('hg unbundle [-u] FILE...')),
3153 _('hg unbundle [-u] FILE...')),
3155 "^update|up|checkout|co":
3154 "^update|up|checkout|co":
3156 (update,
3155 (update,
3157 [('C', 'clean', None, _('overwrite locally modified files')),
3156 [('C', 'clean', None, _('overwrite locally modified files')),
3158 ('d', 'date', '', _('tipmost revision matching date')),
3157 ('d', 'date', '', _('tipmost revision matching date')),
3159 ('r', 'rev', '', _('revision'))],
3158 ('r', 'rev', '', _('revision'))],
3160 _('hg update [-C] [-d DATE] [[-r] REV]')),
3159 _('hg update [-C] [-d DATE] [[-r] REV]')),
3161 "verify": (verify, [], _('hg verify')),
3160 "verify": (verify, [], _('hg verify')),
3162 "version": (version_, [], _('hg version')),
3161 "version": (version_, [], _('hg version')),
3163 }
3162 }
3164
3163
3165 extensions.commandtable = table
3164 extensions.commandtable = table
3166
3165
3167 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3166 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3168 " debugindex debugindexdot debugdate debuginstall")
3167 " debugindex debugindexdot debugdate debuginstall")
3169 optionalrepo = ("paths serve showconfig")
3168 optionalrepo = ("paths serve showconfig")
3170
3169
3171 def dispatch(args, argv0=None):
3170 def dispatch(args, argv0=None):
3172 try:
3171 try:
3173 u = ui.ui(traceback='--traceback' in args)
3172 u = ui.ui(traceback='--traceback' in args)
3174 except util.Abort, inst:
3173 except util.Abort, inst:
3175 sys.stderr.write(_("abort: %s\n") % inst)
3174 sys.stderr.write(_("abort: %s\n") % inst)
3176 return -1
3175 return -1
3177 return cmdutil.runcatch(u, args, argv0=argv0)
3176 return cmdutil.runcatch(u, args, argv0=argv0)
3178
3177
3179 def run():
3178 def run():
3180 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
3179 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
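For orientation, each entry in the command table above maps a 'name|alias' key (a leading '^' marks commands shown in short help) to a (function, options, synopsis) tuple. A minimal sketch of resolving a name against a table of that shape; findcmd and toy_table below are hypothetical illustrations, not Mercurial's own helpers:

def findcmd(name, table):
    """Return (canonical name, entry) for a command name or alias."""
    for key, entry in table.items():
        aliases = key.lstrip("^").split("|")   # '^commit|ci' -> ['commit', 'ci']
        if name in aliases:
            return aliases[0], entry
    raise KeyError("unknown command: %s" % name)

# Toy entry mirroring the (function, options, synopsis) shape used above.
toy_table = {
    "^status|st": (lambda **opts: 0,
                   [('m', 'modified', None, 'show only modified files')],
                   'hg status [OPTION]... [FILE]...'),
}
assert findcmd("st", toy_table)[0] == "status"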
@@ -1,281 +1,281
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from repo import *
10 from repo import *
11 from i18n import _
11 from i18n import _
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 import errno, lock, os, shutil, util, cmdutil, extensions
13 import errno, lock, os, shutil, util, cmdutil, extensions
14 import merge as _merge
14 import merge as _merge
15 import verify as _verify
15 import verify as _verify
16
16
17 def _local(path):
17 def _local(path):
18 return (os.path.isfile(util.drop_scheme('file', path)) and
18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 bundlerepo or localrepo)
19 bundlerepo or localrepo)
20
20
21 schemes = {
21 schemes = {
22 'bundle': bundlerepo,
22 'bundle': bundlerepo,
23 'file': _local,
23 'file': _local,
24 'http': httprepo,
24 'http': httprepo,
25 'https': httprepo,
25 'https': httprepo,
26 'ssh': sshrepo,
26 'ssh': sshrepo,
27 'static-http': statichttprepo,
27 'static-http': statichttprepo,
28 }
28 }
29
29
30 def _lookup(path):
30 def _lookup(path):
31 scheme = 'file'
31 scheme = 'file'
32 if path:
32 if path:
33 c = path.find(':')
33 c = path.find(':')
34 if c > 0:
34 if c > 0:
35 scheme = path[:c]
35 scheme = path[:c]
36 thing = schemes.get(scheme) or schemes['file']
36 thing = schemes.get(scheme) or schemes['file']
37 try:
37 try:
38 return thing(path)
38 return thing(path)
39 except TypeError:
39 except TypeError:
40 return thing
40 return thing
41
41
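A standalone sketch of the scheme parsing that _lookup above performs before choosing a repository module; scheme_of is a hypothetical helper that mirrors only the prefix handling:

def scheme_of(path):
    # Mirror of _lookup's prefix handling: text before the first ':' names
    # the scheme; anything else falls back to local 'file' access.
    scheme = 'file'
    if path:
        c = path.find(':')
        if c > 0:
            scheme = path[:c]
    return scheme

assert scheme_of('https://example.com/repo') == 'https'
assert scheme_of('ssh://user@host//repo') == 'ssh'
assert scheme_of('/srv/repos/project') == 'file'   # no scheme -> local path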
42 def islocal(repo):
42 def islocal(repo):
43 '''return true if repo or path is local'''
43 '''return true if repo or path is local'''
44 if isinstance(repo, str):
44 if isinstance(repo, str):
45 try:
45 try:
46 return _lookup(repo).islocal(repo)
46 return _lookup(repo).islocal(repo)
47 except AttributeError:
47 except AttributeError:
48 return False
48 return False
49 return repo.local()
49 return repo.local()
50
50
51 def repository(ui, path='', create=False):
51 def repository(ui, path='', create=False):
52 """return a repository object for the specified path"""
52 """return a repository object for the specified path"""
53 repo = _lookup(path).instance(ui, path, create)
53 repo = _lookup(path).instance(ui, path, create)
54 ui = getattr(repo, "ui", ui)
54 ui = getattr(repo, "ui", ui)
55 for hook in extensions.setuphooks:
55 for hook in extensions.setuphooks:
56 hook(ui, repo)
56 hook(ui, repo)
57 return repo
57 return repo
58
58
59 def defaultdest(source):
59 def defaultdest(source):
60 '''return default destination of clone if none is given'''
60 '''return default destination of clone if none is given'''
61 return os.path.basename(os.path.normpath(source))
61 return os.path.basename(os.path.normpath(source))
62
62
63 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
63 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
64 stream=False):
64 stream=False):
65 """Make a copy of an existing repository.
65 """Make a copy of an existing repository.
66
66
67 Create a copy of an existing repository in a new directory. The
67 Create a copy of an existing repository in a new directory. The
68 source and destination are URLs, as passed to the repository
68 source and destination are URLs, as passed to the repository
69 function. Returns a pair of repository objects, the source and
69 function. Returns a pair of repository objects, the source and
70 newly created destination.
70 newly created destination.
71
71
72 The location of the source is added to the new repository's
72 The location of the source is added to the new repository's
73 .hg/hgrc file, as the default to be used for future pulls and
73 .hg/hgrc file, as the default to be used for future pulls and
74 pushes.
74 pushes.
75
75
76 If an exception is raised, the partly cloned/updated destination
76 If an exception is raised, the partly cloned/updated destination
77 repository will be deleted.
77 repository will be deleted.
78
78
79 Arguments:
79 Arguments:
80
80
81 source: repository object or URL
81 source: repository object or URL
82
82
83 dest: URL of destination repository to create (defaults to base
83 dest: URL of destination repository to create (defaults to base
84 name of source repository)
84 name of source repository)
85
85
86 pull: always pull from source repository, even in local case
86 pull: always pull from source repository, even in local case
87
87
88 stream: stream raw data uncompressed from repository (fast over
88 stream: stream raw data uncompressed from repository (fast over
89 LAN, slow over WAN)
89 LAN, slow over WAN)
90
90
91 rev: revision to clone up to (implies pull=True)
91 rev: revision to clone up to (implies pull=True)
92
92
93 update: update working directory after clone completes, if
93 update: update working directory after clone completes, if
94 destination is local repository
94 destination is local repository
95 """
95 """
96
96
97 origsource = source
97 origsource = source
98 source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
98 source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
99
99
100 if isinstance(source, str):
100 if isinstance(source, str):
101 src_repo = repository(ui, source)
101 src_repo = repository(ui, source)
102 else:
102 else:
103 src_repo = source
103 src_repo = source
104 source = src_repo.url()
104 source = src_repo.url()
105
105
106 if dest is None:
106 if dest is None:
107 dest = defaultdest(source)
107 dest = defaultdest(source)
108 ui.status(_("destination directory: %s\n") % dest)
108 ui.status(_("destination directory: %s\n") % dest)
109
109
110 def localpath(path):
110 def localpath(path):
111 if path.startswith('file://'):
111 if path.startswith('file://'):
112 return path[7:]
112 return path[7:]
113 if path.startswith('file:'):
113 if path.startswith('file:'):
114 return path[5:]
114 return path[5:]
115 return path
115 return path
116
116
117 dest = localpath(dest)
117 dest = localpath(dest)
118 source = localpath(source)
118 source = localpath(source)
119
119
120 if os.path.exists(dest):
120 if os.path.exists(dest):
121 raise util.Abort(_("destination '%s' already exists") % dest)
121 raise util.Abort(_("destination '%s' already exists") % dest)
122
122
123 class DirCleanup(object):
123 class DirCleanup(object):
124 def __init__(self, dir_):
124 def __init__(self, dir_):
125 self.rmtree = shutil.rmtree
125 self.rmtree = shutil.rmtree
126 self.dir_ = dir_
126 self.dir_ = dir_
127 def close(self):
127 def close(self):
128 self.dir_ = None
128 self.dir_ = None
129 def __del__(self):
129 def __del__(self):
130 if self.dir_:
130 if self.dir_:
131 self.rmtree(self.dir_, True)
131 self.rmtree(self.dir_, True)
132
132
133 src_lock = dest_lock = dir_cleanup = None
133 src_lock = dest_lock = dir_cleanup = None
134 try:
134 try:
135 if islocal(dest):
135 if islocal(dest):
136 dir_cleanup = DirCleanup(dest)
136 dir_cleanup = DirCleanup(dest)
137
137
138 abspath = origsource
138 abspath = origsource
139 copy = False
139 copy = False
140 if src_repo.local() and islocal(dest):
140 if src_repo.local() and islocal(dest):
141 abspath = os.path.abspath(origsource)
141 abspath = os.path.abspath(origsource)
142 copy = not pull and not rev
142 copy = not pull and not rev
143
143
144 if copy:
144 if copy:
145 try:
145 try:
146 # we use a lock here because if we race with commit, we
146 # we use a lock here because if we race with commit, we
147 # can end up with extra data in the cloned revlogs that's
147 # can end up with extra data in the cloned revlogs that's
148 # not pointed to by changesets, thus causing verify to
148 # not pointed to by changesets, thus causing verify to
149 # fail
149 # fail
150 src_lock = src_repo.lock()
150 src_lock = src_repo.lock()
151 except lock.LockException:
151 except lock.LockException:
152 copy = False
152 copy = False
153
153
154 if copy:
154 if copy:
155 def force_copy(src, dst):
155 def force_copy(src, dst):
156 try:
156 try:
157 util.copyfiles(src, dst)
157 util.copyfiles(src, dst)
158 except OSError, inst:
158 except OSError, inst:
159 if inst.errno != errno.ENOENT:
159 if inst.errno != errno.ENOENT:
160 raise
160 raise
161
161
162 src_store = os.path.realpath(src_repo.spath)
162 src_store = os.path.realpath(src_repo.spath)
163 if not os.path.exists(dest):
163 if not os.path.exists(dest):
164 os.mkdir(dest)
164 os.mkdir(dest)
165 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
165 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
166 os.mkdir(dest_path)
166 os.mkdir(dest_path)
167 if src_repo.spath != src_repo.path:
167 if src_repo.spath != src_repo.path:
168 dest_store = os.path.join(dest_path, "store")
168 dest_store = os.path.join(dest_path, "store")
169 os.mkdir(dest_store)
169 os.mkdir(dest_store)
170 else:
170 else:
171 dest_store = dest_path
171 dest_store = dest_path
172 # copy the requires file
172 # copy the requires file
173 force_copy(src_repo.join("requires"),
173 force_copy(src_repo.join("requires"),
174 os.path.join(dest_path, "requires"))
174 os.path.join(dest_path, "requires"))
175 # we lock here to avoid premature writing to the target
175 # we lock here to avoid premature writing to the target
176 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
176 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
177
177
178 files = ("data",
178 files = ("data",
179 "00manifest.d", "00manifest.i",
179 "00manifest.d", "00manifest.i",
180 "00changelog.d", "00changelog.i")
180 "00changelog.d", "00changelog.i")
181 for f in files:
181 for f in files:
182 src = os.path.join(src_store, f)
182 src = os.path.join(src_store, f)
183 dst = os.path.join(dest_store, f)
183 dst = os.path.join(dest_store, f)
184 force_copy(src, dst)
184 force_copy(src, dst)
185
185
186 # we need to re-init the repo after manually copying the data
186 # we need to re-init the repo after manually copying the data
187 # into it
187 # into it
188 dest_repo = repository(ui, dest)
188 dest_repo = repository(ui, dest)
189
189
190 else:
190 else:
191 dest_repo = repository(ui, dest, create=True)
191 dest_repo = repository(ui, dest, create=True)
192
192
193 revs = None
193 revs = None
194 if rev:
194 if rev:
195 if 'lookup' not in src_repo.capabilities:
195 if 'lookup' not in src_repo.capabilities:
196 raise util.Abort(_("src repository does not support revision "
196 raise util.Abort(_("src repository does not support revision "
197 "lookup and so doesn't support clone by "
197 "lookup and so doesn't support clone by "
198 "revision"))
198 "revision"))
199 revs = [src_repo.lookup(r) for r in rev]
199 revs = [src_repo.lookup(r) for r in rev]
200
200
201 if dest_repo.local():
201 if dest_repo.local():
202 dest_repo.clone(src_repo, heads=revs, stream=stream)
202 dest_repo.clone(src_repo, heads=revs, stream=stream)
203 elif src_repo.local():
203 elif src_repo.local():
204 src_repo.push(dest_repo, revs=revs)
204 src_repo.push(dest_repo, revs=revs)
205 else:
205 else:
206 raise util.Abort(_("clone from remote to remote not supported"))
206 raise util.Abort(_("clone from remote to remote not supported"))
207
207
208 if dest_repo.local():
208 if dest_repo.local():
209 fp = dest_repo.opener("hgrc", "w", text=True)
209 fp = dest_repo.opener("hgrc", "w", text=True)
210 fp.write("[paths]\n")
210 fp.write("[paths]\n")
211 fp.write("default = %s\n" % abspath)
211 fp.write("default = %s\n" % abspath)
212 fp.close()
212 fp.close()
213
213
214 if update:
214 if update:
215 try:
215 try:
216 checkout = dest_repo.lookup("default")
216 checkout = dest_repo.lookup("default")
217 except:
217 except:
218 checkout = dest_repo.changelog.tip()
218 checkout = dest_repo.changelog.tip()
219 _update(dest_repo, checkout)
219 _update(dest_repo, checkout)
220 if dir_cleanup:
220 if dir_cleanup:
221 dir_cleanup.close()
221 dir_cleanup.close()
222
222
223 return src_repo, dest_repo
223 return src_repo, dest_repo
224 finally:
224 finally:
225 del src_lock, dest_lock, dir_cleanup
225 del src_lock, dest_lock, dir_cleanup
226
226
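A hedged usage sketch of clone() as documented above; the paths are hypothetical and the call assumes the mercurial package this file belongs to is importable:

# Hypothetical caller of hg.clone(); source and destination paths are made up.
from mercurial import ui as uimod, hg

u = uimod.ui()
src_repo, dest_repo = hg.clone(u, '/srv/repos/project',
                               dest='/tmp/project-copy',
                               pull=False, update=True)
# On success, dest_repo is a local repository whose .hg/hgrc already points
# its default path back at the source, as described in the docstring above.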
227 def _showstats(repo, stats):
227 def _showstats(repo, stats):
228 stats = ((stats[0], _("updated")),
228 stats = ((stats[0], _("updated")),
229 (stats[1], _("merged")),
229 (stats[1], _("merged")),
230 (stats[2], _("removed")),
230 (stats[2], _("removed")),
231 (stats[3], _("unresolved")))
231 (stats[3], _("unresolved")))
232 note = ", ".join([_("%d files %s") % s for s in stats])
232 note = ", ".join([_("%d files %s") % s for s in stats])
233 repo.ui.status("%s\n" % note)
233 repo.ui.status("%s\n" % note)
234
234
235 def _update(repo, node): return update(repo, node)
235 def _update(repo, node): return update(repo, node)
236
236
237 def update(repo, node):
237 def update(repo, node):
238 """update the working directory to node, merging linear changes"""
238 """update the working directory to node, merging linear changes"""
239 pl = repo.parents()
239 pl = repo.parents()
240 stats = _merge.update(repo, node, False, False, None, None)
240 stats = _merge.update(repo, node, False, False, None)
241 _showstats(repo, stats)
241 _showstats(repo, stats)
242 if stats[3]:
242 if stats[3]:
243 repo.ui.status(_("There are unresolved merges with"
243 repo.ui.status(_("There are unresolved merges with"
244 " locally modified files.\n"))
244 " locally modified files.\n"))
245 if stats[1]:
245 if stats[1]:
246 repo.ui.status(_("You can finish the partial merge using:\n"))
246 repo.ui.status(_("You can finish the partial merge using:\n"))
247 else:
247 else:
248 repo.ui.status(_("You can redo the full merge using:\n"))
248 repo.ui.status(_("You can redo the full merge using:\n"))
249 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
249 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
250 repo.ui.status(_(" hg update %s\n hg update %s\n")
250 repo.ui.status(_(" hg update %s\n hg update %s\n")
251 % (pl[0].rev(), repo.changectx(node).rev()))
251 % (pl[0].rev(), repo.changectx(node).rev()))
252 return stats[3]
252 return stats[3]
253
253
254 def clean(repo, node, wlock=None, show_stats=True):
254 def clean(repo, node, show_stats=True):
255 """forcibly switch the working directory to node, clobbering changes"""
255 """forcibly switch the working directory to node, clobbering changes"""
256 stats = _merge.update(repo, node, False, True, None, wlock)
256 stats = _merge.update(repo, node, False, True, None)
257 if show_stats: _showstats(repo, stats)
257 if show_stats: _showstats(repo, stats)
258 return stats[3]
258 return stats[3]
259
259
260 def merge(repo, node, force=None, remind=True, wlock=None):
260 def merge(repo, node, force=None, remind=True):
261 """branch merge with node, resolving changes"""
261 """branch merge with node, resolving changes"""
262 stats = _merge.update(repo, node, True, force, False, wlock)
262 stats = _merge.update(repo, node, True, force, False)
263 _showstats(repo, stats)
263 _showstats(repo, stats)
264 if stats[3]:
264 if stats[3]:
265 pl = repo.parents()
265 pl = repo.parents()
266 repo.ui.status(_("There are unresolved merges,"
266 repo.ui.status(_("There are unresolved merges,"
267 " you can redo the full merge using:\n"
267 " you can redo the full merge using:\n"
268 " hg update -C %s\n"
268 " hg update -C %s\n"
269 " hg merge %s\n")
269 " hg merge %s\n")
270 % (pl[0].rev(), pl[1].rev()))
270 % (pl[0].rev(), pl[1].rev()))
271 elif remind:
271 elif remind:
272 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
272 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
273 return stats[3]
273 return stats[3]
274
274
275 def revert(repo, node, choose, wlock):
275 def revert(repo, node, choose):
276 """revert changes to revision in node without updating dirstate"""
276 """revert changes to revision in node without updating dirstate"""
277 return _merge.update(repo, node, False, True, choose, wlock)[3]
277 return _merge.update(repo, node, False, True, choose)[3]
278
278
279 def verify(repo):
279 def verify(repo):
280 """verify the consistency of a repository"""
280 """verify the consistency of a repository"""
281 return _verify.verify(repo)
281 return _verify.verify(repo)
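The helpers above (update, clean, merge, revert) all return stats[3], the count of unresolved files. A hedged sketch of a caller relying on that convention; the repository path is hypothetical:

from mercurial import ui as uimod, hg

u = uimod.ui()
repo = hg.repository(u, '/srv/repos/project')      # hypothetical local repo
unresolved = hg.update(repo, repo.changelog.tip())
if unresolved:
    u.warn("update left %d files unresolved\n" % unresolved)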
@@ -1,1978 +1,1981
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util, extensions, hook
13 import os, revlog, time, util, extensions, hook
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 repo.repository.__init__(self)
20 repo.repository.__init__(self)
21 self.path = path
21 self.path = path
22 self.root = os.path.realpath(path)
22 self.root = os.path.realpath(path)
23 self.path = os.path.join(self.root, ".hg")
23 self.path = os.path.join(self.root, ".hg")
24 self.origroot = path
24 self.origroot = path
25 self.opener = util.opener(self.path)
25 self.opener = util.opener(self.path)
26 self.wopener = util.opener(self.root)
26 self.wopener = util.opener(self.root)
27
27
28 if not os.path.isdir(self.path):
28 if not os.path.isdir(self.path):
29 if create:
29 if create:
30 if not os.path.exists(path):
30 if not os.path.exists(path):
31 os.mkdir(path)
31 os.mkdir(path)
32 os.mkdir(self.path)
32 os.mkdir(self.path)
33 requirements = ["revlogv1"]
33 requirements = ["revlogv1"]
34 if parentui.configbool('format', 'usestore', True):
34 if parentui.configbool('format', 'usestore', True):
35 os.mkdir(os.path.join(self.path, "store"))
35 os.mkdir(os.path.join(self.path, "store"))
36 requirements.append("store")
36 requirements.append("store")
37 # create an invalid changelog
37 # create an invalid changelog
38 self.opener("00changelog.i", "a").write(
38 self.opener("00changelog.i", "a").write(
39 '\0\0\0\2' # represents revlogv2
39 '\0\0\0\2' # represents revlogv2
40 ' dummy changelog to prevent using the old repo layout'
40 ' dummy changelog to prevent using the old repo layout'
41 )
41 )
42 reqfile = self.opener("requires", "w")
42 reqfile = self.opener("requires", "w")
43 for r in requirements:
43 for r in requirements:
44 reqfile.write("%s\n" % r)
44 reqfile.write("%s\n" % r)
45 reqfile.close()
45 reqfile.close()
46 else:
46 else:
47 raise repo.RepoError(_("repository %s not found") % path)
47 raise repo.RepoError(_("repository %s not found") % path)
48 elif create:
48 elif create:
49 raise repo.RepoError(_("repository %s already exists") % path)
49 raise repo.RepoError(_("repository %s already exists") % path)
50 else:
50 else:
51 # find requirements
51 # find requirements
52 try:
52 try:
53 requirements = self.opener("requires").read().splitlines()
53 requirements = self.opener("requires").read().splitlines()
54 except IOError, inst:
54 except IOError, inst:
55 if inst.errno != errno.ENOENT:
55 if inst.errno != errno.ENOENT:
56 raise
56 raise
57 requirements = []
57 requirements = []
58 # check them
58 # check them
59 for r in requirements:
59 for r in requirements:
60 if r not in self.supported:
60 if r not in self.supported:
61 raise repo.RepoError(_("requirement '%s' not supported") % r)
61 raise repo.RepoError(_("requirement '%s' not supported") % r)
62
62
63 # setup store
63 # setup store
64 if "store" in requirements:
64 if "store" in requirements:
65 self.encodefn = util.encodefilename
65 self.encodefn = util.encodefilename
66 self.decodefn = util.decodefilename
66 self.decodefn = util.decodefilename
67 self.spath = os.path.join(self.path, "store")
67 self.spath = os.path.join(self.path, "store")
68 else:
68 else:
69 self.encodefn = lambda x: x
69 self.encodefn = lambda x: x
70 self.decodefn = lambda x: x
70 self.decodefn = lambda x: x
71 self.spath = self.path
71 self.spath = self.path
72 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
72 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
73
73
74 self.ui = ui.ui(parentui=parentui)
74 self.ui = ui.ui(parentui=parentui)
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self.branchcache = None
82 self.branchcache = None
83 self.nodetagscache = None
83 self.nodetagscache = None
84 self.filterpats = {}
84 self.filterpats = {}
85 self._transref = self._lockref = self._wlockref = None
85 self._transref = self._lockref = self._wlockref = None
86
86
87 def __getattr__(self, name):
87 def __getattr__(self, name):
88 if name == 'changelog':
88 if name == 'changelog':
89 self.changelog = changelog.changelog(self.sopener)
89 self.changelog = changelog.changelog(self.sopener)
90 self.sopener.defversion = self.changelog.version
90 self.sopener.defversion = self.changelog.version
91 return self.changelog
91 return self.changelog
92 if name == 'manifest':
92 if name == 'manifest':
93 self.changelog
93 self.changelog
94 self.manifest = manifest.manifest(self.sopener)
94 self.manifest = manifest.manifest(self.sopener)
95 return self.manifest
95 return self.manifest
96 if name == 'dirstate':
96 if name == 'dirstate':
97 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
97 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 return self.dirstate
98 return self.dirstate
99 else:
99 else:
100 raise AttributeError, name
100 raise AttributeError, name
101
101
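__getattr__ above only runs when normal attribute lookup fails, so assigning self.changelog (or manifest, or dirstate) on first use caches the object in the instance dictionary and later accesses never re-enter the hook. A minimal standalone sketch of that lazy-initialization pattern:

class Lazy(object):
    def __getattr__(self, name):
        if name == 'changelog':
            print("building changelog once")
            # Assignment caches the value; __getattr__ is not called again.
            self.changelog = ['rev0', 'rev1']
            return self.changelog
        raise AttributeError(name)

r = Lazy()
r.changelog        # prints the message and builds the list
r.changelog        # served straight from the instance dict, no rebuild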
102 def url(self):
102 def url(self):
103 return 'file:' + self.root
103 return 'file:' + self.root
104
104
105 def hook(self, name, throw=False, **args):
105 def hook(self, name, throw=False, **args):
106 return hook.hook(self.ui, self, name, throw, **args)
106 return hook.hook(self.ui, self, name, throw, **args)
107
107
108 tag_disallowed = ':\r\n'
108 tag_disallowed = ':\r\n'
109
109
110 def _tag(self, name, node, message, local, user, date, parent=None,
110 def _tag(self, name, node, message, local, user, date, parent=None,
111 extra={}):
111 extra={}):
112 use_dirstate = parent is None
112 use_dirstate = parent is None
113
113
114 for c in self.tag_disallowed:
114 for c in self.tag_disallowed:
115 if c in name:
115 if c in name:
116 raise util.Abort(_('%r cannot be used in a tag name') % c)
116 raise util.Abort(_('%r cannot be used in a tag name') % c)
117
117
118 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
118 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
119
119
120 def writetag(fp, name, munge, prevtags):
120 def writetag(fp, name, munge, prevtags):
121 if prevtags and prevtags[-1] != '\n':
121 if prevtags and prevtags[-1] != '\n':
122 fp.write('\n')
122 fp.write('\n')
123 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
123 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
124 fp.close()
124 fp.close()
125 self.hook('tag', node=hex(node), tag=name, local=local)
125 self.hook('tag', node=hex(node), tag=name, local=local)
126
126
127 prevtags = ''
127 prevtags = ''
128 if local:
128 if local:
129 try:
129 try:
130 fp = self.opener('localtags', 'r+')
130 fp = self.opener('localtags', 'r+')
131 except IOError, err:
131 except IOError, err:
132 fp = self.opener('localtags', 'a')
132 fp = self.opener('localtags', 'a')
133 else:
133 else:
134 prevtags = fp.read()
134 prevtags = fp.read()
135
135
136 # local tags are stored in the current charset
136 # local tags are stored in the current charset
137 writetag(fp, name, None, prevtags)
137 writetag(fp, name, None, prevtags)
138 return
138 return
139
139
140 if use_dirstate:
140 if use_dirstate:
141 try:
141 try:
142 fp = self.wfile('.hgtags', 'rb+')
142 fp = self.wfile('.hgtags', 'rb+')
143 except IOError, err:
143 except IOError, err:
144 fp = self.wfile('.hgtags', 'ab')
144 fp = self.wfile('.hgtags', 'ab')
145 else:
145 else:
146 prevtags = fp.read()
146 prevtags = fp.read()
147 else:
147 else:
148 try:
148 try:
149 prevtags = self.filectx('.hgtags', parent).data()
149 prevtags = self.filectx('.hgtags', parent).data()
150 except revlog.LookupError:
150 except revlog.LookupError:
151 pass
151 pass
152 fp = self.wfile('.hgtags', 'wb')
152 fp = self.wfile('.hgtags', 'wb')
153
153
154 # committed tags are stored in UTF-8
154 # committed tags are stored in UTF-8
155 writetag(fp, name, util.fromlocal, prevtags)
155 writetag(fp, name, util.fromlocal, prevtags)
156
156
157 if use_dirstate and '.hgtags' not in self.dirstate:
157 if use_dirstate and '.hgtags' not in self.dirstate:
158 self.add(['.hgtags'])
158 self.add(['.hgtags'])
159
159
160 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
160 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
161 extra=extra)
161 extra=extra)
162
162
163 self.hook('tag', node=hex(node), tag=name, local=local)
163 self.hook('tag', node=hex(node), tag=name, local=local)
164
164
165 return tagnode
165 return tagnode
166
166
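writetag above appends lines of the form '<40-hex-node> <tag name>' to localtags (local charset) or .hgtags (UTF-8). A small illustrative round-trip of that line format, using a fabricated node id:

from binascii import hexlify

node = b'\x12' * 20                                 # fabricated 20-byte node id
line = "%s %s\n" % (hexlify(node).decode(), "v1.0")
assert line == "12" * 20 + " v1.0\n"
# Reading it back: split on the first space only, node first, then the name.
read_node, read_name = line.rstrip("\n").split(" ", 1)
assert (read_node, read_name) == ("12" * 20, "v1.0")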
167 def tag(self, name, node, message, local, user, date):
167 def tag(self, name, node, message, local, user, date):
168 '''tag a revision with a symbolic name.
168 '''tag a revision with a symbolic name.
169
169
170 if local is True, the tag is stored in a per-repository file.
170 if local is True, the tag is stored in a per-repository file.
171 otherwise, it is stored in the .hgtags file, and a new
171 otherwise, it is stored in the .hgtags file, and a new
172 changeset is committed with the change.
172 changeset is committed with the change.
173
173
174 keyword arguments:
174 keyword arguments:
175
175
176 local: whether to store tag in non-version-controlled file
176 local: whether to store tag in non-version-controlled file
177 (default False)
177 (default False)
178
178
179 message: commit message to use if committing
179 message: commit message to use if committing
180
180
181 user: name of user to use if committing
181 user: name of user to use if committing
182
182
183 date: date tuple to use if committing'''
183 date: date tuple to use if committing'''
184
184
185 for x in self.status()[:5]:
185 for x in self.status()[:5]:
186 if '.hgtags' in x:
186 if '.hgtags' in x:
187 raise util.Abort(_('working copy of .hgtags is changed '
187 raise util.Abort(_('working copy of .hgtags is changed '
188 '(please commit .hgtags manually)'))
188 '(please commit .hgtags manually)'))
189
189
190
190
191 self._tag(name, node, message, local, user, date)
191 self._tag(name, node, message, local, user, date)
192
192
193 def tags(self):
193 def tags(self):
194 '''return a mapping of tag to node'''
194 '''return a mapping of tag to node'''
195 if self.tagscache:
195 if self.tagscache:
196 return self.tagscache
196 return self.tagscache
197
197
198 globaltags = {}
198 globaltags = {}
199
199
200 def readtags(lines, fn):
200 def readtags(lines, fn):
201 filetags = {}
201 filetags = {}
202 count = 0
202 count = 0
203
203
204 def warn(msg):
204 def warn(msg):
205 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
205 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
206
206
207 for l in lines:
207 for l in lines:
208 count += 1
208 count += 1
209 if not l:
209 if not l:
210 continue
210 continue
211 s = l.split(" ", 1)
211 s = l.split(" ", 1)
212 if len(s) != 2:
212 if len(s) != 2:
213 warn(_("cannot parse entry"))
213 warn(_("cannot parse entry"))
214 continue
214 continue
215 node, key = s
215 node, key = s
216 key = util.tolocal(key.strip()) # stored in UTF-8
216 key = util.tolocal(key.strip()) # stored in UTF-8
217 try:
217 try:
218 bin_n = bin(node)
218 bin_n = bin(node)
219 except TypeError:
219 except TypeError:
220 warn(_("node '%s' is not well formed") % node)
220 warn(_("node '%s' is not well formed") % node)
221 continue
221 continue
222 if bin_n not in self.changelog.nodemap:
222 if bin_n not in self.changelog.nodemap:
223 warn(_("tag '%s' refers to unknown node") % key)
223 warn(_("tag '%s' refers to unknown node") % key)
224 continue
224 continue
225
225
226 h = []
226 h = []
227 if key in filetags:
227 if key in filetags:
228 n, h = filetags[key]
228 n, h = filetags[key]
229 h.append(n)
229 h.append(n)
230 filetags[key] = (bin_n, h)
230 filetags[key] = (bin_n, h)
231
231
232 for k, nh in filetags.items():
232 for k, nh in filetags.items():
233 if k not in globaltags:
233 if k not in globaltags:
234 globaltags[k] = nh
234 globaltags[k] = nh
235 continue
235 continue
236 # we prefer the global tag if:
236 # we prefer the global tag if:
237 # it supersedes us OR
237 # it supersedes us OR
238 # mutual supersedes and it has a higher rank
238 # mutual supersedes and it has a higher rank
239 # otherwise we win because we're tip-most
239 # otherwise we win because we're tip-most
240 an, ah = nh
240 an, ah = nh
241 bn, bh = globaltags[k]
241 bn, bh = globaltags[k]
242 if (bn != an and an in bh and
242 if (bn != an and an in bh and
243 (bn not in ah or len(bh) > len(ah))):
243 (bn not in ah or len(bh) > len(ah))):
244 an = bn
244 an = bn
245 ah.extend([n for n in bh if n not in ah])
245 ah.extend([n for n in bh if n not in ah])
246 globaltags[k] = an, ah
246 globaltags[k] = an, ah
247
247
248 # read the tags file from each head, ending with the tip
248 # read the tags file from each head, ending with the tip
249 f = None
249 f = None
250 for rev, node, fnode in self._hgtagsnodes():
250 for rev, node, fnode in self._hgtagsnodes():
251 f = (f and f.filectx(fnode) or
251 f = (f and f.filectx(fnode) or
252 self.filectx('.hgtags', fileid=fnode))
252 self.filectx('.hgtags', fileid=fnode))
253 readtags(f.data().splitlines(), f)
253 readtags(f.data().splitlines(), f)
254
254
255 try:
255 try:
256 data = util.fromlocal(self.opener("localtags").read())
256 data = util.fromlocal(self.opener("localtags").read())
257 # localtags are stored in the local character set
257 # localtags are stored in the local character set
258 # while the internal tag table is stored in UTF-8
258 # while the internal tag table is stored in UTF-8
259 readtags(data.splitlines(), "localtags")
259 readtags(data.splitlines(), "localtags")
260 except IOError:
260 except IOError:
261 pass
261 pass
262
262
263 self.tagscache = {}
263 self.tagscache = {}
264 for k,nh in globaltags.items():
264 for k,nh in globaltags.items():
265 n = nh[0]
265 n = nh[0]
266 if n != nullid:
266 if n != nullid:
267 self.tagscache[k] = n
267 self.tagscache[k] = n
268 self.tagscache['tip'] = self.changelog.tip()
268 self.tagscache['tip'] = self.changelog.tip()
269
269
270 return self.tagscache
270 return self.tagscache
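# Illustrative sketch (editor's addition, not part of this changeset): the
# "we prefer the global tag if ..." rule from readtags() above, restated as a
# stand-alone function on plain strings.  mergetag/oldtags are hypothetical
# names; only the decision logic is mirrored.
def mergetag(oldtags, key, newnode, newhist):
    if key not in oldtags:
        oldtags[key] = (newnode, newhist)
        return
    oldnode, oldhist = oldtags[key]
    if (oldnode != newnode and newnode in oldhist and
        (oldnode not in newhist or len(oldhist) > len(newhist))):
        newnode = oldnode                     # the earlier tag supersedes us
    newhist.extend([n for n in oldhist if n not in newhist])
    oldtags[key] = (newnode, newhist)

tags = {'v1': ('b', ['a'])}                   # 'b' already superseded 'a'
mergetag(tags, 'v1', 'a', [])                 # a tip-most file still says 'a'
assert tags['v1'][0] == 'b'                   # ...but 'b' wins: it supersedes 'a'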
271
271
272 def _hgtagsnodes(self):
272 def _hgtagsnodes(self):
273 heads = self.heads()
273 heads = self.heads()
274 heads.reverse()
274 heads.reverse()
275 last = {}
275 last = {}
276 ret = []
276 ret = []
277 for node in heads:
277 for node in heads:
278 c = self.changectx(node)
278 c = self.changectx(node)
279 rev = c.rev()
279 rev = c.rev()
280 try:
280 try:
281 fnode = c.filenode('.hgtags')
281 fnode = c.filenode('.hgtags')
282 except revlog.LookupError:
282 except revlog.LookupError:
283 continue
283 continue
284 ret.append((rev, node, fnode))
284 ret.append((rev, node, fnode))
285 if fnode in last:
285 if fnode in last:
286 ret[last[fnode]] = None
286 ret[last[fnode]] = None
287 last[fnode] = len(ret) - 1
287 last[fnode] = len(ret) - 1
288 return [item for item in ret if item]
288 return [item for item in ret if item]
289
289
290 def tagslist(self):
290 def tagslist(self):
291 '''return a list of tags ordered by revision'''
291 '''return a list of tags ordered by revision'''
292 l = []
292 l = []
293 for t, n in self.tags().items():
293 for t, n in self.tags().items():
294 try:
294 try:
295 r = self.changelog.rev(n)
295 r = self.changelog.rev(n)
296 except:
296 except:
297 r = -2 # sort to the beginning of the list if unknown
297 r = -2 # sort to the beginning of the list if unknown
298 l.append((r, t, n))
298 l.append((r, t, n))
299 l.sort()
299 l.sort()
300 return [(t, n) for r, t, n in l]
300 return [(t, n) for r, t, n in l]
301
301
302 def nodetags(self, node):
302 def nodetags(self, node):
303 '''return the tags associated with a node'''
303 '''return the tags associated with a node'''
304 if not self.nodetagscache:
304 if not self.nodetagscache:
305 self.nodetagscache = {}
305 self.nodetagscache = {}
306 for t, n in self.tags().items():
306 for t, n in self.tags().items():
307 self.nodetagscache.setdefault(n, []).append(t)
307 self.nodetagscache.setdefault(n, []).append(t)
308 return self.nodetagscache.get(node, [])
308 return self.nodetagscache.get(node, [])
309
309
310 def _branchtags(self):
310 def _branchtags(self):
311 partial, last, lrev = self._readbranchcache()
311 partial, last, lrev = self._readbranchcache()
312
312
313 tiprev = self.changelog.count() - 1
313 tiprev = self.changelog.count() - 1
314 if lrev != tiprev:
314 if lrev != tiprev:
315 self._updatebranchcache(partial, lrev+1, tiprev+1)
315 self._updatebranchcache(partial, lrev+1, tiprev+1)
316 self._writebranchcache(partial, self.changelog.tip(), tiprev)
316 self._writebranchcache(partial, self.changelog.tip(), tiprev)
317
317
318 return partial
318 return partial
319
319
320 def branchtags(self):
320 def branchtags(self):
321 if self.branchcache is not None:
321 if self.branchcache is not None:
322 return self.branchcache
322 return self.branchcache
323
323
324 self.branchcache = {} # avoid recursion in changectx
324 self.branchcache = {} # avoid recursion in changectx
325 partial = self._branchtags()
325 partial = self._branchtags()
326
326
327 # the branch cache is stored on disk as UTF-8, but in the local
327 # the branch cache is stored on disk as UTF-8, but in the local
328 # charset internally
328 # charset internally
329 for k, v in partial.items():
329 for k, v in partial.items():
330 self.branchcache[util.tolocal(k)] = v
330 self.branchcache[util.tolocal(k)] = v
331 return self.branchcache
331 return self.branchcache
332
332
333 def _readbranchcache(self):
333 def _readbranchcache(self):
334 partial = {}
334 partial = {}
335 try:
335 try:
336 f = self.opener("branch.cache")
336 f = self.opener("branch.cache")
337 lines = f.read().split('\n')
337 lines = f.read().split('\n')
338 f.close()
338 f.close()
339 except (IOError, OSError):
339 except (IOError, OSError):
340 return {}, nullid, nullrev
340 return {}, nullid, nullrev
341
341
342 try:
342 try:
343 last, lrev = lines.pop(0).split(" ", 1)
343 last, lrev = lines.pop(0).split(" ", 1)
344 last, lrev = bin(last), int(lrev)
344 last, lrev = bin(last), int(lrev)
345 if not (lrev < self.changelog.count() and
345 if not (lrev < self.changelog.count() and
346 self.changelog.node(lrev) == last): # sanity check
346 self.changelog.node(lrev) == last): # sanity check
347 # invalidate the cache
347 # invalidate the cache
348 raise ValueError('Invalid branch cache: unknown tip')
348 raise ValueError('Invalid branch cache: unknown tip')
349 for l in lines:
349 for l in lines:
350 if not l: continue
350 if not l: continue
351 node, label = l.split(" ", 1)
351 node, label = l.split(" ", 1)
352 partial[label.strip()] = bin(node)
352 partial[label.strip()] = bin(node)
353 except (KeyboardInterrupt, util.SignalInterrupt):
353 except (KeyboardInterrupt, util.SignalInterrupt):
354 raise
354 raise
355 except Exception, inst:
355 except Exception, inst:
356 if self.ui.debugflag:
356 if self.ui.debugflag:
357 self.ui.warn(str(inst), '\n')
357 self.ui.warn(str(inst), '\n')
358 partial, last, lrev = {}, nullid, nullrev
358 partial, last, lrev = {}, nullid, nullrev
359 return partial, last, lrev
359 return partial, last, lrev
360
360
361 def _writebranchcache(self, branches, tip, tiprev):
361 def _writebranchcache(self, branches, tip, tiprev):
362 try:
362 try:
363 f = self.opener("branch.cache", "w", atomictemp=True)
363 f = self.opener("branch.cache", "w", atomictemp=True)
364 f.write("%s %s\n" % (hex(tip), tiprev))
364 f.write("%s %s\n" % (hex(tip), tiprev))
365 for label, node in branches.iteritems():
365 for label, node in branches.iteritems():
366 f.write("%s %s\n" % (hex(node), label))
366 f.write("%s %s\n" % (hex(node), label))
367 f.rename()
367 f.rename()
368 except (IOError, OSError):
368 except (IOError, OSError):
369 pass
369 pass
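# Illustrative sketch (editor's addition, not part of this changeset): the
# branch.cache file written above is one "<tip hex> <tip rev>" header line
# followed by one "<node hex> <branch name>" line per branch.  A round trip
# on plain strings, no repository involved; writecache/readcache are made-up
# names.
def writecache(tip_hex, tiprev, branches):
    lines = ["%s %s" % (tip_hex, tiprev)]
    lines += ["%s %s" % (node, label) for label, node in sorted(branches.items())]
    return "\n".join(lines) + "\n"

def readcache(data):
    lines = data.splitlines()
    tip_hex, tiprev = lines[0].split(" ", 1)
    branches = {}
    for l in lines[1:]:
        if not l:
            continue
        node, label = l.split(" ", 1)
        branches[label.strip()] = node
    return tip_hex, int(tiprev), branches

cache = writecache("ab" * 20, 42, {"default": "cd" * 20})
assert readcache(cache) == ("ab" * 20, 42, {"default": "cd" * 20})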
370
370
371 def _updatebranchcache(self, partial, start, end):
371 def _updatebranchcache(self, partial, start, end):
372 for r in xrange(start, end):
372 for r in xrange(start, end):
373 c = self.changectx(r)
373 c = self.changectx(r)
374 b = c.branch()
374 b = c.branch()
375 partial[b] = c.node()
375 partial[b] = c.node()
376
376
377 def lookup(self, key):
377 def lookup(self, key):
378 if key == '.':
378 if key == '.':
379 key, second = self.dirstate.parents()
379 key, second = self.dirstate.parents()
380 if key == nullid:
380 if key == nullid:
381 raise repo.RepoError(_("no revision checked out"))
381 raise repo.RepoError(_("no revision checked out"))
382 if second != nullid:
382 if second != nullid:
383 self.ui.warn(_("warning: working directory has two parents, "
383 self.ui.warn(_("warning: working directory has two parents, "
384 "tag '.' uses the first\n"))
384 "tag '.' uses the first\n"))
385 elif key == 'null':
385 elif key == 'null':
386 return nullid
386 return nullid
387 n = self.changelog._match(key)
387 n = self.changelog._match(key)
388 if n:
388 if n:
389 return n
389 return n
390 if key in self.tags():
390 if key in self.tags():
391 return self.tags()[key]
391 return self.tags()[key]
392 if key in self.branchtags():
392 if key in self.branchtags():
393 return self.branchtags()[key]
393 return self.branchtags()[key]
394 n = self.changelog._partialmatch(key)
394 n = self.changelog._partialmatch(key)
395 if n:
395 if n:
396 return n
396 return n
397 try:
397 try:
398 if len(key) == 20:
398 if len(key) == 20:
399 key = hex(key)
399 key = hex(key)
400 except:
400 except:
401 pass
401 pass
402 raise repo.RepoError(_("unknown revision '%s'") % key)
402 raise repo.RepoError(_("unknown revision '%s'") % key)
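# Illustrative sketch (editor's addition, not part of this changeset):
# lookup() above tries its resolvers in a fixed priority order -- exact
# changelog match, then tags, then branch tags, then partial hex match --
# and the first hit wins.  A stripped-down model with stand-in callables:
def resolve(key, resolvers):
    for r in resolvers:                  # priority order, first answer wins
        n = r(key)
        if n is not None:
            return n
    raise KeyError("unknown revision %r" % key)

exact = {"1a2b": "node-1a2b"}.get        # stands in for changelog._match
tags = {"tip": "node-ff"}.get            # stands in for self.tags()
branches = {"default": "node-ff"}.get    # stands in for self.branchtags()
partial = lambda k: None                 # stands in for _partialmatch
assert resolve("tip", [exact, tags, branches, partial]) == "node-ff"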
403
403
404 def dev(self):
404 def dev(self):
405 return os.lstat(self.path).st_dev
405 return os.lstat(self.path).st_dev
406
406
407 def local(self):
407 def local(self):
408 return True
408 return True
409
409
410 def join(self, f):
410 def join(self, f):
411 return os.path.join(self.path, f)
411 return os.path.join(self.path, f)
412
412
413 def sjoin(self, f):
413 def sjoin(self, f):
414 f = self.encodefn(f)
414 f = self.encodefn(f)
415 return os.path.join(self.spath, f)
415 return os.path.join(self.spath, f)
416
416
417 def wjoin(self, f):
417 def wjoin(self, f):
418 return os.path.join(self.root, f)
418 return os.path.join(self.root, f)
419
419
420 def file(self, f):
420 def file(self, f):
421 if f[0] == '/':
421 if f[0] == '/':
422 f = f[1:]
422 f = f[1:]
423 return filelog.filelog(self.sopener, f)
423 return filelog.filelog(self.sopener, f)
424
424
425 def changectx(self, changeid=None):
425 def changectx(self, changeid=None):
426 return context.changectx(self, changeid)
426 return context.changectx(self, changeid)
427
427
428 def workingctx(self):
428 def workingctx(self):
429 return context.workingctx(self)
429 return context.workingctx(self)
430
430
431 def parents(self, changeid=None):
431 def parents(self, changeid=None):
432 '''
432 '''
433 get list of changectxs for parents of changeid or working directory
433 get list of changectxs for parents of changeid or working directory
434 '''
434 '''
435 if changeid is None:
435 if changeid is None:
436 pl = self.dirstate.parents()
436 pl = self.dirstate.parents()
437 else:
437 else:
438 n = self.changelog.lookup(changeid)
438 n = self.changelog.lookup(changeid)
439 pl = self.changelog.parents(n)
439 pl = self.changelog.parents(n)
440 if pl[1] == nullid:
440 if pl[1] == nullid:
441 return [self.changectx(pl[0])]
441 return [self.changectx(pl[0])]
442 return [self.changectx(pl[0]), self.changectx(pl[1])]
442 return [self.changectx(pl[0]), self.changectx(pl[1])]
443
443
444 def filectx(self, path, changeid=None, fileid=None):
444 def filectx(self, path, changeid=None, fileid=None):
445 """changeid can be a changeset revision, node, or tag.
445 """changeid can be a changeset revision, node, or tag.
446 fileid can be a file revision or node."""
446 fileid can be a file revision or node."""
447 return context.filectx(self, path, changeid, fileid)
447 return context.filectx(self, path, changeid, fileid)
448
448
449 def getcwd(self):
449 def getcwd(self):
450 return self.dirstate.getcwd()
450 return self.dirstate.getcwd()
451
451
452 def pathto(self, f, cwd=None):
452 def pathto(self, f, cwd=None):
453 return self.dirstate.pathto(f, cwd)
453 return self.dirstate.pathto(f, cwd)
454
454
455 def wfile(self, f, mode='r'):
455 def wfile(self, f, mode='r'):
456 return self.wopener(f, mode)
456 return self.wopener(f, mode)
457
457
458 def _link(self, f):
458 def _link(self, f):
459 return os.path.islink(self.wjoin(f))
459 return os.path.islink(self.wjoin(f))
460
460
461 def _filter(self, filter, filename, data):
461 def _filter(self, filter, filename, data):
462 if filter not in self.filterpats:
462 if filter not in self.filterpats:
463 l = []
463 l = []
464 for pat, cmd in self.ui.configitems(filter):
464 for pat, cmd in self.ui.configitems(filter):
465 mf = util.matcher(self.root, "", [pat], [], [])[1]
465 mf = util.matcher(self.root, "", [pat], [], [])[1]
466 l.append((mf, cmd))
466 l.append((mf, cmd))
467 self.filterpats[filter] = l
467 self.filterpats[filter] = l
468
468
469 for mf, cmd in self.filterpats[filter]:
469 for mf, cmd in self.filterpats[filter]:
470 if mf(filename):
470 if mf(filename):
471 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
471 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
472 data = util.filter(data, cmd)
472 data = util.filter(data, cmd)
473 break
473 break
474
474
475 return data
475 return data
476
476
477 def wread(self, filename):
477 def wread(self, filename):
478 if self._link(filename):
478 if self._link(filename):
479 data = os.readlink(self.wjoin(filename))
479 data = os.readlink(self.wjoin(filename))
480 else:
480 else:
481 data = self.wopener(filename, 'r').read()
481 data = self.wopener(filename, 'r').read()
482 return self._filter("encode", filename, data)
482 return self._filter("encode", filename, data)
483
483
484 def wwrite(self, filename, data, flags):
484 def wwrite(self, filename, data, flags):
485 data = self._filter("decode", filename, data)
485 data = self._filter("decode", filename, data)
486 if "l" in flags:
486 if "l" in flags:
487 self.wopener.symlink(data, filename)
487 self.wopener.symlink(data, filename)
488 else:
488 else:
489 try:
489 try:
490 if self._link(filename):
490 if self._link(filename):
491 os.unlink(self.wjoin(filename))
491 os.unlink(self.wjoin(filename))
492 except OSError:
492 except OSError:
493 pass
493 pass
494 self.wopener(filename, 'w').write(data)
494 self.wopener(filename, 'w').write(data)
495 util.set_exec(self.wjoin(filename), "x" in flags)
495 util.set_exec(self.wjoin(filename), "x" in flags)
496
496
497 def wwritedata(self, filename, data):
497 def wwritedata(self, filename, data):
498 return self._filter("decode", filename, data)
498 return self._filter("decode", filename, data)
499
499
500 def transaction(self):
500 def transaction(self):
501 if self._transref and self._transref():
501 if self._transref and self._transref():
502 return self._transref().nest()
502 return self._transref().nest()
503
503
504 # save dirstate for rollback
504 # save dirstate for rollback
505 try:
505 try:
506 ds = self.opener("dirstate").read()
506 ds = self.opener("dirstate").read()
507 except IOError:
507 except IOError:
508 ds = ""
508 ds = ""
509 self.opener("journal.dirstate", "w").write(ds)
509 self.opener("journal.dirstate", "w").write(ds)
510
510
511 renames = [(self.sjoin("journal"), self.sjoin("undo")),
511 renames = [(self.sjoin("journal"), self.sjoin("undo")),
512 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
512 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
513 tr = transaction.transaction(self.ui.warn, self.sopener,
513 tr = transaction.transaction(self.ui.warn, self.sopener,
514 self.sjoin("journal"),
514 self.sjoin("journal"),
515 aftertrans(renames))
515 aftertrans(renames))
516 self._transref = weakref.ref(tr)
516 self._transref = weakref.ref(tr)
517 return tr
517 return tr
518
518
519 def recover(self):
519 def recover(self):
520 l = self.lock()
520 l = self.lock()
521 try:
521 try:
522 if os.path.exists(self.sjoin("journal")):
522 if os.path.exists(self.sjoin("journal")):
523 self.ui.status(_("rolling back interrupted transaction\n"))
523 self.ui.status(_("rolling back interrupted transaction\n"))
524 transaction.rollback(self.sopener, self.sjoin("journal"))
524 transaction.rollback(self.sopener, self.sjoin("journal"))
525 self.invalidate()
525 self.invalidate()
526 return True
526 return True
527 else:
527 else:
528 self.ui.warn(_("no interrupted transaction available\n"))
528 self.ui.warn(_("no interrupted transaction available\n"))
529 return False
529 return False
530 finally:
530 finally:
531 del l
531 del l
532
532
533 def rollback(self, wlock=None, lock=None):
533 def rollback(self):
534 wlock = lock = None
534 try:
535 try:
535 if not wlock:
536 wlock = self.wlock()
536 wlock = self.wlock()
537 if not lock:
538 lock = self.lock()
537 lock = self.lock()
539 if os.path.exists(self.sjoin("undo")):
538 if os.path.exists(self.sjoin("undo")):
540 self.ui.status(_("rolling back last transaction\n"))
539 self.ui.status(_("rolling back last transaction\n"))
541 transaction.rollback(self.sopener, self.sjoin("undo"))
540 transaction.rollback(self.sopener, self.sjoin("undo"))
542 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
541 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
543 self.invalidate()
542 self.invalidate()
544 self.dirstate.invalidate()
543 self.dirstate.invalidate()
545 else:
544 else:
546 self.ui.warn(_("no rollback information available\n"))
545 self.ui.warn(_("no rollback information available\n"))
547 finally:
546 finally:
548 del wlock, lock
547 del wlock, lock
549
548
550 def invalidate(self):
549 def invalidate(self):
551 for a in "changelog manifest".split():
550 for a in "changelog manifest".split():
552 if hasattr(self, a):
551 if hasattr(self, a):
553 self.__delattr__(a)
552 self.__delattr__(a)
554 self.tagscache = None
553 self.tagscache = None
555 self.nodetagscache = None
554 self.nodetagscache = None
556
555
557 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
556 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
558 try:
557 try:
559 l = lock.lock(lockname, 0, releasefn, desc=desc)
558 l = lock.lock(lockname, 0, releasefn, desc=desc)
560 except lock.LockHeld, inst:
559 except lock.LockHeld, inst:
561 if not wait:
560 if not wait:
562 raise
561 raise
563 self.ui.warn(_("waiting for lock on %s held by %r\n") %
562 self.ui.warn(_("waiting for lock on %s held by %r\n") %
564 (desc, inst.locker))
563 (desc, inst.locker))
565 # default to 600 seconds timeout
564 # default to 600 seconds timeout
566 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
565 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
567 releasefn, desc=desc)
566 releasefn, desc=desc)
568 if acquirefn:
567 if acquirefn:
569 acquirefn()
568 acquirefn()
570 return l
569 return l
571
570
572 def lock(self, wait=True):
571 def lock(self, wait=True):
573 return self._lock(self.sjoin("lock"), wait, None, self.invalidate,
572 if self._lockref and self._lockref():
573 return self._lockref()
574
575 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
574 _('repository %s') % self.origroot)
576 _('repository %s') % self.origroot)
577 self._lockref = weakref.ref(l)
578 return l
575
579
576 def wlock(self, wait=True):
580 def wlock(self, wait=True):
577 return self._lock(self.join("wlock"), wait, self.dirstate.write,
581 if self._wlockref and self._wlockref():
578 self.dirstate.invalidate,
582 return self._wlockref()
579 _('working directory of %s') % self.origroot)
583
584 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
585 self.dirstate.invalidate, _('working directory of %s') %
586 self.origroot)
587 self._wlockref = weakref.ref(l)
588 return l
580
589
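# Illustrative sketch (editor's addition, not part of this changeset): the
# weakref caching in lock()/wlock() above is what makes the repository locks
# recursive -- a nested call returns the lock object that is already held
# instead of trying to take the lock file again.  A toy model of the pattern;
# Lock and Repo here are stand-ins, not the mercurial.lock API.
import weakref

class Lock(object):
    held = 0
    def __init__(self):
        Lock.held += 1
    def __del__(self):
        Lock.held -= 1                   # "released" when the last ref dies

class Repo(object):
    _lockref = None
    def lock(self):
        if self._lockref and self._lockref():
            return self._lockref()       # nested call: reuse, don't block
        l = Lock()
        self._lockref = weakref.ref(l)   # weak, so holding the cache alone
        return l                         # never keeps the lock alive

r = Repo()
outer = r.lock()
inner = r.lock()                         # e.g. commit() calling add() inside
assert inner is outer and Lock.held == 1
del inner, outer                         # callers do "del lock" in finally:
assert Lock.held == 0                    # released at the outermost level (CPython)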
581 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
590 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
582 """
591 """
583 commit an individual file as part of a larger transaction
592 commit an individual file as part of a larger transaction
584 """
593 """
585
594
586 t = self.wread(fn)
595 t = self.wread(fn)
587 fl = self.file(fn)
596 fl = self.file(fn)
588 fp1 = manifest1.get(fn, nullid)
597 fp1 = manifest1.get(fn, nullid)
589 fp2 = manifest2.get(fn, nullid)
598 fp2 = manifest2.get(fn, nullid)
590
599
591 meta = {}
600 meta = {}
592 cp = self.dirstate.copied(fn)
601 cp = self.dirstate.copied(fn)
593 if cp:
602 if cp:
594 # Mark the new revision of this file as a copy of another
603 # Mark the new revision of this file as a copy of another
595 # file. This copy data will effectively act as a parent
604 # file. This copy data will effectively act as a parent
596 # of this new revision. If this is a merge, the first
605 # of this new revision. If this is a merge, the first
597 # parent will be the nullid (meaning "look up the copy data")
606 # parent will be the nullid (meaning "look up the copy data")
598 # and the second one will be the other parent. For example:
607 # and the second one will be the other parent. For example:
599 #
608 #
600 # 0 --- 1 --- 3 rev1 changes file foo
609 # 0 --- 1 --- 3 rev1 changes file foo
601 # \ / rev2 renames foo to bar and changes it
610 # \ / rev2 renames foo to bar and changes it
602 # \- 2 -/ rev3 should have bar with all changes and
611 # \- 2 -/ rev3 should have bar with all changes and
603 # should record that bar descends from
612 # should record that bar descends from
604 # bar in rev2 and foo in rev1
613 # bar in rev2 and foo in rev1
605 #
614 #
606 # this allows this merge to succeed:
615 # this allows this merge to succeed:
607 #
616 #
608 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
617 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
609 # \ / merging rev3 and rev4 should use bar@rev2
618 # \ / merging rev3 and rev4 should use bar@rev2
610 # \- 2 --- 4 as the merge base
619 # \- 2 --- 4 as the merge base
611 #
620 #
612 meta["copy"] = cp
621 meta["copy"] = cp
613 if not manifest2: # not a branch merge
622 if not manifest2: # not a branch merge
614 meta["copyrev"] = hex(manifest1.get(cp, nullid))
623 meta["copyrev"] = hex(manifest1.get(cp, nullid))
615 fp2 = nullid
624 fp2 = nullid
616 elif fp2 != nullid: # copied on remote side
625 elif fp2 != nullid: # copied on remote side
617 meta["copyrev"] = hex(manifest1.get(cp, nullid))
626 meta["copyrev"] = hex(manifest1.get(cp, nullid))
618 elif fp1 != nullid: # copied on local side, reversed
627 elif fp1 != nullid: # copied on local side, reversed
619 meta["copyrev"] = hex(manifest2.get(cp))
628 meta["copyrev"] = hex(manifest2.get(cp))
620 fp2 = fp1
629 fp2 = fp1
621 else: # directory rename
630 else: # directory rename
622 meta["copyrev"] = hex(manifest1.get(cp, nullid))
631 meta["copyrev"] = hex(manifest1.get(cp, nullid))
623 self.ui.debug(_(" %s: copy %s:%s\n") %
632 self.ui.debug(_(" %s: copy %s:%s\n") %
624 (fn, cp, meta["copyrev"]))
633 (fn, cp, meta["copyrev"]))
625 fp1 = nullid
634 fp1 = nullid
626 elif fp2 != nullid:
635 elif fp2 != nullid:
627 # is one parent an ancestor of the other?
636 # is one parent an ancestor of the other?
628 fpa = fl.ancestor(fp1, fp2)
637 fpa = fl.ancestor(fp1, fp2)
629 if fpa == fp1:
638 if fpa == fp1:
630 fp1, fp2 = fp2, nullid
639 fp1, fp2 = fp2, nullid
631 elif fpa == fp2:
640 elif fpa == fp2:
632 fp2 = nullid
641 fp2 = nullid
633
642
634 # is the file unmodified from the parent? report existing entry
643 # is the file unmodified from the parent? report existing entry
635 if fp2 == nullid and not fl.cmp(fp1, t):
644 if fp2 == nullid and not fl.cmp(fp1, t):
636 return fp1
645 return fp1
637
646
638 changelist.append(fn)
647 changelist.append(fn)
639 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
648 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
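# Illustrative sketch (editor's addition, not part of this changeset): the
# copy/rename bookkeeping above, restated as a pure function on plain dicts
# (manifests) and hex strings (nodes).  copymeta is a made-up name and only
# mirrors the branch structure; no filelog is involved.
NULL = "0" * 40

def copymeta(cp, m1, m2, fp1, fp2):
    meta = {"copy": cp}
    if not m2:                           # not a branch merge
        meta["copyrev"] = m1.get(cp, NULL)
        fp2 = NULL
    elif fp2 != NULL:                    # copied on the remote side
        meta["copyrev"] = m1.get(cp, NULL)
    elif fp1 != NULL:                    # copied on the local side, reversed
        meta["copyrev"] = m2.get(cp)
        fp2 = fp1
    else:                                # directory rename
        meta["copyrev"] = m1.get(cp, NULL)
    fp1 = NULL                           # nullid parent means "look up copy data"
    return meta, fp1, fp2

meta, fp1, fp2 = copymeta("foo", {"foo": "aa" * 20}, {}, NULL, NULL)
assert meta == {"copy": "foo", "copyrev": "aa" * 20} and (fp1, fp2) == (NULL, NULL)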
640
649
641 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
650 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
642 if p1 is None:
651 if p1 is None:
643 p1, p2 = self.dirstate.parents()
652 p1, p2 = self.dirstate.parents()
644 return self.commit(files=files, text=text, user=user, date=date,
653 return self.commit(files=files, text=text, user=user, date=date,
645 p1=p1, p2=p2, wlock=wlock, extra=extra)
654 p1=p1, p2=p2, extra=extra)
646
655
647 def commit(self, files=None, text="", user=None, date=None,
656 def commit(self, files=None, text="", user=None, date=None,
648 match=util.always, force=False, lock=None, wlock=None,
657 match=util.always, force=False, force_editor=False,
649 force_editor=False, p1=None, p2=None, extra={}):
658 p1=None, p2=None, extra={}):
650 tr = None
659 wlock = lock = tr = None
651 try:
660 try:
652 commit = []
661 commit = []
653 remove = []
662 remove = []
654 changed = []
663 changed = []
655 use_dirstate = (p1 is None) # not rawcommit
664 use_dirstate = (p1 is None) # not rawcommit
656 extra = extra.copy()
665 extra = extra.copy()
657
666
658 if use_dirstate:
667 if use_dirstate:
659 if files:
668 if files:
660 for f in files:
669 for f in files:
661 s = self.dirstate[f]
670 s = self.dirstate[f]
662 if s in 'nma':
671 if s in 'nma':
663 commit.append(f)
672 commit.append(f)
664 elif s == 'r':
673 elif s == 'r':
665 remove.append(f)
674 remove.append(f)
666 else:
675 else:
667 self.ui.warn(_("%s not tracked!\n") % f)
676 self.ui.warn(_("%s not tracked!\n") % f)
668 else:
677 else:
669 changes = self.status(match=match)[:5]
678 changes = self.status(match=match)[:5]
670 modified, added, removed, deleted, unknown = changes
679 modified, added, removed, deleted, unknown = changes
671 commit = modified + added
680 commit = modified + added
672 remove = removed
681 remove = removed
673 else:
682 else:
674 commit = files
683 commit = files
675
684
676 if use_dirstate:
685 if use_dirstate:
677 p1, p2 = self.dirstate.parents()
686 p1, p2 = self.dirstate.parents()
678 update_dirstate = True
687 update_dirstate = True
679 else:
688 else:
680 p1, p2 = p1, p2 or nullid
689 p1, p2 = p1, p2 or nullid
681 update_dirstate = (self.dirstate.parents()[0] == p1)
690 update_dirstate = (self.dirstate.parents()[0] == p1)
682
691
683 c1 = self.changelog.read(p1)
692 c1 = self.changelog.read(p1)
684 c2 = self.changelog.read(p2)
693 c2 = self.changelog.read(p2)
685 m1 = self.manifest.read(c1[0]).copy()
694 m1 = self.manifest.read(c1[0]).copy()
686 m2 = self.manifest.read(c2[0])
695 m2 = self.manifest.read(c2[0])
687
696
688 if use_dirstate:
697 if use_dirstate:
689 branchname = self.workingctx().branch()
698 branchname = self.workingctx().branch()
690 try:
699 try:
691 branchname = branchname.decode('UTF-8').encode('UTF-8')
700 branchname = branchname.decode('UTF-8').encode('UTF-8')
692 except UnicodeDecodeError:
701 except UnicodeDecodeError:
693 raise util.Abort(_('branch name not in UTF-8!'))
702 raise util.Abort(_('branch name not in UTF-8!'))
694 else:
703 else:
695 branchname = ""
704 branchname = ""
696
705
697 if use_dirstate:
706 if use_dirstate:
698 oldname = c1[5].get("branch") # stored in UTF-8
707 oldname = c1[5].get("branch") # stored in UTF-8
699 if (not commit and not remove and not force and p2 == nullid
708 if (not commit and not remove and not force and p2 == nullid
700 and branchname == oldname):
709 and branchname == oldname):
701 self.ui.status(_("nothing changed\n"))
710 self.ui.status(_("nothing changed\n"))
702 return None
711 return None
703
712
704 xp1 = hex(p1)
713 xp1 = hex(p1)
705 if p2 == nullid: xp2 = ''
714 if p2 == nullid: xp2 = ''
706 else: xp2 = hex(p2)
715 else: xp2 = hex(p2)
707
716
708 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
717 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
709
718
710 if not wlock:
711 wlock = self.wlock()
719 wlock = self.wlock()
712 if not lock:
713 lock = self.lock()
720 lock = self.lock()
714 tr = self.transaction()
721 tr = self.transaction()
715
722
716 # check in files
723 # check in files
717 new = {}
724 new = {}
718 linkrev = self.changelog.count()
725 linkrev = self.changelog.count()
719 commit.sort()
726 commit.sort()
720 is_exec = util.execfunc(self.root, m1.execf)
727 is_exec = util.execfunc(self.root, m1.execf)
721 is_link = util.linkfunc(self.root, m1.linkf)
728 is_link = util.linkfunc(self.root, m1.linkf)
722 for f in commit:
729 for f in commit:
723 self.ui.note(f + "\n")
730 self.ui.note(f + "\n")
724 try:
731 try:
725 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
732 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
726 new_exec = is_exec(f)
733 new_exec = is_exec(f)
727 new_link = is_link(f)
734 new_link = is_link(f)
728 if not changed or changed[-1] != f:
735 if not changed or changed[-1] != f:
729 # mention the file in the changelog if some
736 # mention the file in the changelog if some
730 # flag changed, even if there was no content
737 # flag changed, even if there was no content
731 # change.
738 # change.
732 old_exec = m1.execf(f)
739 old_exec = m1.execf(f)
733 old_link = m1.linkf(f)
740 old_link = m1.linkf(f)
734 if old_exec != new_exec or old_link != new_link:
741 if old_exec != new_exec or old_link != new_link:
735 changed.append(f)
742 changed.append(f)
736 m1.set(f, new_exec, new_link)
743 m1.set(f, new_exec, new_link)
737 except (OSError, IOError):
744 except (OSError, IOError):
738 if use_dirstate:
745 if use_dirstate:
739 self.ui.warn(_("trouble committing %s!\n") % f)
746 self.ui.warn(_("trouble committing %s!\n") % f)
740 raise
747 raise
741 else:
748 else:
742 remove.append(f)
749 remove.append(f)
743
750
744 # update manifest
751 # update manifest
745 m1.update(new)
752 m1.update(new)
746 remove.sort()
753 remove.sort()
747 removed = []
754 removed = []
748
755
749 for f in remove:
756 for f in remove:
750 if f in m1:
757 if f in m1:
751 del m1[f]
758 del m1[f]
752 removed.append(f)
759 removed.append(f)
753 elif f in m2:
760 elif f in m2:
754 removed.append(f)
761 removed.append(f)
755 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
762 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
756 (new, removed))
763 (new, removed))
757
764
758 # add changeset
765 # add changeset
759 new = new.keys()
766 new = new.keys()
760 new.sort()
767 new.sort()
761
768
762 user = user or self.ui.username()
769 user = user or self.ui.username()
763 if not text or force_editor:
770 if not text or force_editor:
764 edittext = []
771 edittext = []
765 if text:
772 if text:
766 edittext.append(text)
773 edittext.append(text)
767 edittext.append("")
774 edittext.append("")
768 edittext.append("HG: user: %s" % user)
775 edittext.append("HG: user: %s" % user)
769 if p2 != nullid:
776 if p2 != nullid:
770 edittext.append("HG: branch merge")
777 edittext.append("HG: branch merge")
771 if branchname:
778 if branchname:
772 edittext.append("HG: branch %s" % util.tolocal(branchname))
779 edittext.append("HG: branch %s" % util.tolocal(branchname))
773 edittext.extend(["HG: changed %s" % f for f in changed])
780 edittext.extend(["HG: changed %s" % f for f in changed])
774 edittext.extend(["HG: removed %s" % f for f in removed])
781 edittext.extend(["HG: removed %s" % f for f in removed])
775 if not changed and not remove:
782 if not changed and not remove:
776 edittext.append("HG: no files changed")
783 edittext.append("HG: no files changed")
777 edittext.append("")
784 edittext.append("")
778 # run editor in the repository root
785 # run editor in the repository root
779 olddir = os.getcwd()
786 olddir = os.getcwd()
780 os.chdir(self.root)
787 os.chdir(self.root)
781 text = self.ui.edit("\n".join(edittext), user)
788 text = self.ui.edit("\n".join(edittext), user)
782 os.chdir(olddir)
789 os.chdir(olddir)
783
790
784 lines = [line.rstrip() for line in text.rstrip().splitlines()]
791 lines = [line.rstrip() for line in text.rstrip().splitlines()]
785 while lines and not lines[0]:
792 while lines and not lines[0]:
786 del lines[0]
793 del lines[0]
787 if not lines:
794 if not lines:
788 return None
795 return None
789 text = '\n'.join(lines)
796 text = '\n'.join(lines)
790 if branchname:
797 if branchname:
791 extra["branch"] = branchname
798 extra["branch"] = branchname
792 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
799 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
793 user, date, extra)
800 user, date, extra)
794 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
801 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
795 parent2=xp2)
802 parent2=xp2)
796 tr.close()
803 tr.close()
797
804
798 if self.branchcache and "branch" in extra:
805 if self.branchcache and "branch" in extra:
799 self.branchcache[util.tolocal(extra["branch"])] = n
806 self.branchcache[util.tolocal(extra["branch"])] = n
800
807
801 if use_dirstate or update_dirstate:
808 if use_dirstate or update_dirstate:
802 self.dirstate.setparents(n)
809 self.dirstate.setparents(n)
803 if use_dirstate:
810 if use_dirstate:
804 for f in new:
811 for f in new:
805 self.dirstate.normal(f)
812 self.dirstate.normal(f)
806 for f in removed:
813 for f in removed:
807 self.dirstate.forget(f)
814 self.dirstate.forget(f)
808
815
809 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
816 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
810 return n
817 return n
811 finally:
818 finally:
812 del lock, wlock, tr
819 del lock, wlock, tr
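# Illustrative sketch (editor's addition, not part of this changeset): with
# the lock/wlock arguments gone, commit() takes its own locks -- wlock, then
# lock, then the transaction -- and the finally: clause releases them simply
# by dropping the references.  A toy trace of that shape; Res is a stand-in
# object, and immediate release relies on CPython reference counting.
events = []

class Res(object):
    def __init__(self, name):
        self.name = name
        events.append("acquire " + name)
    def __del__(self):
        events.append("release " + self.name)

def commitish():
    wlock = lock = tr = None
    try:
        wlock = Res("wlock")
        lock = Res("lock")
        tr = Res("tr")
        # ... build and commit the changeset here ...
    finally:
        del lock, wlock, tr              # dropping the refs releases them

commitish()
assert events == ["acquire wlock", "acquire lock", "acquire tr",
                  "release lock", "release wlock", "release tr"]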
813
820
814 def walk(self, node=None, files=[], match=util.always, badmatch=None):
821 def walk(self, node=None, files=[], match=util.always, badmatch=None):
815 '''
822 '''
816 walk recursively through the directory tree or a given
823 walk recursively through the directory tree or a given
817 changeset, finding all files matched by the match
824 changeset, finding all files matched by the match
818 function
825 function
819
826
820 results are yielded in a tuple (src, filename), where src
827 results are yielded in a tuple (src, filename), where src
821 is one of:
828 is one of:
822 'f' the file was found in the directory tree
829 'f' the file was found in the directory tree
823 'm' the file was only in the dirstate and not in the tree
830 'm' the file was only in the dirstate and not in the tree
824 'b' file was not found and matched badmatch
831 'b' file was not found and matched badmatch
825 '''
832 '''
826
833
827 if node:
834 if node:
828 fdict = dict.fromkeys(files)
835 fdict = dict.fromkeys(files)
829 # for dirstate.walk, files=['.'] means "walk the whole tree".
836 # for dirstate.walk, files=['.'] means "walk the whole tree".
830 # follow that here, too
837 # follow that here, too
831 fdict.pop('.', None)
838 fdict.pop('.', None)
832 mdict = self.manifest.read(self.changelog.read(node)[0])
839 mdict = self.manifest.read(self.changelog.read(node)[0])
833 mfiles = mdict.keys()
840 mfiles = mdict.keys()
834 mfiles.sort()
841 mfiles.sort()
835 for fn in mfiles:
842 for fn in mfiles:
836 for ffn in fdict:
843 for ffn in fdict:
837 # match if the file is the exact name or a directory
844 # match if the file is the exact name or a directory
838 if ffn == fn or fn.startswith("%s/" % ffn):
845 if ffn == fn or fn.startswith("%s/" % ffn):
839 del fdict[ffn]
846 del fdict[ffn]
840 break
847 break
841 if match(fn):
848 if match(fn):
842 yield 'm', fn
849 yield 'm', fn
843 ffiles = fdict.keys()
850 ffiles = fdict.keys()
844 ffiles.sort()
851 ffiles.sort()
845 for fn in ffiles:
852 for fn in ffiles:
846 if badmatch and badmatch(fn):
853 if badmatch and badmatch(fn):
847 if match(fn):
854 if match(fn):
848 yield 'b', fn
855 yield 'b', fn
849 else:
856 else:
850 self.ui.warn(_('%s: No such file in rev %s\n')
857 self.ui.warn(_('%s: No such file in rev %s\n')
851 % (self.pathto(fn), short(node)))
858 % (self.pathto(fn), short(node)))
852 else:
859 else:
853 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
860 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
854 yield src, fn
861 yield src, fn
855
862
856 def status(self, node1=None, node2=None, files=[], match=util.always,
863 def status(self, node1=None, node2=None, files=[], match=util.always,
857 wlock=None, list_ignored=False, list_clean=False):
864 list_ignored=False, list_clean=False):
858 """return status of files between two nodes or node and working directory
865 """return status of files between two nodes or node and working directory
859
866
860 If node1 is None, use the first dirstate parent instead.
867 If node1 is None, use the first dirstate parent instead.
861 If node2 is None, compare node1 with working directory.
868 If node2 is None, compare node1 with working directory.
862 """
869 """
863
870
864 def fcmp(fn, getnode):
871 def fcmp(fn, getnode):
865 t1 = self.wread(fn)
872 t1 = self.wread(fn)
866 return self.file(fn).cmp(getnode(fn), t1)
873 return self.file(fn).cmp(getnode(fn), t1)
867
874
868 def mfmatches(node):
875 def mfmatches(node):
869 change = self.changelog.read(node)
876 change = self.changelog.read(node)
870 mf = self.manifest.read(change[0]).copy()
877 mf = self.manifest.read(change[0]).copy()
871 for fn in mf.keys():
878 for fn in mf.keys():
872 if not match(fn):
879 if not match(fn):
873 del mf[fn]
880 del mf[fn]
874 return mf
881 return mf
875
882
876 modified, added, removed, deleted, unknown = [], [], [], [], []
883 modified, added, removed, deleted, unknown = [], [], [], [], []
877 ignored, clean = [], []
884 ignored, clean = [], []
878
885
879 compareworking = False
886 compareworking = False
880 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
887 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
881 compareworking = True
888 compareworking = True
882
889
883 if not compareworking:
890 if not compareworking:
884 # read the manifest from node1 before the manifest from node2,
891 # read the manifest from node1 before the manifest from node2,
885 # so that we'll hit the manifest cache if we're going through
892 # so that we'll hit the manifest cache if we're going through
886 # all the revisions in parent->child order.
893 # all the revisions in parent->child order.
887 mf1 = mfmatches(node1)
894 mf1 = mfmatches(node1)
888
895
889 # are we comparing the working directory?
896 # are we comparing the working directory?
890 if not node2:
897 if not node2:
891 (lookup, modified, added, removed, deleted, unknown,
898 (lookup, modified, added, removed, deleted, unknown,
892 ignored, clean) = self.dirstate.status(files, match,
899 ignored, clean) = self.dirstate.status(files, match,
893 list_ignored, list_clean)
900 list_ignored, list_clean)
894
901
895 # are we comparing working dir against its parent?
902 # are we comparing working dir against its parent?
896 if compareworking:
903 if compareworking:
897 if lookup:
904 if lookup:
898 fixup = []
905 fixup = []
899 # do a full compare of any files that might have changed
906 # do a full compare of any files that might have changed
900 ctx = self.changectx()
907 ctx = self.changectx()
901 for f in lookup:
908 for f in lookup:
902 if f not in ctx or ctx[f].cmp(self.wread(f)):
909 if f not in ctx or ctx[f].cmp(self.wread(f)):
903 modified.append(f)
910 modified.append(f)
904 else:
911 else:
905 fixup.append(f)
912 fixup.append(f)
906 if list_clean:
913 if list_clean:
907 clean.append(f)
914 clean.append(f)
908
915
909 # update dirstate for files that are actually clean
916 # update dirstate for files that are actually clean
910 if fixup:
917 if fixup:
911 fixlock = wlock
918 wlock = None
912 try:
919 try:
913 if not fixlock:
914 try:
920 try:
915 fixlock = self.wlock(False)
921 wlock = self.wlock(False)
916 except lock.LockException:
922 except lock.LockException:
917 pass
923 pass
918 if fixlock:
924 if wlock:
919 for f in fixup:
925 for f in fixup:
920 self.dirstate.normal(f)
926 self.dirstate.normal(f)
921 finally:
927 finally:
922 del fixlock
928 del wlock
923 else:
929 else:
924 # we are comparing working dir against non-parent
930 # we are comparing working dir against non-parent
925 # generate a pseudo-manifest for the working dir
931 # generate a pseudo-manifest for the working dir
926 # XXX: create it in dirstate.py ?
932 # XXX: create it in dirstate.py ?
927 mf2 = mfmatches(self.dirstate.parents()[0])
933 mf2 = mfmatches(self.dirstate.parents()[0])
928 is_exec = util.execfunc(self.root, mf2.execf)
934 is_exec = util.execfunc(self.root, mf2.execf)
929 is_link = util.linkfunc(self.root, mf2.linkf)
935 is_link = util.linkfunc(self.root, mf2.linkf)
930 for f in lookup + modified + added:
936 for f in lookup + modified + added:
931 mf2[f] = ""
937 mf2[f] = ""
932 mf2.set(f, is_exec(f), is_link(f))
938 mf2.set(f, is_exec(f), is_link(f))
933 for f in removed:
939 for f in removed:
934 if f in mf2:
940 if f in mf2:
935 del mf2[f]
941 del mf2[f]
936
942
937 else:
943 else:
938 # we are comparing two revisions
944 # we are comparing two revisions
939 mf2 = mfmatches(node2)
945 mf2 = mfmatches(node2)
940
946
941 if not compareworking:
947 if not compareworking:
942 # flush lists from dirstate before comparing manifests
948 # flush lists from dirstate before comparing manifests
943 modified, added, clean = [], [], []
949 modified, added, clean = [], [], []
944
950
945 # make sure to sort the files so we talk to the disk in a
951 # make sure to sort the files so we talk to the disk in a
946 # reasonable order
952 # reasonable order
947 mf2keys = mf2.keys()
953 mf2keys = mf2.keys()
948 mf2keys.sort()
954 mf2keys.sort()
949 getnode = lambda fn: mf1.get(fn, nullid)
955 getnode = lambda fn: mf1.get(fn, nullid)
950 for fn in mf2keys:
956 for fn in mf2keys:
951 if mf1.has_key(fn):
957 if mf1.has_key(fn):
952 if (mf1.flags(fn) != mf2.flags(fn) or
958 if (mf1.flags(fn) != mf2.flags(fn) or
953 (mf1[fn] != mf2[fn] and
959 (mf1[fn] != mf2[fn] and
954 (mf2[fn] != "" or fcmp(fn, getnode)))):
960 (mf2[fn] != "" or fcmp(fn, getnode)))):
955 modified.append(fn)
961 modified.append(fn)
956 elif list_clean:
962 elif list_clean:
957 clean.append(fn)
963 clean.append(fn)
958 del mf1[fn]
964 del mf1[fn]
959 else:
965 else:
960 added.append(fn)
966 added.append(fn)
961
967
962 removed = mf1.keys()
968 removed = mf1.keys()
963
969
964 # sort and return results:
970 # sort and return results:
965 for l in modified, added, removed, deleted, unknown, ignored, clean:
971 for l in modified, added, removed, deleted, unknown, ignored, clean:
966 l.sort()
972 l.sort()
967 return (modified, added, removed, deleted, unknown, ignored, clean)
973 return (modified, added, removed, deleted, unknown, ignored, clean)
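# Illustrative sketch (editor's addition, not part of this changeset): the
# fixup block above now takes the working-directory lock itself, and it does
# so opportunistically -- wlock(False) means "don't wait" -- skipping the
# dirstate write-back if someone else holds the lock.  The shape of that
# pattern with stand-in names:
class LockHeld(Exception):
    pass

busy = {"wlock": True}                   # pretend another process owns it

def acquire(wait):
    if busy["wlock"] and not wait:
        raise LockHeld()
    return "lock-token"

def trylock(acquire):
    try:
        return acquire(False)            # wait=False: fail fast, don't block
    except LockHeld:
        return None                      # fine -- the cache update is optional

assert trylock(acquire) is None          # lock busy: skip the write-back
busy["wlock"] = False
assert trylock(acquire) == "lock-token"  # free: a real caller would write now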
968
974
969 def add(self, list, wlock=None):
975 def add(self, list):
976 wlock = self.wlock()
970 try:
977 try:
971 if not wlock:
972 wlock = self.wlock()
973 for f in list:
978 for f in list:
974 p = self.wjoin(f)
979 p = self.wjoin(f)
975 try:
980 try:
976 st = os.lstat(p)
981 st = os.lstat(p)
977 except:
982 except:
978 self.ui.warn(_("%s does not exist!\n") % f)
983 self.ui.warn(_("%s does not exist!\n") % f)
979 continue
984 continue
980 if st.st_size > 10000000:
985 if st.st_size > 10000000:
981 self.ui.warn(_("%s: files over 10MB may cause memory and"
986 self.ui.warn(_("%s: files over 10MB may cause memory and"
982 " performance problems\n"
987 " performance problems\n"
983 "(use 'hg revert %s' to unadd the file)\n")
988 "(use 'hg revert %s' to unadd the file)\n")
984 % (f, f))
989 % (f, f))
985 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
990 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
986 self.ui.warn(_("%s not added: only files and symlinks "
991 self.ui.warn(_("%s not added: only files and symlinks "
987 "supported currently\n") % f)
992 "supported currently\n") % f)
988 elif self.dirstate[f] in 'an':
993 elif self.dirstate[f] in 'an':
989 self.ui.warn(_("%s already tracked!\n") % f)
994 self.ui.warn(_("%s already tracked!\n") % f)
990 else:
995 else:
991 self.dirstate.add(f)
996 self.dirstate.add(f)
992 finally:
997 finally:
993 del wlock
998 del wlock
994
999
995 def forget(self, list, wlock=None):
1000 def forget(self, list):
1001 wlock = self.wlock()
996 try:
1002 try:
997 if not wlock:
998 wlock = self.wlock()
999 for f in list:
1003 for f in list:
1000 if self.dirstate[f] != 'a':
1004 if self.dirstate[f] != 'a':
1001 self.ui.warn(_("%s not added!\n") % f)
1005 self.ui.warn(_("%s not added!\n") % f)
1002 else:
1006 else:
1003 self.dirstate.forget(f)
1007 self.dirstate.forget(f)
1004 finally:
1008 finally:
1005 del wlock
1009 del wlock
1006
1010
1007 def remove(self, list, unlink=False, wlock=None):
1011 def remove(self, list, unlink=False):
1012 wlock = None
1008 try:
1013 try:
1009 if unlink:
1014 if unlink:
1010 for f in list:
1015 for f in list:
1011 try:
1016 try:
1012 util.unlink(self.wjoin(f))
1017 util.unlink(self.wjoin(f))
1013 except OSError, inst:
1018 except OSError, inst:
1014 if inst.errno != errno.ENOENT:
1019 if inst.errno != errno.ENOENT:
1015 raise
1020 raise
1016 if not wlock:
1017 wlock = self.wlock()
1021 wlock = self.wlock()
1018 for f in list:
1022 for f in list:
1019 if unlink and os.path.exists(self.wjoin(f)):
1023 if unlink and os.path.exists(self.wjoin(f)):
1020 self.ui.warn(_("%s still exists!\n") % f)
1024 self.ui.warn(_("%s still exists!\n") % f)
1021 elif self.dirstate[f] == 'a':
1025 elif self.dirstate[f] == 'a':
1022 self.dirstate.forget(f)
1026 self.dirstate.forget(f)
1023 elif f not in self.dirstate:
1027 elif f not in self.dirstate:
1024 self.ui.warn(_("%s not tracked!\n") % f)
1028 self.ui.warn(_("%s not tracked!\n") % f)
1025 else:
1029 else:
1026 self.dirstate.remove(f)
1030 self.dirstate.remove(f)
1027 finally:
1031 finally:
1028 del wlock
1032 del wlock
1029
1033
1030 def undelete(self, list, wlock=None):
1034 def undelete(self, list):
1035 wlock = None
1031 try:
1036 try:
1032 p = self.dirstate.parents()[0]
1037 p = self.dirstate.parents()[0]
1033 mn = self.changelog.read(p)[0]
1038 mn = self.changelog.read(p)[0]
1034 m = self.manifest.read(mn)
1039 m = self.manifest.read(mn)
1035 if not wlock:
1036 wlock = self.wlock()
1040 wlock = self.wlock()
1037 for f in list:
1041 for f in list:
1038 if self.dirstate[f] != 'r':
1042 if self.dirstate[f] != 'r':
1039 self.ui.warn("%s not removed!\n" % f)
1043 self.ui.warn("%s not removed!\n" % f)
1040 else:
1044 else:
1041 t = self.file(f).read(m[f])
1045 t = self.file(f).read(m[f])
1042 self.wwrite(f, t, m.flags(f))
1046 self.wwrite(f, t, m.flags(f))
1043 self.dirstate.normal(f)
1047 self.dirstate.normal(f)
1044 finally:
1048 finally:
1045 del wlock
1049 del wlock
1046
1050
1047 def copy(self, source, dest, wlock=None):
1051 def copy(self, source, dest):
1052 wlock = None
1048 try:
1053 try:
1049 p = self.wjoin(dest)
1054 p = self.wjoin(dest)
1050 if not (os.path.exists(p) or os.path.islink(p)):
1055 if not (os.path.exists(p) or os.path.islink(p)):
1051 self.ui.warn(_("%s does not exist!\n") % dest)
1056 self.ui.warn(_("%s does not exist!\n") % dest)
1052 elif not (os.path.isfile(p) or os.path.islink(p)):
1057 elif not (os.path.isfile(p) or os.path.islink(p)):
1053 self.ui.warn(_("copy failed: %s is not a file or a "
1058 self.ui.warn(_("copy failed: %s is not a file or a "
1054 "symbolic link\n") % dest)
1059 "symbolic link\n") % dest)
1055 else:
1060 else:
1056 if not wlock:
1057 wlock = self.wlock()
1061 wlock = self.wlock()
1058 if dest not in self.dirstate:
1062 if dest not in self.dirstate:
1059 self.dirstate.add(dest)
1063 self.dirstate.add(dest)
1060 self.dirstate.copy(source, dest)
1064 self.dirstate.copy(source, dest)
1061 finally:
1065 finally:
1062 del wlock
1066 del wlock
1063
1067
1064 def heads(self, start=None):
1068 def heads(self, start=None):
1065 heads = self.changelog.heads(start)
1069 heads = self.changelog.heads(start)
1066 # sort the output in rev descending order
1070 # sort the output in rev descending order
1067 heads = [(-self.changelog.rev(h), h) for h in heads]
1071 heads = [(-self.changelog.rev(h), h) for h in heads]
1068 heads.sort()
1072 heads.sort()
1069 return [n for (r, n) in heads]
1073 return [n for (r, n) in heads]
1070
1074
1071 def branchheads(self, branch, start=None):
1075 def branchheads(self, branch, start=None):
1072 branches = self.branchtags()
1076 branches = self.branchtags()
1073 if branch not in branches:
1077 if branch not in branches:
1074 return []
1078 return []
1075 # The basic algorithm is this:
1079 # The basic algorithm is this:
1076 #
1080 #
1077 # Start from the branch tip since there are no later revisions that can
1081 # Start from the branch tip since there are no later revisions that can
1078 # possibly be in this branch, and the tip is a guaranteed head.
1082 # possibly be in this branch, and the tip is a guaranteed head.
1079 #
1083 #
1080 # Remember the tip's parents as the first ancestors, since these by
1084 # Remember the tip's parents as the first ancestors, since these by
1081 # definition are not heads.
1085 # definition are not heads.
1082 #
1086 #
1083 # Step backwards from the branch tip through all the revisions. We are
1087 # Step backwards from the branch tip through all the revisions. We are
1084 # guaranteed by the rules of Mercurial that we will now be visiting the
1088 # guaranteed by the rules of Mercurial that we will now be visiting the
1085 # nodes in reverse topological order (children before parents).
1089 # nodes in reverse topological order (children before parents).
1086 #
1090 #
1087 # If a revision is one of the ancestors of a head then we can toss it
1091 # If a revision is one of the ancestors of a head then we can toss it
1088 # out of the ancestors set (we've already found it and won't be
1092 # out of the ancestors set (we've already found it and won't be
1089 # visiting it again) and put its parents in the ancestors set.
1093 # visiting it again) and put its parents in the ancestors set.
1090 #
1094 #
1091 # Otherwise, if a revision is in the branch it's another head, since it
1095 # Otherwise, if a revision is in the branch it's another head, since it
1092 # wasn't in the ancestor list of an existing head. So add it to the
1096 # wasn't in the ancestor list of an existing head. So add it to the
1093 # head list, and add its parents to the ancestor list.
1097 # head list, and add its parents to the ancestor list.
1094 #
1098 #
1095 # If it is not in the branch ignore it.
1099 # If it is not in the branch ignore it.
1096 #
1100 #
1097 # Once we have a list of heads, use nodesbetween to filter out all the
1101 # Once we have a list of heads, use nodesbetween to filter out all the
1098 # heads that cannot be reached from startrev. There may be a more
1102 # heads that cannot be reached from startrev. There may be a more
1099 # efficient way to do this as part of the previous algorithm.
1103 # efficient way to do this as part of the previous algorithm.
1100
1104
1101 set = util.set
1105 set = util.set
1102 heads = [self.changelog.rev(branches[branch])]
1106 heads = [self.changelog.rev(branches[branch])]
1103 # Don't care if ancestors contains nullrev or not.
1107 # Don't care if ancestors contains nullrev or not.
1104 ancestors = set(self.changelog.parentrevs(heads[0]))
1108 ancestors = set(self.changelog.parentrevs(heads[0]))
1105 for rev in xrange(heads[0] - 1, nullrev, -1):
1109 for rev in xrange(heads[0] - 1, nullrev, -1):
1106 if rev in ancestors:
1110 if rev in ancestors:
1107 ancestors.update(self.changelog.parentrevs(rev))
1111 ancestors.update(self.changelog.parentrevs(rev))
1108 ancestors.remove(rev)
1112 ancestors.remove(rev)
1109 elif self.changectx(rev).branch() == branch:
1113 elif self.changectx(rev).branch() == branch:
1110 heads.append(rev)
1114 heads.append(rev)
1111 ancestors.update(self.changelog.parentrevs(rev))
1115 ancestors.update(self.changelog.parentrevs(rev))
1112 heads = [self.changelog.node(rev) for rev in heads]
1116 heads = [self.changelog.node(rev) for rev in heads]
1113 if start is not None:
1117 if start is not None:
1114 heads = self.changelog.nodesbetween([start], heads)[2]
1118 heads = self.changelog.nodesbetween([start], heads)[2]
1115 return heads
1119 return heads
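# Illustrative sketch (editor's addition, not part of this changeset): the
# reverse-topological walk described in the comment block above, run on a toy
# history.  parentrevs and branchof stand in for the changelog and changectx
# queries; revision numbers play the role of nodes.
def toyheads(tiprev, parentrevs, branchof, branch):
    heads = [tiprev]
    ancestors = set(parentrevs(tiprev))
    for rev in range(tiprev - 1, -1, -1):
        if rev in ancestors:                 # ancestor of a known head
            ancestors.update(parentrevs(rev))
            ancestors.remove(rev)
        elif branchof(rev) == branch:        # another head of this branch
            heads.append(rev)
            ancestors.update(parentrevs(rev))
    return heads

# revs 0-1-2 in a line on 'default'; rev 3 branches off rev 1
parents = {0: [], 1: [0], 2: [1], 3: [1]}
branch = {0: 'default', 1: 'default', 2: 'default', 3: 'default'}
assert sorted(toyheads(3, parents.get, branch.get, 'default')) == [2, 3]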
1116
1120
1117 def branches(self, nodes):
1121 def branches(self, nodes):
1118 if not nodes:
1122 if not nodes:
1119 nodes = [self.changelog.tip()]
1123 nodes = [self.changelog.tip()]
1120 b = []
1124 b = []
1121 for n in nodes:
1125 for n in nodes:
1122 t = n
1126 t = n
1123 while 1:
1127 while 1:
1124 p = self.changelog.parents(n)
1128 p = self.changelog.parents(n)
1125 if p[1] != nullid or p[0] == nullid:
1129 if p[1] != nullid or p[0] == nullid:
1126 b.append((t, n, p[0], p[1]))
1130 b.append((t, n, p[0], p[1]))
1127 break
1131 break
1128 n = p[0]
1132 n = p[0]
1129 return b
1133 return b
1130
1134
1131 def between(self, pairs):
1135 def between(self, pairs):
1132 r = []
1136 r = []
1133
1137
1134 for top, bottom in pairs:
1138 for top, bottom in pairs:
1135 n, l, i = top, [], 0
1139 n, l, i = top, [], 0
1136 f = 1
1140 f = 1
1137
1141
1138 while n != bottom:
1142 while n != bottom:
1139 p = self.changelog.parents(n)[0]
1143 p = self.changelog.parents(n)[0]
1140 if i == f:
1144 if i == f:
1141 l.append(n)
1145 l.append(n)
1142 f = f * 2
1146 f = f * 2
1143 n = p
1147 n = p
1144 i += 1
1148 i += 1
1145
1149
1146 r.append(l)
1150 r.append(l)
1147
1151
1148 return r
1152 return r
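# Illustrative sketch (editor's addition, not part of this changeset):
# between() walks the first-parent chain from top down to bottom and records
# nodes at exponentially growing distances (1, 2, 4, ...), so the discovery
# code gets only a handful of sample points per branch.  The same loop on a
# plain integer chain where the parent of n is n - 1:
def sample(top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f *= 2
        n -= 1                           # step to the (first) parent
        i += 1
    return l

assert sample(20, 0) == [19, 18, 16, 12, 4]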
1149
1153
1150 def findincoming(self, remote, base=None, heads=None, force=False):
1154 def findincoming(self, remote, base=None, heads=None, force=False):
1151 """Return list of roots of the subsets of missing nodes from remote
1155 """Return list of roots of the subsets of missing nodes from remote
1152
1156
1153 If base dict is specified, assume that these nodes and their parents
1157 If base dict is specified, assume that these nodes and their parents
1154 exist on the remote side and that no child of a node of base exists
1158 exist on the remote side and that no child of a node of base exists
1155 in both remote and self.
1159 in both remote and self.
1156 Furthermore, base will be updated to include the nodes that exist
1160 Furthermore, base will be updated to include the nodes that exist
1157 in self and remote but have no children in either self or remote.
1161 in self and remote but have no children in either self or remote.
1158 If a list of heads is specified, return only nodes which are heads
1162 If a list of heads is specified, return only nodes which are heads
1159 or ancestors of these heads.
1163 or ancestors of these heads.
1160
1164
1161 All the ancestors of base are in self and in remote.
1165 All the ancestors of base are in self and in remote.
1162 All the descendants of the list returned are missing in self.
1166 All the descendants of the list returned are missing in self.
1163 (and so we know that the rest of the nodes are missing in remote, see
1167 (and so we know that the rest of the nodes are missing in remote, see
1164 outgoing)
1168 outgoing)
1165 """
1169 """
1166 m = self.changelog.nodemap
1170 m = self.changelog.nodemap
1167 search = []
1171 search = []
1168 fetch = {}
1172 fetch = {}
1169 seen = {}
1173 seen = {}
1170 seenbranch = {}
1174 seenbranch = {}
1171 if base is None:
1175 if base is None:
1172 base = {}
1176 base = {}
1173
1177
1174 if not heads:
1178 if not heads:
1175 heads = remote.heads()
1179 heads = remote.heads()
1176
1180
1177 if self.changelog.tip() == nullid:
1181 if self.changelog.tip() == nullid:
1178 base[nullid] = 1
1182 base[nullid] = 1
1179 if heads != [nullid]:
1183 if heads != [nullid]:
1180 return [nullid]
1184 return [nullid]
1181 return []
1185 return []
1182
1186
1183 # assume we're closer to the tip than the root
1187 # assume we're closer to the tip than the root
1184 # and start by examining the heads
1188 # and start by examining the heads
1185 self.ui.status(_("searching for changes\n"))
1189 self.ui.status(_("searching for changes\n"))
1186
1190
1187 unknown = []
1191 unknown = []
1188 for h in heads:
1192 for h in heads:
1189 if h not in m:
1193 if h not in m:
1190 unknown.append(h)
1194 unknown.append(h)
1191 else:
1195 else:
1192 base[h] = 1
1196 base[h] = 1
1193
1197
1194 if not unknown:
1198 if not unknown:
1195 return []
1199 return []
1196
1200
1197 req = dict.fromkeys(unknown)
1201 req = dict.fromkeys(unknown)
1198 reqcnt = 0
1202 reqcnt = 0
1199
1203
1200 # search through remote branches
1204 # search through remote branches
1201 # a 'branch' here is a linear segment of history, with four parts:
1205 # a 'branch' here is a linear segment of history, with four parts:
1202 # head, root, first parent, second parent
1206 # head, root, first parent, second parent
1203 # (a branch always has two parents (or none) by definition)
1207 # (a branch always has two parents (or none) by definition)
1204 unknown = remote.branches(unknown)
1208 unknown = remote.branches(unknown)
1205 while unknown:
1209 while unknown:
1206 r = []
1210 r = []
1207 while unknown:
1211 while unknown:
1208 n = unknown.pop(0)
1212 n = unknown.pop(0)
1209 if n[0] in seen:
1213 if n[0] in seen:
1210 continue
1214 continue
1211
1215
1212 self.ui.debug(_("examining %s:%s\n")
1216 self.ui.debug(_("examining %s:%s\n")
1213 % (short(n[0]), short(n[1])))
1217 % (short(n[0]), short(n[1])))
1214 if n[0] == nullid: # found the end of the branch
1218 if n[0] == nullid: # found the end of the branch
1215 pass
1219 pass
1216 elif n in seenbranch:
1220 elif n in seenbranch:
1217 self.ui.debug(_("branch already found\n"))
1221 self.ui.debug(_("branch already found\n"))
1218 continue
1222 continue
1219 elif n[1] and n[1] in m: # do we know the base?
1223 elif n[1] and n[1] in m: # do we know the base?
1220 self.ui.debug(_("found incomplete branch %s:%s\n")
1224 self.ui.debug(_("found incomplete branch %s:%s\n")
1221 % (short(n[0]), short(n[1])))
1225 % (short(n[0]), short(n[1])))
1222 search.append(n) # schedule branch range for scanning
1226 search.append(n) # schedule branch range for scanning
1223 seenbranch[n] = 1
1227 seenbranch[n] = 1
1224 else:
1228 else:
1225 if n[1] not in seen and n[1] not in fetch:
1229 if n[1] not in seen and n[1] not in fetch:
1226 if n[2] in m and n[3] in m:
1230 if n[2] in m and n[3] in m:
1227 self.ui.debug(_("found new changeset %s\n") %
1231 self.ui.debug(_("found new changeset %s\n") %
1228 short(n[1]))
1232 short(n[1]))
1229 fetch[n[1]] = 1 # earliest unknown
1233 fetch[n[1]] = 1 # earliest unknown
1230 for p in n[2:4]:
1234 for p in n[2:4]:
1231 if p in m:
1235 if p in m:
1232 base[p] = 1 # latest known
1236 base[p] = 1 # latest known
1233
1237
1234 for p in n[2:4]:
1238 for p in n[2:4]:
1235 if p not in req and p not in m:
1239 if p not in req and p not in m:
1236 r.append(p)
1240 r.append(p)
1237 req[p] = 1
1241 req[p] = 1
1238 seen[n[0]] = 1
1242 seen[n[0]] = 1
1239
1243
1240 if r:
1244 if r:
1241 reqcnt += 1
1245 reqcnt += 1
1242 self.ui.debug(_("request %d: %s\n") %
1246 self.ui.debug(_("request %d: %s\n") %
1243 (reqcnt, " ".join(map(short, r))))
1247 (reqcnt, " ".join(map(short, r))))
1244 for p in xrange(0, len(r), 10):
1248 for p in xrange(0, len(r), 10):
1245 for b in remote.branches(r[p:p+10]):
1249 for b in remote.branches(r[p:p+10]):
1246 self.ui.debug(_("received %s:%s\n") %
1250 self.ui.debug(_("received %s:%s\n") %
1247 (short(b[0]), short(b[1])))
1251 (short(b[0]), short(b[1])))
1248 unknown.append(b)
1252 unknown.append(b)
1249
1253
1250 # do binary search on the branches we found
1254 # do binary search on the branches we found
1251 while search:
1255 while search:
1252 n = search.pop(0)
1256 n = search.pop(0)
1253 reqcnt += 1
1257 reqcnt += 1
1254 l = remote.between([(n[0], n[1])])[0]
1258 l = remote.between([(n[0], n[1])])[0]
1255 l.append(n[1])
1259 l.append(n[1])
1256 p = n[0]
1260 p = n[0]
1257 f = 1
1261 f = 1
1258 for i in l:
1262 for i in l:
1259 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1263 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1260 if i in m:
1264 if i in m:
1261 if f <= 2:
1265 if f <= 2:
1262 self.ui.debug(_("found new branch changeset %s\n") %
1266 self.ui.debug(_("found new branch changeset %s\n") %
1263 short(p))
1267 short(p))
1264 fetch[p] = 1
1268 fetch[p] = 1
1265 base[i] = 1
1269 base[i] = 1
1266 else:
1270 else:
1267 self.ui.debug(_("narrowed branch search to %s:%s\n")
1271 self.ui.debug(_("narrowed branch search to %s:%s\n")
1268 % (short(p), short(i)))
1272 % (short(p), short(i)))
1269 search.append((p, i))
1273 search.append((p, i))
1270 break
1274 break
1271 p, f = i, f * 2
1275 p, f = i, f * 2
1272
1276
1273 # sanity check our fetch list
1277 # sanity check our fetch list
1274 for f in fetch.keys():
1278 for f in fetch.keys():
1275 if f in m:
1279 if f in m:
1276 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1280 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1277
1281
1278 if base.keys() == [nullid]:
1282 if base.keys() == [nullid]:
1279 if force:
1283 if force:
1280 self.ui.warn(_("warning: repository is unrelated\n"))
1284 self.ui.warn(_("warning: repository is unrelated\n"))
1281 else:
1285 else:
1282 raise util.Abort(_("repository is unrelated"))
1286 raise util.Abort(_("repository is unrelated"))
1283
1287
1284 self.ui.debug(_("found new changesets starting at ") +
1288 self.ui.debug(_("found new changesets starting at ") +
1285 " ".join([short(f) for f in fetch]) + "\n")
1289 " ".join([short(f) for f in fetch]) + "\n")
1286
1290
1287 self.ui.debug(_("%d total queries\n") % reqcnt)
1291 self.ui.debug(_("%d total queries\n") % reqcnt)
1288
1292
1289 return fetch.keys()
1293 return fetch.keys()
1290
1294
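The binary search above complements that sampling: remote.between() hands back nodes spaced 1, 2, 4, ... first-parent steps apart, and each pass either pins the known/unknown boundary down to a gap of at most two or recurses into the smaller range just before the first known sample. A rough sketch of one narrowing pass over plain Python values ('known' stands in for the local nodemap; the names are illustrative only):

    def narrow(samples, top, known):
        # samples: nodes ordered from just below 'top' toward the known
        # base, spaced 1, 2, 4, ... first-parent steps apart.
        p, f = top, 1
        for i in samples:
            if i in known:
                if f <= 2:
                    return 'fetch', p        # earliest unknown node found
                return 'search', (p, i)      # keep narrowing between p and i
            p, f = i, f * 2
        raise ValueError('no known node among samples')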
1291 def findoutgoing(self, remote, base=None, heads=None, force=False):
1295 def findoutgoing(self, remote, base=None, heads=None, force=False):
1292 """Return list of nodes that are roots of subsets not in remote
1296 """Return list of nodes that are roots of subsets not in remote
1293
1297
1294 If base dict is specified, assume that these nodes and their parents
1298 If base dict is specified, assume that these nodes and their parents
1295 exist on the remote side.
1299 exist on the remote side.
1296 If a list of heads is specified, return only nodes which are heads
1300 If a list of heads is specified, return only nodes which are heads
1297 or ancestors of these heads, and return a second element which
1301 or ancestors of these heads, and return a second element which
1298 contains all remote heads which get new children.
1302 contains all remote heads which get new children.
1299 """
1303 """
1300 if base is None:
1304 if base is None:
1301 base = {}
1305 base = {}
1302 self.findincoming(remote, base, heads, force=force)
1306 self.findincoming(remote, base, heads, force=force)
1303
1307
1304 self.ui.debug(_("common changesets up to ")
1308 self.ui.debug(_("common changesets up to ")
1305 + " ".join(map(short, base.keys())) + "\n")
1309 + " ".join(map(short, base.keys())) + "\n")
1306
1310
1307 remain = dict.fromkeys(self.changelog.nodemap)
1311 remain = dict.fromkeys(self.changelog.nodemap)
1308
1312
1309 # prune everything remote has from the tree
1313 # prune everything remote has from the tree
1310 del remain[nullid]
1314 del remain[nullid]
1311 remove = base.keys()
1315 remove = base.keys()
1312 while remove:
1316 while remove:
1313 n = remove.pop(0)
1317 n = remove.pop(0)
1314 if n in remain:
1318 if n in remain:
1315 del remain[n]
1319 del remain[n]
1316 for p in self.changelog.parents(n):
1320 for p in self.changelog.parents(n):
1317 remove.append(p)
1321 remove.append(p)
1318
1322
1319 # find every node whose parents have been pruned
1323 # find every node whose parents have been pruned
1320 subset = []
1324 subset = []
1321 # find every remote head that will get new children
1325 # find every remote head that will get new children
1322 updated_heads = {}
1326 updated_heads = {}
1323 for n in remain:
1327 for n in remain:
1324 p1, p2 = self.changelog.parents(n)
1328 p1, p2 = self.changelog.parents(n)
1325 if p1 not in remain and p2 not in remain:
1329 if p1 not in remain and p2 not in remain:
1326 subset.append(n)
1330 subset.append(n)
1327 if heads:
1331 if heads:
1328 if p1 in heads:
1332 if p1 in heads:
1329 updated_heads[p1] = True
1333 updated_heads[p1] = True
1330 if p2 in heads:
1334 if p2 in heads:
1331 updated_heads[p2] = True
1335 updated_heads[p2] = True
1332
1336
1333 # this is the set of all roots we have to push
1337 # this is the set of all roots we have to push
1334 if heads:
1338 if heads:
1335 return subset, updated_heads.keys()
1339 return subset, updated_heads.keys()
1336 else:
1340 else:
1337 return subset
1341 return subset
1338
1342
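For orientation, prepush() further down is the main consumer of this pair: findincoming fills 'base' with the common frontier as a side effect, after which findoutgoing reports the roots to push and the remote heads that would gain children. A condensed sketch of that calling pattern (repo and remote are assumed to be ordinary local and remote repository objects):

    def outgoing_roots(repo, remote):
        # Mirrors the discovery sequence used by prepush() below.
        base = {}
        remote_heads = remote.heads()
        repo.findincoming(remote, base, remote_heads, force=False)
        # returns (roots missing remotely, remote heads getting children)
        return repo.findoutgoing(remote, base, remote_heads)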
1339 def pull(self, remote, heads=None, force=False, lock=None):
1343 def pull(self, remote, heads=None, force=False):
1344 lock = self.lock()
1340 try:
1345 try:
1341 if not lock:
1342 lock = self.lock()
1343 fetch = self.findincoming(remote, force=force)
1346 fetch = self.findincoming(remote, force=force)
1344 if fetch == [nullid]:
1347 if fetch == [nullid]:
1345 self.ui.status(_("requesting all changes\n"))
1348 self.ui.status(_("requesting all changes\n"))
1346
1349
1347 if not fetch:
1350 if not fetch:
1348 self.ui.status(_("no changes found\n"))
1351 self.ui.status(_("no changes found\n"))
1349 return 0
1352 return 0
1350
1353
1351 if heads is None:
1354 if heads is None:
1352 cg = remote.changegroup(fetch, 'pull')
1355 cg = remote.changegroup(fetch, 'pull')
1353 else:
1356 else:
1354 if 'changegroupsubset' not in remote.capabilities:
1357 if 'changegroupsubset' not in remote.capabilities:
1355 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1358 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1356 cg = remote.changegroupsubset(fetch, heads, 'pull')
1359 cg = remote.changegroupsubset(fetch, heads, 'pull')
1357 return self.addchangegroup(cg, 'pull', remote.url())
1360 return self.addchangegroup(cg, 'pull', remote.url())
1358 finally:
1361 finally:
1359 del lock
1362 del lock
1360
1363
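The rewritten pull() above shows the pattern this changeset applies throughout: each entry point takes the now-recursive repository lock itself rather than having one threaded in as an argument, and releases it by dropping the reference in a finally block. Reduced to a sketch (the 'work' callable is a stand-in for whatever the method body does):

    def locked_operation(repo, work):
        lock = repo.lock()      # recursive: helpers that also call
        try:                    # repo.lock() no longer deadlock
            return work(repo)
        finally:
            del lock            # dropping the reference releases the lock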
1361 def push(self, remote, force=False, revs=None):
1364 def push(self, remote, force=False, revs=None):
1362 # there are two ways to push to remote repo:
1365 # there are two ways to push to remote repo:
1363 #
1366 #
1364 # addchangegroup assumes local user can lock remote
1367 # addchangegroup assumes local user can lock remote
1365 # repo (local filesystem, old ssh servers).
1368 # repo (local filesystem, old ssh servers).
1366 #
1369 #
1367 # unbundle assumes local user cannot lock remote repo (new ssh
1370 # unbundle assumes local user cannot lock remote repo (new ssh
1368 # servers, http servers).
1371 # servers, http servers).
1369
1372
1370 if remote.capable('unbundle'):
1373 if remote.capable('unbundle'):
1371 return self.push_unbundle(remote, force, revs)
1374 return self.push_unbundle(remote, force, revs)
1372 return self.push_addchangegroup(remote, force, revs)
1375 return self.push_addchangegroup(remote, force, revs)
1373
1376
1374 def prepush(self, remote, force, revs):
1377 def prepush(self, remote, force, revs):
1375 base = {}
1378 base = {}
1376 remote_heads = remote.heads()
1379 remote_heads = remote.heads()
1377 inc = self.findincoming(remote, base, remote_heads, force=force)
1380 inc = self.findincoming(remote, base, remote_heads, force=force)
1378
1381
1379 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1382 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1380 if revs is not None:
1383 if revs is not None:
1381 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1384 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1382 else:
1385 else:
1383 bases, heads = update, self.changelog.heads()
1386 bases, heads = update, self.changelog.heads()
1384
1387
1385 if not bases:
1388 if not bases:
1386 self.ui.status(_("no changes found\n"))
1389 self.ui.status(_("no changes found\n"))
1387 return None, 1
1390 return None, 1
1388 elif not force:
1391 elif not force:
1389 # check if we're creating new remote heads
1392 # check if we're creating new remote heads
1390 # to be a remote head after push, node must be either
1393 # to be a remote head after push, node must be either
1391 # - unknown locally
1394 # - unknown locally
1392 # - a local outgoing head descended from update
1395 # - a local outgoing head descended from update
1393 # - a remote head that's known locally and not
1396 # - a remote head that's known locally and not
1394 # ancestral to an outgoing head
1397 # ancestral to an outgoing head
1395
1398
1396 warn = 0
1399 warn = 0
1397
1400
1398 if remote_heads == [nullid]:
1401 if remote_heads == [nullid]:
1399 warn = 0
1402 warn = 0
1400 elif not revs and len(heads) > len(remote_heads):
1403 elif not revs and len(heads) > len(remote_heads):
1401 warn = 1
1404 warn = 1
1402 else:
1405 else:
1403 newheads = list(heads)
1406 newheads = list(heads)
1404 for r in remote_heads:
1407 for r in remote_heads:
1405 if r in self.changelog.nodemap:
1408 if r in self.changelog.nodemap:
1406 desc = self.changelog.heads(r, heads)
1409 desc = self.changelog.heads(r, heads)
1407 l = [h for h in heads if h in desc]
1410 l = [h for h in heads if h in desc]
1408 if not l:
1411 if not l:
1409 newheads.append(r)
1412 newheads.append(r)
1410 else:
1413 else:
1411 newheads.append(r)
1414 newheads.append(r)
1412 if len(newheads) > len(remote_heads):
1415 if len(newheads) > len(remote_heads):
1413 warn = 1
1416 warn = 1
1414
1417
1415 if warn:
1418 if warn:
1416 self.ui.warn(_("abort: push creates new remote branches!\n"))
1419 self.ui.warn(_("abort: push creates new remote branches!\n"))
1417 self.ui.status(_("(did you forget to merge?"
1420 self.ui.status(_("(did you forget to merge?"
1418 " use push -f to force)\n"))
1421 " use push -f to force)\n"))
1419 return None, 1
1422 return None, 1
1420 elif inc:
1423 elif inc:
1421 self.ui.warn(_("note: unsynced remote changes!\n"))
1424 self.ui.warn(_("note: unsynced remote changes!\n"))
1422
1425
1423
1426
1424 if revs is None:
1427 if revs is None:
1425 cg = self.changegroup(update, 'push')
1428 cg = self.changegroup(update, 'push')
1426 else:
1429 else:
1427 cg = self.changegroupsubset(update, revs, 'push')
1430 cg = self.changegroupsubset(update, revs, 'push')
1428 return cg, remote_heads
1431 return cg, remote_heads
1429
1432
1430 def push_addchangegroup(self, remote, force, revs):
1433 def push_addchangegroup(self, remote, force, revs):
1431 lock = remote.lock()
1434 lock = remote.lock()
1432 try:
1435 try:
1433 ret = self.prepush(remote, force, revs)
1436 ret = self.prepush(remote, force, revs)
1434 if ret[0] is not None:
1437 if ret[0] is not None:
1435 cg, remote_heads = ret
1438 cg, remote_heads = ret
1436 return remote.addchangegroup(cg, 'push', self.url())
1439 return remote.addchangegroup(cg, 'push', self.url())
1437 return ret[1]
1440 return ret[1]
1438 finally:
1441 finally:
1439 del lock
1442 del lock
1440
1443
1441 def push_unbundle(self, remote, force, revs):
1444 def push_unbundle(self, remote, force, revs):
1442 # local repo finds heads on server, finds out what revs it
1445 # local repo finds heads on server, finds out what revs it
1443 # must push. once revs transferred, if server finds it has
1446 # must push. once revs transferred, if server finds it has
1444 # different heads (someone else won commit/push race), server
1447 # different heads (someone else won commit/push race), server
1445 # aborts.
1448 # aborts.
1446
1449
1447 ret = self.prepush(remote, force, revs)
1450 ret = self.prepush(remote, force, revs)
1448 if ret[0] is not None:
1451 if ret[0] is not None:
1449 cg, remote_heads = ret
1452 cg, remote_heads = ret
1450 if force: remote_heads = ['force']
1453 if force: remote_heads = ['force']
1451 return remote.unbundle(cg, remote_heads, 'push')
1454 return remote.unbundle(cg, remote_heads, 'push')
1452 return ret[1]
1455 return ret[1]
1453
1456
1454 def changegroupinfo(self, nodes):
1457 def changegroupinfo(self, nodes):
1455 self.ui.note(_("%d changesets found\n") % len(nodes))
1458 self.ui.note(_("%d changesets found\n") % len(nodes))
1456 if self.ui.debugflag:
1459 if self.ui.debugflag:
1457 self.ui.debug(_("List of changesets:\n"))
1460 self.ui.debug(_("List of changesets:\n"))
1458 for node in nodes:
1461 for node in nodes:
1459 self.ui.debug("%s\n" % hex(node))
1462 self.ui.debug("%s\n" % hex(node))
1460
1463
1461 def changegroupsubset(self, bases, heads, source):
1464 def changegroupsubset(self, bases, heads, source):
1462 """This function generates a changegroup consisting of all the nodes
1465 """This function generates a changegroup consisting of all the nodes
1463 that are descendents of any of the bases, and ancestors of any of
1466 that are descendents of any of the bases, and ancestors of any of
1464 the heads.
1467 the heads.
1465
1468
1466 It is fairly complex as determining which filenodes and which
1469 It is fairly complex as determining which filenodes and which
1467 manifest nodes need to be included for the changeset to be complete
1470 manifest nodes need to be included for the changeset to be complete
1468 is non-trivial.
1471 is non-trivial.
1469
1472
1470 Another wrinkle is doing the reverse, figuring out which changeset in
1473 Another wrinkle is doing the reverse, figuring out which changeset in
1471 the changegroup a particular filenode or manifestnode belongs to."""
1474 the changegroup a particular filenode or manifestnode belongs to."""
1472
1475
1473 self.hook('preoutgoing', throw=True, source=source)
1476 self.hook('preoutgoing', throw=True, source=source)
1474
1477
1475 # Set up some initial variables
1478 # Set up some initial variables
1476 # Make it easy to refer to self.changelog
1479 # Make it easy to refer to self.changelog
1477 cl = self.changelog
1480 cl = self.changelog
1478 # msng is short for missing - compute the list of changesets in this
1481 # msng is short for missing - compute the list of changesets in this
1479 # changegroup.
1482 # changegroup.
1480 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1483 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1481 self.changegroupinfo(msng_cl_lst)
1484 self.changegroupinfo(msng_cl_lst)
1482 # Some bases may turn out to be superfluous, and some heads may be
1485 # Some bases may turn out to be superfluous, and some heads may be
1483 # too. nodesbetween will return the minimal set of bases and heads
1486 # too. nodesbetween will return the minimal set of bases and heads
1484 # necessary to re-create the changegroup.
1487 # necessary to re-create the changegroup.
1485
1488
1486 # Known heads are the list of heads that it is assumed the recipient
1489 # Known heads are the list of heads that it is assumed the recipient
1487 # of this changegroup will know about.
1490 # of this changegroup will know about.
1488 knownheads = {}
1491 knownheads = {}
1489 # We assume that all parents of bases are known heads.
1492 # We assume that all parents of bases are known heads.
1490 for n in bases:
1493 for n in bases:
1491 for p in cl.parents(n):
1494 for p in cl.parents(n):
1492 if p != nullid:
1495 if p != nullid:
1493 knownheads[p] = 1
1496 knownheads[p] = 1
1494 knownheads = knownheads.keys()
1497 knownheads = knownheads.keys()
1495 if knownheads:
1498 if knownheads:
1496 # Now that we know what heads are known, we can compute which
1499 # Now that we know what heads are known, we can compute which
1497 # changesets are known. The recipient must know about all
1500 # changesets are known. The recipient must know about all
1498 # changesets required to reach the known heads from the null
1501 # changesets required to reach the known heads from the null
1499 # changeset.
1502 # changeset.
1500 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1503 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1501 junk = None
1504 junk = None
1502 # Transform the list into an ersatz set.
1505 # Transform the list into an ersatz set.
1503 has_cl_set = dict.fromkeys(has_cl_set)
1506 has_cl_set = dict.fromkeys(has_cl_set)
1504 else:
1507 else:
1505 # If there were no known heads, the recipient cannot be assumed to
1508 # If there were no known heads, the recipient cannot be assumed to
1506 # know about any changesets.
1509 # know about any changesets.
1507 has_cl_set = {}
1510 has_cl_set = {}
1508
1511
1509 # Make it easy to refer to self.manifest
1512 # Make it easy to refer to self.manifest
1510 mnfst = self.manifest
1513 mnfst = self.manifest
1511 # We don't know which manifests are missing yet
1514 # We don't know which manifests are missing yet
1512 msng_mnfst_set = {}
1515 msng_mnfst_set = {}
1513 # Nor do we know which filenodes are missing.
1516 # Nor do we know which filenodes are missing.
1514 msng_filenode_set = {}
1517 msng_filenode_set = {}
1515
1518
1516 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1519 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1517 junk = None
1520 junk = None
1518
1521
1519 # A changeset always belongs to itself, so the changenode lookup
1522 # A changeset always belongs to itself, so the changenode lookup
1520 # function for a changenode is identity.
1523 # function for a changenode is identity.
1521 def identity(x):
1524 def identity(x):
1522 return x
1525 return x
1523
1526
1524 # A function generating function. Sets up an environment for the
1527 # A function generating function. Sets up an environment for the
1525 # inner function.
1528 # inner function.
1526 def cmp_by_rev_func(revlog):
1529 def cmp_by_rev_func(revlog):
1527 # Compare two nodes by their revision number in the environment's
1530 # Compare two nodes by their revision number in the environment's
1528 # revision history. Since the revision number both represents the
1531 # revision history. Since the revision number both represents the
1529 # most efficient order to read the nodes in, and represents a
1532 # most efficient order to read the nodes in, and represents a
1530 # topological sorting of the nodes, this function is often useful.
1533 # topological sorting of the nodes, this function is often useful.
1531 def cmp_by_rev(a, b):
1534 def cmp_by_rev(a, b):
1532 return cmp(revlog.rev(a), revlog.rev(b))
1535 return cmp(revlog.rev(a), revlog.rev(b))
1533 return cmp_by_rev
1536 return cmp_by_rev
1534
1537
1535 # If we determine that a particular file or manifest node must be a
1538 # If we determine that a particular file or manifest node must be a
1536 # node that the recipient of the changegroup will already have, we can
1539 # node that the recipient of the changegroup will already have, we can
1537 # also assume the recipient will have all the parents. This function
1540 # also assume the recipient will have all the parents. This function
1538 # prunes them from the set of missing nodes.
1541 # prunes them from the set of missing nodes.
1539 def prune_parents(revlog, hasset, msngset):
1542 def prune_parents(revlog, hasset, msngset):
1540 haslst = hasset.keys()
1543 haslst = hasset.keys()
1541 haslst.sort(cmp_by_rev_func(revlog))
1544 haslst.sort(cmp_by_rev_func(revlog))
1542 for node in haslst:
1545 for node in haslst:
1543 parentlst = [p for p in revlog.parents(node) if p != nullid]
1546 parentlst = [p for p in revlog.parents(node) if p != nullid]
1544 while parentlst:
1547 while parentlst:
1545 n = parentlst.pop()
1548 n = parentlst.pop()
1546 if n not in hasset:
1549 if n not in hasset:
1547 hasset[n] = 1
1550 hasset[n] = 1
1548 p = [p for p in revlog.parents(n) if p != nullid]
1551 p = [p for p in revlog.parents(n) if p != nullid]
1549 parentlst.extend(p)
1552 parentlst.extend(p)
1550 for n in hasset:
1553 for n in hasset:
1551 msngset.pop(n, None)
1554 msngset.pop(n, None)
1552
1555
1553 # This is a function generating function used to set up an environment
1556 # This is a function generating function used to set up an environment
1554 # for the inner function to execute in.
1557 # for the inner function to execute in.
1555 def manifest_and_file_collector(changedfileset):
1558 def manifest_and_file_collector(changedfileset):
1556 # This is an information gathering function that gathers
1559 # This is an information gathering function that gathers
1557 # information from each changeset node that goes out as part of
1560 # information from each changeset node that goes out as part of
1558 # the changegroup. The information gathered is a list of which
1561 # the changegroup. The information gathered is a list of which
1559 # manifest nodes are potentially required (the recipient may
1562 # manifest nodes are potentially required (the recipient may
1560 # already have them) and the total list of all files which were
1563 # already have them) and the total list of all files which were
1561 # changed in any changeset in the changegroup.
1564 # changed in any changeset in the changegroup.
1562 #
1565 #
1563 # We also remember the first changenode we saw any manifest
1566 # We also remember the first changenode we saw any manifest
1564 # referenced by so we can later determine which changenode 'owns'
1567 # referenced by so we can later determine which changenode 'owns'
1565 # the manifest.
1568 # the manifest.
1566 def collect_manifests_and_files(clnode):
1569 def collect_manifests_and_files(clnode):
1567 c = cl.read(clnode)
1570 c = cl.read(clnode)
1568 for f in c[3]:
1571 for f in c[3]:
1569 # This is to make sure we only have one instance of each
1572 # This is to make sure we only have one instance of each
1570 # filename string for each filename.
1573 # filename string for each filename.
1571 changedfileset.setdefault(f, f)
1574 changedfileset.setdefault(f, f)
1572 msng_mnfst_set.setdefault(c[0], clnode)
1575 msng_mnfst_set.setdefault(c[0], clnode)
1573 return collect_manifests_and_files
1576 return collect_manifests_and_files
1574
1577
1575 # Figure out which manifest nodes (of the ones we think might be part
1578 # Figure out which manifest nodes (of the ones we think might be part
1576 # of the changegroup) the recipient must know about and remove them
1579 # of the changegroup) the recipient must know about and remove them
1577 # from the changegroup.
1580 # from the changegroup.
1578 def prune_manifests():
1581 def prune_manifests():
1579 has_mnfst_set = {}
1582 has_mnfst_set = {}
1580 for n in msng_mnfst_set:
1583 for n in msng_mnfst_set:
1581 # If a 'missing' manifest thinks it belongs to a changenode
1584 # If a 'missing' manifest thinks it belongs to a changenode
1582 # the recipient is assumed to have, obviously the recipient
1585 # the recipient is assumed to have, obviously the recipient
1583 # must have that manifest.
1586 # must have that manifest.
1584 linknode = cl.node(mnfst.linkrev(n))
1587 linknode = cl.node(mnfst.linkrev(n))
1585 if linknode in has_cl_set:
1588 if linknode in has_cl_set:
1586 has_mnfst_set[n] = 1
1589 has_mnfst_set[n] = 1
1587 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1590 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1588
1591
1589 # Use the information collected in collect_manifests_and_files to say
1592 # Use the information collected in collect_manifests_and_files to say
1590 # which changenode any manifestnode belongs to.
1593 # which changenode any manifestnode belongs to.
1591 def lookup_manifest_link(mnfstnode):
1594 def lookup_manifest_link(mnfstnode):
1592 return msng_mnfst_set[mnfstnode]
1595 return msng_mnfst_set[mnfstnode]
1593
1596
1594 # A function generating function that sets up the initial environment
1597 # A function generating function that sets up the initial environment
1595 # for the inner function.
1598 # for the inner function.
1596 def filenode_collector(changedfiles):
1599 def filenode_collector(changedfiles):
1597 next_rev = [0]
1600 next_rev = [0]
1598 # This gathers information from each manifestnode included in the
1601 # This gathers information from each manifestnode included in the
1599 # changegroup about which filenodes the manifest node references
1602 # changegroup about which filenodes the manifest node references
1600 # so we can include those in the changegroup too.
1603 # so we can include those in the changegroup too.
1601 #
1604 #
1602 # It also remembers which changenode each filenode belongs to. It
1605 # It also remembers which changenode each filenode belongs to. It
1603 # does this by assuming that a filenode belongs to the changenode
1606 # does this by assuming that a filenode belongs to the changenode
1604 # the first manifest that references it belongs to.
1607 # the first manifest that references it belongs to.
1605 def collect_msng_filenodes(mnfstnode):
1608 def collect_msng_filenodes(mnfstnode):
1606 r = mnfst.rev(mnfstnode)
1609 r = mnfst.rev(mnfstnode)
1607 if r == next_rev[0]:
1610 if r == next_rev[0]:
1608 # If the last rev we looked at was the one just previous,
1611 # If the last rev we looked at was the one just previous,
1609 # we only need to see a diff.
1612 # we only need to see a diff.
1610 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1613 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1611 # For each line in the delta
1614 # For each line in the delta
1612 for dline in delta.splitlines():
1615 for dline in delta.splitlines():
1613 # get the filename and filenode for that line
1616 # get the filename and filenode for that line
1614 f, fnode = dline.split('\0')
1617 f, fnode = dline.split('\0')
1615 fnode = bin(fnode[:40])
1618 fnode = bin(fnode[:40])
1616 f = changedfiles.get(f, None)
1619 f = changedfiles.get(f, None)
1617 # And if the file is in the list of files we care
1620 # And if the file is in the list of files we care
1618 # about.
1621 # about.
1619 if f is not None:
1622 if f is not None:
1620 # Get the changenode this manifest belongs to
1623 # Get the changenode this manifest belongs to
1621 clnode = msng_mnfst_set[mnfstnode]
1624 clnode = msng_mnfst_set[mnfstnode]
1622 # Create the set of filenodes for the file if
1625 # Create the set of filenodes for the file if
1623 # there isn't one already.
1626 # there isn't one already.
1624 ndset = msng_filenode_set.setdefault(f, {})
1627 ndset = msng_filenode_set.setdefault(f, {})
1625 # And set the filenode's changelog node to the
1628 # And set the filenode's changelog node to the
1626 # manifest's if it hasn't been set already.
1629 # manifest's if it hasn't been set already.
1627 ndset.setdefault(fnode, clnode)
1630 ndset.setdefault(fnode, clnode)
1628 else:
1631 else:
1629 # Otherwise we need a full manifest.
1632 # Otherwise we need a full manifest.
1630 m = mnfst.read(mnfstnode)
1633 m = mnfst.read(mnfstnode)
1631 # For every file we care about.
1634 # For every file we care about.
1632 for f in changedfiles:
1635 for f in changedfiles:
1633 fnode = m.get(f, None)
1636 fnode = m.get(f, None)
1634 # If it's in the manifest
1637 # If it's in the manifest
1635 if fnode is not None:
1638 if fnode is not None:
1636 # See comments above.
1639 # See comments above.
1637 clnode = msng_mnfst_set[mnfstnode]
1640 clnode = msng_mnfst_set[mnfstnode]
1638 ndset = msng_filenode_set.setdefault(f, {})
1641 ndset = msng_filenode_set.setdefault(f, {})
1639 ndset.setdefault(fnode, clnode)
1642 ndset.setdefault(fnode, clnode)
1640 # Remember the revision we hope to see next.
1643 # Remember the revision we hope to see next.
1641 next_rev[0] = r + 1
1644 next_rev[0] = r + 1
1642 return collect_msng_filenodes
1645 return collect_msng_filenodes
1643
1646
1644 # We have a list of filenodes we think we need for a file; let's remove
1647 # We have a list of filenodes we think we need for a file; let's remove
1645 # all those we know the recipient must have.
1648 # all those we know the recipient must have.
1646 def prune_filenodes(f, filerevlog):
1649 def prune_filenodes(f, filerevlog):
1647 msngset = msng_filenode_set[f]
1650 msngset = msng_filenode_set[f]
1648 hasset = {}
1651 hasset = {}
1649 # If a 'missing' filenode thinks it belongs to a changenode we
1652 # If a 'missing' filenode thinks it belongs to a changenode we
1650 # assume the recipient must have, then the recipient must have
1653 # assume the recipient must have, then the recipient must have
1651 # that filenode.
1654 # that filenode.
1652 for n in msngset:
1655 for n in msngset:
1653 clnode = cl.node(filerevlog.linkrev(n))
1656 clnode = cl.node(filerevlog.linkrev(n))
1654 if clnode in has_cl_set:
1657 if clnode in has_cl_set:
1655 hasset[n] = 1
1658 hasset[n] = 1
1656 prune_parents(filerevlog, hasset, msngset)
1659 prune_parents(filerevlog, hasset, msngset)
1657
1660
1658 # A function generator function that sets up a context for the
1661 # A function generator function that sets up a context for the
1659 # inner function.
1662 # inner function.
1660 def lookup_filenode_link_func(fname):
1663 def lookup_filenode_link_func(fname):
1661 msngset = msng_filenode_set[fname]
1664 msngset = msng_filenode_set[fname]
1662 # Lookup the changenode the filenode belongs to.
1665 # Lookup the changenode the filenode belongs to.
1663 def lookup_filenode_link(fnode):
1666 def lookup_filenode_link(fnode):
1664 return msngset[fnode]
1667 return msngset[fnode]
1665 return lookup_filenode_link
1668 return lookup_filenode_link
1666
1669
1667 # Now that we have all these utility functions to help out and
1670 # Now that we have all these utility functions to help out and
1668 # logically divide up the task, generate the group.
1671 # logically divide up the task, generate the group.
1669 def gengroup():
1672 def gengroup():
1670 # The set of changed files starts empty.
1673 # The set of changed files starts empty.
1671 changedfiles = {}
1674 changedfiles = {}
1672 # Create a changenode group generator that will call our functions
1675 # Create a changenode group generator that will call our functions
1673 # back to lookup the owning changenode and collect information.
1676 # back to lookup the owning changenode and collect information.
1674 group = cl.group(msng_cl_lst, identity,
1677 group = cl.group(msng_cl_lst, identity,
1675 manifest_and_file_collector(changedfiles))
1678 manifest_and_file_collector(changedfiles))
1676 for chnk in group:
1679 for chnk in group:
1677 yield chnk
1680 yield chnk
1678
1681
1679 # The list of manifests has been collected by the generator
1682 # The list of manifests has been collected by the generator
1680 # calling our functions back.
1683 # calling our functions back.
1681 prune_manifests()
1684 prune_manifests()
1682 msng_mnfst_lst = msng_mnfst_set.keys()
1685 msng_mnfst_lst = msng_mnfst_set.keys()
1683 # Sort the manifestnodes by revision number.
1686 # Sort the manifestnodes by revision number.
1684 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1687 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1685 # Create a generator for the manifestnodes that calls our lookup
1688 # Create a generator for the manifestnodes that calls our lookup
1686 # and data collection functions back.
1689 # and data collection functions back.
1687 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1690 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1688 filenode_collector(changedfiles))
1691 filenode_collector(changedfiles))
1689 for chnk in group:
1692 for chnk in group:
1690 yield chnk
1693 yield chnk
1691
1694
1692 # These are no longer needed, dereference and toss the memory for
1695 # These are no longer needed, dereference and toss the memory for
1693 # them.
1696 # them.
1694 msng_mnfst_lst = None
1697 msng_mnfst_lst = None
1695 msng_mnfst_set.clear()
1698 msng_mnfst_set.clear()
1696
1699
1697 changedfiles = changedfiles.keys()
1700 changedfiles = changedfiles.keys()
1698 changedfiles.sort()
1701 changedfiles.sort()
1699 # Go through all our files in order sorted by name.
1702 # Go through all our files in order sorted by name.
1700 for fname in changedfiles:
1703 for fname in changedfiles:
1701 filerevlog = self.file(fname)
1704 filerevlog = self.file(fname)
1702 # Toss out the filenodes that the recipient isn't really
1705 # Toss out the filenodes that the recipient isn't really
1703 # missing.
1706 # missing.
1704 if fname in msng_filenode_set:
1707 if fname in msng_filenode_set:
1705 prune_filenodes(fname, filerevlog)
1708 prune_filenodes(fname, filerevlog)
1706 msng_filenode_lst = msng_filenode_set[fname].keys()
1709 msng_filenode_lst = msng_filenode_set[fname].keys()
1707 else:
1710 else:
1708 msng_filenode_lst = []
1711 msng_filenode_lst = []
1709 # If any filenodes are left, generate the group for them,
1712 # If any filenodes are left, generate the group for them,
1710 # otherwise don't bother.
1713 # otherwise don't bother.
1711 if len(msng_filenode_lst) > 0:
1714 if len(msng_filenode_lst) > 0:
1712 yield changegroup.genchunk(fname)
1715 yield changegroup.genchunk(fname)
1713 # Sort the filenodes by their revision #
1716 # Sort the filenodes by their revision #
1714 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1717 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1715 # Create a group generator and only pass in a changenode
1718 # Create a group generator and only pass in a changenode
1716 # lookup function as we need to collect no information
1719 # lookup function as we need to collect no information
1717 # from filenodes.
1720 # from filenodes.
1718 group = filerevlog.group(msng_filenode_lst,
1721 group = filerevlog.group(msng_filenode_lst,
1719 lookup_filenode_link_func(fname))
1722 lookup_filenode_link_func(fname))
1720 for chnk in group:
1723 for chnk in group:
1721 yield chnk
1724 yield chnk
1722 if fname in msng_filenode_set:
1725 if fname in msng_filenode_set:
1723 # Don't need this anymore, toss it to free memory.
1726 # Don't need this anymore, toss it to free memory.
1724 del msng_filenode_set[fname]
1727 del msng_filenode_set[fname]
1725 # Signal that no more groups are left.
1728 # Signal that no more groups are left.
1726 yield changegroup.closechunk()
1729 yield changegroup.closechunk()
1727
1730
1728 if msng_cl_lst:
1731 if msng_cl_lst:
1729 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1732 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1730
1733
1731 return util.chunkbuffer(gengroup())
1734 return util.chunkbuffer(gengroup())
1732
1735
1733 def changegroup(self, basenodes, source):
1736 def changegroup(self, basenodes, source):
1734 """Generate a changegroup of all nodes that we have that a recipient
1737 """Generate a changegroup of all nodes that we have that a recipient
1735 doesn't.
1738 doesn't.
1736
1739
1737 This is much easier than the previous function as we can assume that
1740 This is much easier than the previous function as we can assume that
1738 the recipient has any changenode we aren't sending them."""
1741 the recipient has any changenode we aren't sending them."""
1739
1742
1740 self.hook('preoutgoing', throw=True, source=source)
1743 self.hook('preoutgoing', throw=True, source=source)
1741
1744
1742 cl = self.changelog
1745 cl = self.changelog
1743 nodes = cl.nodesbetween(basenodes, None)[0]
1746 nodes = cl.nodesbetween(basenodes, None)[0]
1744 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1747 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1745 self.changegroupinfo(nodes)
1748 self.changegroupinfo(nodes)
1746
1749
1747 def identity(x):
1750 def identity(x):
1748 return x
1751 return x
1749
1752
1750 def gennodelst(revlog):
1753 def gennodelst(revlog):
1751 for r in xrange(0, revlog.count()):
1754 for r in xrange(0, revlog.count()):
1752 n = revlog.node(r)
1755 n = revlog.node(r)
1753 if revlog.linkrev(n) in revset:
1756 if revlog.linkrev(n) in revset:
1754 yield n
1757 yield n
1755
1758
1756 def changed_file_collector(changedfileset):
1759 def changed_file_collector(changedfileset):
1757 def collect_changed_files(clnode):
1760 def collect_changed_files(clnode):
1758 c = cl.read(clnode)
1761 c = cl.read(clnode)
1759 for fname in c[3]:
1762 for fname in c[3]:
1760 changedfileset[fname] = 1
1763 changedfileset[fname] = 1
1761 return collect_changed_files
1764 return collect_changed_files
1762
1765
1763 def lookuprevlink_func(revlog):
1766 def lookuprevlink_func(revlog):
1764 def lookuprevlink(n):
1767 def lookuprevlink(n):
1765 return cl.node(revlog.linkrev(n))
1768 return cl.node(revlog.linkrev(n))
1766 return lookuprevlink
1769 return lookuprevlink
1767
1770
1768 def gengroup():
1771 def gengroup():
1769 # construct a list of all changed files
1772 # construct a list of all changed files
1770 changedfiles = {}
1773 changedfiles = {}
1771
1774
1772 for chnk in cl.group(nodes, identity,
1775 for chnk in cl.group(nodes, identity,
1773 changed_file_collector(changedfiles)):
1776 changed_file_collector(changedfiles)):
1774 yield chnk
1777 yield chnk
1775 changedfiles = changedfiles.keys()
1778 changedfiles = changedfiles.keys()
1776 changedfiles.sort()
1779 changedfiles.sort()
1777
1780
1778 mnfst = self.manifest
1781 mnfst = self.manifest
1779 nodeiter = gennodelst(mnfst)
1782 nodeiter = gennodelst(mnfst)
1780 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1783 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1781 yield chnk
1784 yield chnk
1782
1785
1783 for fname in changedfiles:
1786 for fname in changedfiles:
1784 filerevlog = self.file(fname)
1787 filerevlog = self.file(fname)
1785 nodeiter = gennodelst(filerevlog)
1788 nodeiter = gennodelst(filerevlog)
1786 nodeiter = list(nodeiter)
1789 nodeiter = list(nodeiter)
1787 if nodeiter:
1790 if nodeiter:
1788 yield changegroup.genchunk(fname)
1791 yield changegroup.genchunk(fname)
1789 lookup = lookuprevlink_func(filerevlog)
1792 lookup = lookuprevlink_func(filerevlog)
1790 for chnk in filerevlog.group(nodeiter, lookup):
1793 for chnk in filerevlog.group(nodeiter, lookup):
1791 yield chnk
1794 yield chnk
1792
1795
1793 yield changegroup.closechunk()
1796 yield changegroup.closechunk()
1794
1797
1795 if nodes:
1798 if nodes:
1796 self.hook('outgoing', node=hex(nodes[0]), source=source)
1799 self.hook('outgoing', node=hex(nodes[0]), source=source)
1797
1800
1798 return util.chunkbuffer(gengroup())
1801 return util.chunkbuffer(gengroup())
1799
1802
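Both generators above produce the same chunked layout, which addchangegroup() below reads back in the same order: a group of changelog chunks, a group of manifest chunks, then for each changed file a chunk carrying the filename followed by that file's revision chunks, with an empty chunk closing each group and the stream. A consumer skeleton under those assumptions (getgroup and getchunk are stand-ins for changegroup.chunkiter and changegroup.getchunk):

    def read_changegroup(getgroup, getchunk):
        changelog_chunks = list(getgroup())      # changeset group
        manifest_chunks = list(getgroup())       # manifest group
        files = {}
        while True:
            fname = getchunk()                   # filename, empty at end
            if not fname:
                break
            files[fname] = list(getgroup())      # that file's revisions
        return changelog_chunks, manifest_chunks, files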
1800 def addchangegroup(self, source, srctype, url):
1803 def addchangegroup(self, source, srctype, url):
1801 """add changegroup to repo.
1804 """add changegroup to repo.
1802
1805
1803 return values:
1806 return values:
1804 - nothing changed or no source: 0
1807 - nothing changed or no source: 0
1805 - more heads than before: 1+added heads (2..n)
1808 - more heads than before: 1+added heads (2..n)
1806 - fewer heads than before: -1-removed heads (-2..-n)
1809 - fewer heads than before: -1-removed heads (-2..-n)
1807 - number of heads stays the same: 1
1810 - number of heads stays the same: 1
1808 """
1811 """
1809 def csmap(x):
1812 def csmap(x):
1810 self.ui.debug(_("add changeset %s\n") % short(x))
1813 self.ui.debug(_("add changeset %s\n") % short(x))
1811 return cl.count()
1814 return cl.count()
1812
1815
1813 def revmap(x):
1816 def revmap(x):
1814 return cl.rev(x)
1817 return cl.rev(x)
1815
1818
1816 if not source:
1819 if not source:
1817 return 0
1820 return 0
1818
1821
1819 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1822 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1820
1823
1821 changesets = files = revisions = 0
1824 changesets = files = revisions = 0
1822
1825
1823 # write changelog data to temp files so concurrent readers will not see
1826 # write changelog data to temp files so concurrent readers will not see
1824 # inconsistent view
1827 # inconsistent view
1825 cl = self.changelog
1828 cl = self.changelog
1826 cl.delayupdate()
1829 cl.delayupdate()
1827 oldheads = len(cl.heads())
1830 oldheads = len(cl.heads())
1828
1831
1829 tr = self.transaction()
1832 tr = self.transaction()
1830 try:
1833 try:
1831 # pull off the changeset group
1834 # pull off the changeset group
1832 self.ui.status(_("adding changesets\n"))
1835 self.ui.status(_("adding changesets\n"))
1833 cor = cl.count() - 1
1836 cor = cl.count() - 1
1834 chunkiter = changegroup.chunkiter(source)
1837 chunkiter = changegroup.chunkiter(source)
1835 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1838 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1836 raise util.Abort(_("received changelog group is empty"))
1839 raise util.Abort(_("received changelog group is empty"))
1837 cnr = cl.count() - 1
1840 cnr = cl.count() - 1
1838 changesets = cnr - cor
1841 changesets = cnr - cor
1839
1842
1840 # pull off the manifest group
1843 # pull off the manifest group
1841 self.ui.status(_("adding manifests\n"))
1844 self.ui.status(_("adding manifests\n"))
1842 chunkiter = changegroup.chunkiter(source)
1845 chunkiter = changegroup.chunkiter(source)
1843 # no need to check for empty manifest group here:
1846 # no need to check for empty manifest group here:
1844 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1847 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1845 # no new manifest will be created and the manifest group will
1848 # no new manifest will be created and the manifest group will
1846 # be empty during the pull
1849 # be empty during the pull
1847 self.manifest.addgroup(chunkiter, revmap, tr)
1850 self.manifest.addgroup(chunkiter, revmap, tr)
1848
1851
1849 # process the files
1852 # process the files
1850 self.ui.status(_("adding file changes\n"))
1853 self.ui.status(_("adding file changes\n"))
1851 while 1:
1854 while 1:
1852 f = changegroup.getchunk(source)
1855 f = changegroup.getchunk(source)
1853 if not f:
1856 if not f:
1854 break
1857 break
1855 self.ui.debug(_("adding %s revisions\n") % f)
1858 self.ui.debug(_("adding %s revisions\n") % f)
1856 fl = self.file(f)
1859 fl = self.file(f)
1857 o = fl.count()
1860 o = fl.count()
1858 chunkiter = changegroup.chunkiter(source)
1861 chunkiter = changegroup.chunkiter(source)
1859 if fl.addgroup(chunkiter, revmap, tr) is None:
1862 if fl.addgroup(chunkiter, revmap, tr) is None:
1860 raise util.Abort(_("received file revlog group is empty"))
1863 raise util.Abort(_("received file revlog group is empty"))
1861 revisions += fl.count() - o
1864 revisions += fl.count() - o
1862 files += 1
1865 files += 1
1863
1866
1864 # make changelog see real files again
1867 # make changelog see real files again
1865 cl.finalize(tr)
1868 cl.finalize(tr)
1866
1869
1867 newheads = len(self.changelog.heads())
1870 newheads = len(self.changelog.heads())
1868 heads = ""
1871 heads = ""
1869 if oldheads and newheads != oldheads:
1872 if oldheads and newheads != oldheads:
1870 heads = _(" (%+d heads)") % (newheads - oldheads)
1873 heads = _(" (%+d heads)") % (newheads - oldheads)
1871
1874
1872 self.ui.status(_("added %d changesets"
1875 self.ui.status(_("added %d changesets"
1873 " with %d changes to %d files%s\n")
1876 " with %d changes to %d files%s\n")
1874 % (changesets, revisions, files, heads))
1877 % (changesets, revisions, files, heads))
1875
1878
1876 if changesets > 0:
1879 if changesets > 0:
1877 self.hook('pretxnchangegroup', throw=True,
1880 self.hook('pretxnchangegroup', throw=True,
1878 node=hex(self.changelog.node(cor+1)), source=srctype,
1881 node=hex(self.changelog.node(cor+1)), source=srctype,
1879 url=url)
1882 url=url)
1880
1883
1881 tr.close()
1884 tr.close()
1882 finally:
1885 finally:
1883 del tr
1886 del tr
1884
1887
1885 if changesets > 0:
1888 if changesets > 0:
1886 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1889 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1887 source=srctype, url=url)
1890 source=srctype, url=url)
1888
1891
1889 for i in xrange(cor + 1, cnr + 1):
1892 for i in xrange(cor + 1, cnr + 1):
1890 self.hook("incoming", node=hex(self.changelog.node(i)),
1893 self.hook("incoming", node=hex(self.changelog.node(i)),
1891 source=srctype, url=url)
1894 source=srctype, url=url)
1892
1895
1893 # never return 0 here:
1896 # never return 0 here:
1894 if newheads < oldheads:
1897 if newheads < oldheads:
1895 return newheads - oldheads - 1
1898 return newheads - oldheads - 1
1896 else:
1899 else:
1897 return newheads - oldheads + 1
1900 return newheads - oldheads + 1
1898
1901
1899
1902
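The return-value convention documented above is what callers of pull() branch on to decide whether new heads appeared. A small helper, purely illustrative, that spells the cases out:

    def describe_modheads(modheads):
        # Interpret the integer returned by addchangegroup()/pull().
        if modheads == 0:
            return 'no changes added'
        if modheads == 1:
            return 'changes added, number of heads unchanged'
        if modheads > 1:
            return '%d new head(s) added' % (modheads - 1)
        return '%d head(s) removed' % (-modheads - 1)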
1900 def stream_in(self, remote):
1903 def stream_in(self, remote):
1901 fp = remote.stream_out()
1904 fp = remote.stream_out()
1902 l = fp.readline()
1905 l = fp.readline()
1903 try:
1906 try:
1904 resp = int(l)
1907 resp = int(l)
1905 except ValueError:
1908 except ValueError:
1906 raise util.UnexpectedOutput(
1909 raise util.UnexpectedOutput(
1907 _('Unexpected response from remote server:'), l)
1910 _('Unexpected response from remote server:'), l)
1908 if resp == 1:
1911 if resp == 1:
1909 raise util.Abort(_('operation forbidden by server'))
1912 raise util.Abort(_('operation forbidden by server'))
1910 elif resp == 2:
1913 elif resp == 2:
1911 raise util.Abort(_('locking the remote repository failed'))
1914 raise util.Abort(_('locking the remote repository failed'))
1912 elif resp != 0:
1915 elif resp != 0:
1913 raise util.Abort(_('the server sent an unknown error code'))
1916 raise util.Abort(_('the server sent an unknown error code'))
1914 self.ui.status(_('streaming all changes\n'))
1917 self.ui.status(_('streaming all changes\n'))
1915 l = fp.readline()
1918 l = fp.readline()
1916 try:
1919 try:
1917 total_files, total_bytes = map(int, l.split(' ', 1))
1920 total_files, total_bytes = map(int, l.split(' ', 1))
1918 except (ValueError, TypeError):
1921 except (ValueError, TypeError):
1919 raise util.UnexpectedOutput(
1922 raise util.UnexpectedOutput(
1920 _('Unexpected response from remote server:'), l)
1923 _('Unexpected response from remote server:'), l)
1921 self.ui.status(_('%d files to transfer, %s of data\n') %
1924 self.ui.status(_('%d files to transfer, %s of data\n') %
1922 (total_files, util.bytecount(total_bytes)))
1925 (total_files, util.bytecount(total_bytes)))
1923 start = time.time()
1926 start = time.time()
1924 for i in xrange(total_files):
1927 for i in xrange(total_files):
1925 # XXX doesn't support '\n' or '\r' in filenames
1928 # XXX doesn't support '\n' or '\r' in filenames
1926 l = fp.readline()
1929 l = fp.readline()
1927 try:
1930 try:
1928 name, size = l.split('\0', 1)
1931 name, size = l.split('\0', 1)
1929 size = int(size)
1932 size = int(size)
1930 except (ValueError, TypeError):
1933 except (ValueError, TypeError):
1931 raise util.UnexpectedOutput(
1934 raise util.UnexpectedOutput(
1932 _('Unexpected response from remote server:'), l)
1935 _('Unexpected response from remote server:'), l)
1933 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1936 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1934 ofp = self.sopener(name, 'w')
1937 ofp = self.sopener(name, 'w')
1935 for chunk in util.filechunkiter(fp, limit=size):
1938 for chunk in util.filechunkiter(fp, limit=size):
1936 ofp.write(chunk)
1939 ofp.write(chunk)
1937 ofp.close()
1940 ofp.close()
1938 elapsed = time.time() - start
1941 elapsed = time.time() - start
1939 if elapsed <= 0:
1942 if elapsed <= 0:
1940 elapsed = 0.001
1943 elapsed = 0.001
1941 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1944 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1942 (util.bytecount(total_bytes), elapsed,
1945 (util.bytecount(total_bytes), elapsed,
1943 util.bytecount(total_bytes / elapsed)))
1946 util.bytecount(total_bytes / elapsed)))
1944 self.invalidate()
1947 self.invalidate()
1945 return len(self.heads()) + 1
1948 return len(self.heads()) + 1
1946
1949
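For reference, the stream parsed above is framed as: a decimal status line (0 ok, 1 forbidden, 2 lock failed), a '<files> <bytes>' summary line, then for each file a '<name>\0<size>' header line followed by exactly size bytes of revlog data. A minimal parser over a file-like object, assuming only that framing (a sketch, not the real protocol code):

    def parse_stream_header(fp):
        # Status line plus the '<files> <bytes>' summary line.
        resp = int(fp.readline())
        if resp != 0:
            raise ValueError('streaming clone refused, code %d' % resp)
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        return total_files, total_bytes

    def iter_stream_entries(fp, total_files):
        # Yield (name, data) for each streamed store file.
        for _ in range(total_files):
            name, size = fp.readline().split('\0', 1)
            yield name, fp.read(int(size))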
1947 def clone(self, remote, heads=[], stream=False):
1950 def clone(self, remote, heads=[], stream=False):
1948 '''clone remote repository.
1951 '''clone remote repository.
1949
1952
1950 keyword arguments:
1953 keyword arguments:
1951 heads: list of revs to clone (forces use of pull)
1954 heads: list of revs to clone (forces use of pull)
1952 stream: use streaming clone if possible'''
1955 stream: use streaming clone if possible'''
1953
1956
1954 # now, all clients that can request uncompressed clones can
1957 # now, all clients that can request uncompressed clones can
1955 # read repo formats supported by all servers that can serve
1958 # read repo formats supported by all servers that can serve
1956 # them.
1959 # them.
1957
1960
1958 # if revlog format changes, client will have to check version
1961 # if revlog format changes, client will have to check version
1959 # and format flags on "stream" capability, and use
1962 # and format flags on "stream" capability, and use
1960 # uncompressed only if compatible.
1963 # uncompressed only if compatible.
1961
1964
1962 if stream and not heads and remote.capable('stream'):
1965 if stream and not heads and remote.capable('stream'):
1963 return self.stream_in(remote)
1966 return self.stream_in(remote)
1964 return self.pull(remote, heads)
1967 return self.pull(remote, heads)
1965
1968
1966 # used to avoid circular references so destructors work
1969 # used to avoid circular references so destructors work
1967 def aftertrans(files):
1970 def aftertrans(files):
1968 renamefiles = [tuple(t) for t in files]
1971 renamefiles = [tuple(t) for t in files]
1969 def a():
1972 def a():
1970 for src, dest in renamefiles:
1973 for src, dest in renamefiles:
1971 util.rename(src, dest)
1974 util.rename(src, dest)
1972 return a
1975 return a
1973
1976
1974 def instance(ui, path, create):
1977 def instance(ui, path, create):
1975 return localrepository(ui, util.drop_scheme('file', path), create)
1978 return localrepository(ui, util.drop_scheme('file', path), create)
1976
1979
1977 def islocal(path):
1980 def islocal(path):
1978 return True
1981 return True
@@ -1,572 +1,569
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import errno, util, os, tempfile, context
10 import errno, util, os, tempfile, context
11
11
12 def filemerge(repo, fw, fo, wctx, mctx):
12 def filemerge(repo, fw, fo, wctx, mctx):
13 """perform a 3-way merge in the working directory
13 """perform a 3-way merge in the working directory
14
14
15 fw = filename in the working directory
15 fw = filename in the working directory
16 fo = filename in other parent
16 fo = filename in other parent
17 wctx, mctx = working and merge changecontexts
17 wctx, mctx = working and merge changecontexts
18 """
18 """
19
19
20 def temp(prefix, ctx):
20 def temp(prefix, ctx):
21 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
21 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
22 (fd, name) = tempfile.mkstemp(prefix=pre)
22 (fd, name) = tempfile.mkstemp(prefix=pre)
23 data = repo.wwritedata(ctx.path(), ctx.data())
23 data = repo.wwritedata(ctx.path(), ctx.data())
24 f = os.fdopen(fd, "wb")
24 f = os.fdopen(fd, "wb")
25 f.write(data)
25 f.write(data)
26 f.close()
26 f.close()
27 return name
27 return name
28
28
29 fcm = wctx.filectx(fw)
29 fcm = wctx.filectx(fw)
30 fco = mctx.filectx(fo)
30 fco = mctx.filectx(fo)
31
31
32 if not fco.cmp(fcm.data()): # files identical?
32 if not fco.cmp(fcm.data()): # files identical?
33 return None
33 return None
34
34
35 fca = fcm.ancestor(fco)
35 fca = fcm.ancestor(fco)
36 if not fca:
36 if not fca:
37 fca = repo.filectx(fw, fileid=nullrev)
37 fca = repo.filectx(fw, fileid=nullrev)
38 a = repo.wjoin(fw)
38 a = repo.wjoin(fw)
39 b = temp("base", fca)
39 b = temp("base", fca)
40 c = temp("other", fco)
40 c = temp("other", fco)
41
41
42 if fw != fo:
42 if fw != fo:
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 else:
44 else:
45 repo.ui.status(_("merging %s\n") % fw)
45 repo.ui.status(_("merging %s\n") % fw)
46
46
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48
48
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 or "hgmerge")
50 or "hgmerge")
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 environ={'HG_FILE': fw,
52 environ={'HG_FILE': fw,
53 'HG_MY_NODE': str(wctx.parents()[0]),
53 'HG_MY_NODE': str(wctx.parents()[0]),
54 'HG_OTHER_NODE': str(mctx)})
54 'HG_OTHER_NODE': str(mctx)})
55 if r:
55 if r:
56 repo.ui.warn(_("merging %s failed!\n") % fw)
56 repo.ui.warn(_("merging %s failed!\n") % fw)
57
57
58 os.unlink(b)
58 os.unlink(b)
59 os.unlink(c)
59 os.unlink(c)
60 return r
60 return r
61
61
62 def checkunknown(wctx, mctx):
62 def checkunknown(wctx, mctx):
63 "check for collisions between unknown files and files in mctx"
63 "check for collisions between unknown files and files in mctx"
64 man = mctx.manifest()
64 man = mctx.manifest()
65 for f in wctx.unknown():
65 for f in wctx.unknown():
66 if f in man:
66 if f in man:
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 raise util.Abort(_("untracked local file '%s' differs"
68 raise util.Abort(_("untracked local file '%s' differs"
69 " from remote version") % f)
69 " from remote version") % f)
70
70
71 def checkcollision(mctx):
71 def checkcollision(mctx):
72 "check for case folding collisions in the destination context"
72 "check for case folding collisions in the destination context"
73 folded = {}
73 folded = {}
74 for fn in mctx.manifest():
74 for fn in mctx.manifest():
75 fold = fn.lower()
75 fold = fn.lower()
76 if fold in folded:
76 if fold in folded:
77 raise util.Abort(_("case-folding collision between %s and %s")
77 raise util.Abort(_("case-folding collision between %s and %s")
78 % (fn, folded[fold]))
78 % (fn, folded[fold]))
79 folded[fold] = fn
79 folded[fold] = fn
80
80
81 def forgetremoved(wctx, mctx):
81 def forgetremoved(wctx, mctx):
82 """
82 """
83 Forget removed files
83 Forget removed files
84
84
85 If we're jumping between revisions (as opposed to merging), and if
85 If we're jumping between revisions (as opposed to merging), and if
86 neither the working directory nor the target rev has the file,
86 neither the working directory nor the target rev has the file,
87 then we need to remove it from the dirstate, to prevent the
87 then we need to remove it from the dirstate, to prevent the
88 dirstate from listing the file when it is no longer in the
88 dirstate from listing the file when it is no longer in the
89 manifest.
89 manifest.
90 """
90 """
91
91
92 action = []
92 action = []
93 man = mctx.manifest()
93 man = mctx.manifest()
94 for f in wctx.deleted() + wctx.removed():
94 for f in wctx.deleted() + wctx.removed():
95 if f not in man:
95 if f not in man:
96 action.append((f, "f"))
96 action.append((f, "f"))
97
97
98 return action
98 return action
99
99
100 def findcopies(repo, m1, m2, ma, limit):
100 def findcopies(repo, m1, m2, ma, limit):
101 """
101 """
102 Find moves and copies between m1 and m2 back to limit linkrev
102 Find moves and copies between m1 and m2 back to limit linkrev
103 """
103 """
104
104
105 def nonoverlap(d1, d2, d3):
105 def nonoverlap(d1, d2, d3):
106 "Return list of elements in d1 not in d2 or d3"
106 "Return list of elements in d1 not in d2 or d3"
107 l = [d for d in d1 if d not in d3 and d not in d2]
107 l = [d for d in d1 if d not in d3 and d not in d2]
108 l.sort()
108 l.sort()
109 return l
109 return l
110
110
111 def dirname(f):
111 def dirname(f):
112 s = f.rfind("/")
112 s = f.rfind("/")
113 if s == -1:
113 if s == -1:
114 return ""
114 return ""
115 return f[:s]
115 return f[:s]
116
116
117 def dirs(files):
117 def dirs(files):
118 d = {}
118 d = {}
119 for f in files:
119 for f in files:
120 f = dirname(f)
120 f = dirname(f)
121 while f not in d:
121 while f not in d:
122 d[f] = True
122 d[f] = True
123 f = dirname(f)
123 f = dirname(f)
124 return d
124 return d
125
125
126 wctx = repo.workingctx()
126 wctx = repo.workingctx()
127
127
128 def makectx(f, n):
128 def makectx(f, n):
129 if len(n) == 20:
129 if len(n) == 20:
130 return repo.filectx(f, fileid=n)
130 return repo.filectx(f, fileid=n)
131 return wctx.filectx(f)
131 return wctx.filectx(f)
132 ctx = util.cachefunc(makectx)
132 ctx = util.cachefunc(makectx)
133
133
134 def findold(fctx):
134 def findold(fctx):
135 "find files that path was copied from, back to linkrev limit"
135 "find files that path was copied from, back to linkrev limit"
136 old = {}
136 old = {}
137 seen = {}
137 seen = {}
138 orig = fctx.path()
138 orig = fctx.path()
139 visit = [fctx]
139 visit = [fctx]
140 while visit:
140 while visit:
141 fc = visit.pop()
141 fc = visit.pop()
142 s = str(fc)
142 s = str(fc)
143 if s in seen:
143 if s in seen:
144 continue
144 continue
145 seen[s] = 1
145 seen[s] = 1
146 if fc.path() != orig and fc.path() not in old:
146 if fc.path() != orig and fc.path() not in old:
147 old[fc.path()] = 1
147 old[fc.path()] = 1
148 if fc.rev() < limit:
148 if fc.rev() < limit:
149 continue
149 continue
150 visit += fc.parents()
150 visit += fc.parents()
151
151
152 old = old.keys()
152 old = old.keys()
153 old.sort()
153 old.sort()
154 return old
154 return old
155
155
156 copy = {}
156 copy = {}
157 fullcopy = {}
157 fullcopy = {}
158 diverge = {}
158 diverge = {}
159
159
160 def checkcopies(c, man, aman):
160 def checkcopies(c, man, aman):
161 '''check possible copies for filectx c'''
161 '''check possible copies for filectx c'''
162 for of in findold(c):
162 for of in findold(c):
163 fullcopy[c.path()] = of # remember for dir rename detection
163 fullcopy[c.path()] = of # remember for dir rename detection
164 if of not in man: # original file not in other manifest?
164 if of not in man: # original file not in other manifest?
165 if of in ma:
165 if of in ma:
166 diverge.setdefault(of, []).append(c.path())
166 diverge.setdefault(of, []).append(c.path())
167 continue
167 continue
168 # if the original file is unchanged on the other branch,
168 # if the original file is unchanged on the other branch,
169 # no merge needed
169 # no merge needed
170 if man[of] == aman.get(of):
170 if man[of] == aman.get(of):
171 continue
171 continue
172 c2 = ctx(of, man[of])
172 c2 = ctx(of, man[of])
173 ca = c.ancestor(c2)
173 ca = c.ancestor(c2)
174 if not ca: # unrelated?
174 if not ca: # unrelated?
175 continue
175 continue
176 # name changed on only one side?
176 # name changed on only one side?
177 if ca.path() == c.path() or ca.path() == c2.path():
177 if ca.path() == c.path() or ca.path() == c2.path():
178 if c == ca or c2 == ca: # no merge needed, ignore copy
178 if c == ca or c2 == ca: # no merge needed, ignore copy
179 continue
179 continue
180 copy[c.path()] = of
180 copy[c.path()] = of
181
181
182 if not repo.ui.configbool("merge", "followcopies", True):
182 if not repo.ui.configbool("merge", "followcopies", True):
183 return {}, {}
183 return {}, {}
184
184
185 # avoid silly behavior for update from empty dir
185 # avoid silly behavior for update from empty dir
186 if not m1 or not m2 or not ma:
186 if not m1 or not m2 or not ma:
187 return {}, {}
187 return {}, {}
188
188
189 u1 = nonoverlap(m1, m2, ma)
189 u1 = nonoverlap(m1, m2, ma)
190 u2 = nonoverlap(m2, m1, ma)
190 u2 = nonoverlap(m2, m1, ma)
191
191
192 for f in u1:
192 for f in u1:
193 checkcopies(ctx(f, m1[f]), m2, ma)
193 checkcopies(ctx(f, m1[f]), m2, ma)
194
194
195 for f in u2:
195 for f in u2:
196 checkcopies(ctx(f, m2[f]), m1, ma)
196 checkcopies(ctx(f, m2[f]), m1, ma)
197
197
198 d2 = {}
198 d2 = {}
199 for of, fl in diverge.items():
199 for of, fl in diverge.items():
200 for f in fl:
200 for f in fl:
201 fo = list(fl)
201 fo = list(fl)
202 fo.remove(f)
202 fo.remove(f)
203 d2[f] = (of, fo)
203 d2[f] = (of, fo)
204
204
205 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
205 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
206 return copy, diverge
206 return copy, diverge
207
207
208 # generate a directory move map
208 # generate a directory move map
209 d1, d2 = dirs(m1), dirs(m2)
209 d1, d2 = dirs(m1), dirs(m2)
210 invalid = {}
210 invalid = {}
211 dirmove = {}
211 dirmove = {}
212
212
213 # examine each file copy for a potential directory move, which is
213 # examine each file copy for a potential directory move, which is
214 # when all the files in a directory are moved to a new directory
214 # when all the files in a directory are moved to a new directory
215 for dst, src in fullcopy.items():
215 for dst, src in fullcopy.items():
216 dsrc, ddst = dirname(src), dirname(dst)
216 dsrc, ddst = dirname(src), dirname(dst)
217 if dsrc in invalid:
217 if dsrc in invalid:
218 # already seen to be uninteresting
218 # already seen to be uninteresting
219 continue
219 continue
220 elif dsrc in d1 and ddst in d1:
220 elif dsrc in d1 and ddst in d1:
221 # directory wasn't entirely moved locally
221 # directory wasn't entirely moved locally
222 invalid[dsrc] = True
222 invalid[dsrc] = True
223 elif dsrc in d2 and ddst in d2:
223 elif dsrc in d2 and ddst in d2:
224 # directory wasn't entirely moved remotely
224 # directory wasn't entirely moved remotely
225 invalid[dsrc] = True
225 invalid[dsrc] = True
226 elif dsrc in dirmove and dirmove[dsrc] != ddst:
226 elif dsrc in dirmove and dirmove[dsrc] != ddst:
227 # files from the same directory moved to two different places
227 # files from the same directory moved to two different places
228 invalid[dsrc] = True
228 invalid[dsrc] = True
229 else:
229 else:
230 # looks good so far
230 # looks good so far
231 dirmove[dsrc + "/"] = ddst + "/"
231 dirmove[dsrc + "/"] = ddst + "/"
232
232
233 for i in invalid:
233 for i in invalid:
234 if i in dirmove:
234 if i in dirmove:
235 del dirmove[i]
235 del dirmove[i]
236
236
237 del d1, d2, invalid
237 del d1, d2, invalid
238
238
239 if not dirmove:
239 if not dirmove:
240 return copy, diverge
240 return copy, diverge
241
241
242 # check unaccounted nonoverlapping files against directory moves
242 # check unaccounted nonoverlapping files against directory moves
243 for f in u1 + u2:
243 for f in u1 + u2:
244 if f not in fullcopy:
244 if f not in fullcopy:
245 for d in dirmove:
245 for d in dirmove:
246 if f.startswith(d):
246 if f.startswith(d):
247 # new file added in a directory that was moved, move it
247 # new file added in a directory that was moved, move it
248 copy[f] = dirmove[d] + f[len(d):]
248 copy[f] = dirmove[d] + f[len(d):]
249 break
249 break
250
250
251 return copy, diverge
251 return copy, diverge
252
252
253 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
253 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
254 """
254 """
255 Merge p1 and p2 with ancestor ma and generate merge action list
255 Merge p1 and p2 with ancestor ma and generate merge action list
256
256
257 overwrite = whether we clobber working files
257 overwrite = whether we clobber working files
258 partial = function to filter file lists
258 partial = function to filter file lists
259 """
259 """
260
260
261 repo.ui.note(_("resolving manifests\n"))
261 repo.ui.note(_("resolving manifests\n"))
262 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
262 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
263 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
263 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
264
264
265 m1 = p1.manifest()
265 m1 = p1.manifest()
266 m2 = p2.manifest()
266 m2 = p2.manifest()
267 ma = pa.manifest()
267 ma = pa.manifest()
268 backwards = (pa == p2)
268 backwards = (pa == p2)
269 action = []
269 action = []
270 copy = {}
270 copy = {}
271 diverge = {}
271 diverge = {}
272
272
273 def fmerge(f, f2=None, fa=None):
273 def fmerge(f, f2=None, fa=None):
274 """merge flags"""
274 """merge flags"""
275 if not f2:
275 if not f2:
276 f2 = f
276 f2 = f
277 fa = f
277 fa = f
278 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
278 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
279 if ((a^b) | (a^c)) ^ a:
279 if ((a^b) | (a^c)) ^ a:
280 return 'x'
280 return 'x'
281 a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
281 a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
282 if ((a^b) | (a^c)) ^ a:
282 if ((a^b) | (a^c)) ^ a:
283 return 'l'
283 return 'l'
284 return ''
284 return ''
285
285
286 def act(msg, m, f, *args):
286 def act(msg, m, f, *args):
287 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
287 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
288 action.append((f, m) + args)
288 action.append((f, m) + args)
289
289
290 if not (backwards or overwrite):
290 if not (backwards or overwrite):
291 copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
291 copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
292
292
293 for of, fl in diverge.items():
293 for of, fl in diverge.items():
294 act("divergent renames", "dr", of, fl)
294 act("divergent renames", "dr", of, fl)
295
295
296 copied = dict.fromkeys(copy.values())
296 copied = dict.fromkeys(copy.values())
297
297
298 # Compare manifests
298 # Compare manifests
299 for f, n in m1.iteritems():
299 for f, n in m1.iteritems():
300 if partial and not partial(f):
300 if partial and not partial(f):
301 continue
301 continue
302 if f in m2:
302 if f in m2:
303 # are files different?
303 # are files different?
304 if n != m2[f]:
304 if n != m2[f]:
305 a = ma.get(f, nullid)
305 a = ma.get(f, nullid)
306 # are both different from the ancestor?
306 # are both different from the ancestor?
307 if not overwrite and n != a and m2[f] != a:
307 if not overwrite and n != a and m2[f] != a:
308 act("versions differ", "m", f, f, f, fmerge(f), False)
308 act("versions differ", "m", f, f, f, fmerge(f), False)
309 # are we clobbering?
309 # are we clobbering?
310 # is remote's version newer?
310 # is remote's version newer?
311 # or are we going back in time and clean?
311 # or are we going back in time and clean?
312 elif overwrite or m2[f] != a or (backwards and not n[20:]):
312 elif overwrite or m2[f] != a or (backwards and not n[20:]):
313 act("remote is newer", "g", f, m2.flags(f))
313 act("remote is newer", "g", f, m2.flags(f))
314 # local is newer, not overwrite, check mode bits
314 # local is newer, not overwrite, check mode bits
315 elif fmerge(f) != m1.flags(f):
315 elif fmerge(f) != m1.flags(f):
316 act("update permissions", "e", f, m2.flags(f))
316 act("update permissions", "e", f, m2.flags(f))
317 # contents same, check mode bits
317 # contents same, check mode bits
318 elif m1.flags(f) != m2.flags(f):
318 elif m1.flags(f) != m2.flags(f):
319 if overwrite or fmerge(f) != m1.flags(f):
319 if overwrite or fmerge(f) != m1.flags(f):
320 act("update permissions", "e", f, m2.flags(f))
320 act("update permissions", "e", f, m2.flags(f))
321 elif f in copied:
321 elif f in copied:
322 continue
322 continue
323 elif f in copy:
323 elif f in copy:
324 f2 = copy[f]
324 f2 = copy[f]
325 if f2 not in m2: # directory rename
325 if f2 not in m2: # directory rename
326 act("remote renamed directory to " + f2, "d",
326 act("remote renamed directory to " + f2, "d",
327 f, None, f2, m1.flags(f))
327 f, None, f2, m1.flags(f))
328 elif f2 in m1: # case 2 A,B/B/B
328 elif f2 in m1: # case 2 A,B/B/B
329 act("local copied to " + f2, "m",
329 act("local copied to " + f2, "m",
330 f, f2, f, fmerge(f, f2, f2), False)
330 f, f2, f, fmerge(f, f2, f2), False)
331 else: # case 4,21 A/B/B
331 else: # case 4,21 A/B/B
332 act("local moved to " + f2, "m",
332 act("local moved to " + f2, "m",
333 f, f2, f, fmerge(f, f2, f2), False)
333 f, f2, f, fmerge(f, f2, f2), False)
334 elif f in ma:
334 elif f in ma:
335 if n != ma[f] and not overwrite:
335 if n != ma[f] and not overwrite:
336 if repo.ui.prompt(
336 if repo.ui.prompt(
337 (_(" local changed %s which remote deleted\n") % f) +
337 (_(" local changed %s which remote deleted\n") % f) +
338 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
338 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
339 act("prompt delete", "r", f)
339 act("prompt delete", "r", f)
340 else:
340 else:
341 act("other deleted", "r", f)
341 act("other deleted", "r", f)
342 else:
342 else:
343 # file is created on branch or in working directory
343 # file is created on branch or in working directory
344 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
344 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
345 act("remote deleted", "r", f)
345 act("remote deleted", "r", f)
346
346
347 for f, n in m2.iteritems():
347 for f, n in m2.iteritems():
348 if partial and not partial(f):
348 if partial and not partial(f):
349 continue
349 continue
350 if f in m1:
350 if f in m1:
351 continue
351 continue
352 if f in copied:
352 if f in copied:
353 continue
353 continue
354 if f in copy:
354 if f in copy:
355 f2 = copy[f]
355 f2 = copy[f]
356 if f2 not in m1: # directory rename
356 if f2 not in m1: # directory rename
357 act("local renamed directory to " + f2, "d",
357 act("local renamed directory to " + f2, "d",
358 None, f, f2, m2.flags(f))
358 None, f, f2, m2.flags(f))
359 elif f2 in m2: # rename case 1, A/A,B/A
359 elif f2 in m2: # rename case 1, A/A,B/A
360 act("remote copied to " + f, "m",
360 act("remote copied to " + f, "m",
361 f2, f, f, fmerge(f2, f, f2), False)
361 f2, f, f, fmerge(f2, f, f2), False)
362 else: # case 3,20 A/B/A
362 else: # case 3,20 A/B/A
363 act("remote moved to " + f, "m",
363 act("remote moved to " + f, "m",
364 f2, f, f, fmerge(f2, f, f2), True)
364 f2, f, f, fmerge(f2, f, f2), True)
365 elif f in ma:
365 elif f in ma:
366 if overwrite or backwards:
366 if overwrite or backwards:
367 act("recreating", "g", f, m2.flags(f))
367 act("recreating", "g", f, m2.flags(f))
368 elif n != ma[f]:
368 elif n != ma[f]:
369 if repo.ui.prompt(
369 if repo.ui.prompt(
370 (_("remote changed %s which local deleted\n") % f) +
370 (_("remote changed %s which local deleted\n") % f) +
371 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
371 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
372 act("prompt recreating", "g", f, m2.flags(f))
372 act("prompt recreating", "g", f, m2.flags(f))
373 else:
373 else:
374 act("remote created", "g", f, m2.flags(f))
374 act("remote created", "g", f, m2.flags(f))
375
375
376 return action
376 return action
377
377
378 def applyupdates(repo, action, wctx, mctx):
378 def applyupdates(repo, action, wctx, mctx):
379 "apply the merge action list to the working directory"
379 "apply the merge action list to the working directory"
380
380
381 updated, merged, removed, unresolved = 0, 0, 0, 0
381 updated, merged, removed, unresolved = 0, 0, 0, 0
382 action.sort()
382 action.sort()
383 for a in action:
383 for a in action:
384 f, m = a[:2]
384 f, m = a[:2]
385 if f and f[0] == "/":
385 if f and f[0] == "/":
386 continue
386 continue
387 if m == "r": # remove
387 if m == "r": # remove
388 repo.ui.note(_("removing %s\n") % f)
388 repo.ui.note(_("removing %s\n") % f)
389 util.audit_path(f)
389 util.audit_path(f)
390 try:
390 try:
391 util.unlink(repo.wjoin(f))
391 util.unlink(repo.wjoin(f))
392 except OSError, inst:
392 except OSError, inst:
393 if inst.errno != errno.ENOENT:
393 if inst.errno != errno.ENOENT:
394 repo.ui.warn(_("update failed to remove %s: %s!\n") %
394 repo.ui.warn(_("update failed to remove %s: %s!\n") %
395 (f, inst.strerror))
395 (f, inst.strerror))
396 removed += 1
396 removed += 1
397 elif m == "m": # merge
397 elif m == "m": # merge
398 f2, fd, flags, move = a[2:]
398 f2, fd, flags, move = a[2:]
399 r = filemerge(repo, f, f2, wctx, mctx)
399 r = filemerge(repo, f, f2, wctx, mctx)
400 if r > 0:
400 if r > 0:
401 unresolved += 1
401 unresolved += 1
402 else:
402 else:
403 if r is None:
403 if r is None:
404 updated += 1
404 updated += 1
405 else:
405 else:
406 merged += 1
406 merged += 1
407 if f != fd:
407 if f != fd:
408 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
408 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
409 repo.wwrite(fd, repo.wread(f), flags)
409 repo.wwrite(fd, repo.wread(f), flags)
410 if move:
410 if move:
411 repo.ui.debug(_("removing %s\n") % f)
411 repo.ui.debug(_("removing %s\n") % f)
412 os.unlink(repo.wjoin(f))
412 os.unlink(repo.wjoin(f))
413 util.set_exec(repo.wjoin(fd), "x" in flags)
413 util.set_exec(repo.wjoin(fd), "x" in flags)
414 elif m == "g": # get
414 elif m == "g": # get
415 flags = a[2]
415 flags = a[2]
416 repo.ui.note(_("getting %s\n") % f)
416 repo.ui.note(_("getting %s\n") % f)
417 t = mctx.filectx(f).data()
417 t = mctx.filectx(f).data()
418 repo.wwrite(f, t, flags)
418 repo.wwrite(f, t, flags)
419 updated += 1
419 updated += 1
420 elif m == "d": # directory rename
420 elif m == "d": # directory rename
421 f2, fd, flags = a[2:]
421 f2, fd, flags = a[2:]
422 if f:
422 if f:
423 repo.ui.note(_("moving %s to %s\n") % (f, fd))
423 repo.ui.note(_("moving %s to %s\n") % (f, fd))
424 t = wctx.filectx(f).data()
424 t = wctx.filectx(f).data()
425 repo.wwrite(fd, t, flags)
425 repo.wwrite(fd, t, flags)
426 util.unlink(repo.wjoin(f))
426 util.unlink(repo.wjoin(f))
427 if f2:
427 if f2:
428 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
428 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
429 t = mctx.filectx(f2).data()
429 t = mctx.filectx(f2).data()
430 repo.wwrite(fd, t, flags)
430 repo.wwrite(fd, t, flags)
431 updated += 1
431 updated += 1
432 elif m == "dr": # divergent renames
432 elif m == "dr": # divergent renames
433 fl = a[2]
433 fl = a[2]
434 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
434 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
435 for nf in fl:
435 for nf in fl:
436 repo.ui.warn(" %s\n" % nf)
436 repo.ui.warn(" %s\n" % nf)
437 elif m == "e": # exec
437 elif m == "e": # exec
438 flags = a[2]
438 flags = a[2]
439 util.set_exec(repo.wjoin(f), flags)
439 util.set_exec(repo.wjoin(f), flags)
440
440
441 return updated, merged, removed, unresolved
441 return updated, merged, removed, unresolved
442
442
443 def recordupdates(repo, action, branchmerge):
443 def recordupdates(repo, action, branchmerge):
444 "record merge actions to the dirstate"
444 "record merge actions to the dirstate"
445
445
446 for a in action:
446 for a in action:
447 f, m = a[:2]
447 f, m = a[:2]
448 if m == "r": # remove
448 if m == "r": # remove
449 if branchmerge:
449 if branchmerge:
450 repo.dirstate.remove(f)
450 repo.dirstate.remove(f)
451 else:
451 else:
452 repo.dirstate.forget(f)
452 repo.dirstate.forget(f)
453 elif m == "f": # forget
453 elif m == "f": # forget
454 repo.dirstate.forget(f)
454 repo.dirstate.forget(f)
455 elif m == "g": # get
455 elif m == "g": # get
456 if branchmerge:
456 if branchmerge:
457 repo.dirstate.normaldirty(f)
457 repo.dirstate.normaldirty(f)
458 else:
458 else:
459 repo.dirstate.normal(f)
459 repo.dirstate.normal(f)
460 elif m == "m": # merge
460 elif m == "m": # merge
461 f2, fd, flag, move = a[2:]
461 f2, fd, flag, move = a[2:]
462 if branchmerge:
462 if branchmerge:
463 # We've done a branch merge, mark this file as merged
463 # We've done a branch merge, mark this file as merged
464 # so that we properly record the merger later
464 # so that we properly record the merger later
465 repo.dirstate.merge(fd)
465 repo.dirstate.merge(fd)
466 if f != f2: # copy/rename
466 if f != f2: # copy/rename
467 if move:
467 if move:
468 repo.dirstate.remove(f)
468 repo.dirstate.remove(f)
469 if f != fd:
469 if f != fd:
470 repo.dirstate.copy(f, fd)
470 repo.dirstate.copy(f, fd)
471 else:
471 else:
472 repo.dirstate.copy(f2, fd)
472 repo.dirstate.copy(f2, fd)
473 else:
473 else:
474 # We've update-merged a locally modified file, so
474 # We've update-merged a locally modified file, so
475 # we set the dirstate to emulate a normal checkout
475 # we set the dirstate to emulate a normal checkout
476 # of that file some time in the past. Thus our
476 # of that file some time in the past. Thus our
477 # merge will appear as a normal local file
477 # merge will appear as a normal local file
478 # modification.
478 # modification.
479 repo.dirstate.normaldirty(fd)
479 repo.dirstate.normaldirty(fd)
480 if move:
480 if move:
481 repo.dirstate.forget(f)
481 repo.dirstate.forget(f)
482 elif m == "d": # directory rename
482 elif m == "d": # directory rename
483 f2, fd, flag = a[2:]
483 f2, fd, flag = a[2:]
484 if not f2 and f not in repo.dirstate:
484 if not f2 and f not in repo.dirstate:
485 # untracked file moved
485 # untracked file moved
486 continue
486 continue
487 if branchmerge:
487 if branchmerge:
488 repo.dirstate.add(fd)
488 repo.dirstate.add(fd)
489 if f:
489 if f:
490 repo.dirstate.remove(f)
490 repo.dirstate.remove(f)
491 repo.dirstate.copy(f, fd)
491 repo.dirstate.copy(f, fd)
492 if f2:
492 if f2:
493 repo.dirstate.copy(f2, fd)
493 repo.dirstate.copy(f2, fd)
494 else:
494 else:
495 repo.dirstate.normal(fd)
495 repo.dirstate.normal(fd)
496 if f:
496 if f:
497 repo.dirstate.forget(f)
497 repo.dirstate.forget(f)
498
498
499 def update(repo, node, branchmerge, force, partial, wlock):
499 def update(repo, node, branchmerge, force, partial):
500 """
500 """
501 Perform a merge between the working directory and the given node
501 Perform a merge between the working directory and the given node
502
502
503 branchmerge = whether to merge between branches
503 branchmerge = whether to merge between branches
504 force = whether to force branch merging or file overwriting
504 force = whether to force branch merging or file overwriting
505 partial = a function to filter file lists (dirstate not updated)
505 partial = a function to filter file lists (dirstate not updated)
506 wlock = working dir lock, if already held
507 """
506 """
508
507
508 wlock = repo.wlock()
509 try:
509 try:
510 if not wlock:
511 wlock = repo.wlock()
512
513 wc = repo.workingctx()
510 wc = repo.workingctx()
514 if node is None:
511 if node is None:
515 # tip of current branch
512 # tip of current branch
516 try:
513 try:
517 node = repo.branchtags()[wc.branch()]
514 node = repo.branchtags()[wc.branch()]
518 except KeyError:
515 except KeyError:
519 raise util.Abort(_("branch %s not found") % wc.branch())
516 raise util.Abort(_("branch %s not found") % wc.branch())
520 overwrite = force and not branchmerge
517 overwrite = force and not branchmerge
521 forcemerge = force and branchmerge
518 forcemerge = force and branchmerge
522 pl = wc.parents()
519 pl = wc.parents()
523 p1, p2 = pl[0], repo.changectx(node)
520 p1, p2 = pl[0], repo.changectx(node)
524 pa = p1.ancestor(p2)
521 pa = p1.ancestor(p2)
525 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
522 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
526 fastforward = False
523 fastforward = False
527
524
528 ### check phase
525 ### check phase
529 if not overwrite and len(pl) > 1:
526 if not overwrite and len(pl) > 1:
530 raise util.Abort(_("outstanding uncommitted merges"))
527 raise util.Abort(_("outstanding uncommitted merges"))
531 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
528 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
532 if branchmerge:
529 if branchmerge:
533 if p1.branch() != p2.branch() and pa != p2:
530 if p1.branch() != p2.branch() and pa != p2:
534 fastforward = True
531 fastforward = True
535 else:
532 else:
536 raise util.Abort(_("there is nothing to merge, just use "
533 raise util.Abort(_("there is nothing to merge, just use "
537 "'hg update' or look at 'hg heads'"))
534 "'hg update' or look at 'hg heads'"))
538 elif not (overwrite or branchmerge):
535 elif not (overwrite or branchmerge):
539 raise util.Abort(_("update spans branches, use 'hg merge' "
536 raise util.Abort(_("update spans branches, use 'hg merge' "
540 "or 'hg update -C' to lose changes"))
537 "or 'hg update -C' to lose changes"))
541 if branchmerge and not forcemerge:
538 if branchmerge and not forcemerge:
542 if wc.files():
539 if wc.files():
543 raise util.Abort(_("outstanding uncommitted changes"))
540 raise util.Abort(_("outstanding uncommitted changes"))
544
541
545 ### calculate phase
542 ### calculate phase
546 action = []
543 action = []
547 if not force:
544 if not force:
548 checkunknown(wc, p2)
545 checkunknown(wc, p2)
549 if not util.checkfolding(repo.path):
546 if not util.checkfolding(repo.path):
550 checkcollision(p2)
547 checkcollision(p2)
551 if not branchmerge:
548 if not branchmerge:
552 action += forgetremoved(wc, p2)
549 action += forgetremoved(wc, p2)
553 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
550 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
554
551
555 ### apply phase
552 ### apply phase
556 if not branchmerge: # just jump to the new rev
553 if not branchmerge: # just jump to the new rev
557 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
554 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
558 if not partial:
555 if not partial:
559 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
556 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
560
557
561 stats = applyupdates(repo, action, wc, p2)
558 stats = applyupdates(repo, action, wc, p2)
562
559
563 if not partial:
560 if not partial:
564 recordupdates(repo, action, branchmerge)
561 recordupdates(repo, action, branchmerge)
565 repo.dirstate.setparents(fp1, fp2)
562 repo.dirstate.setparents(fp1, fp2)
566 if not branchmerge and not fastforward:
563 if not branchmerge and not fastforward:
567 repo.dirstate.setbranch(p2.branch())
564 repo.dirstate.setbranch(p2.branch())
568 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
565 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
569
566
570 return stats
567 return stats
571 finally:
568 finally:
572 del wlock
569 del wlock
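
The update() change above is the heart of this commit: the wlock parameter is dropped, the function acquires repo.wlock() itself, and the handle is released by deleting the reference in the finally block. The sketch below is illustrative only (toyrepo, pull_and_update and release() are not Mercurial APIs); it shows why a recursive, reference-counted lock makes a nested wlock() acquisition safe even when the caller already holds the working-directory lock.

class toyrepo(object):
    """Illustrative only: a reference-counted 'recursive' working-dir lock."""
    def __init__(self):
        self.depth = 0
        self.acquired = 0
    def wlock(self):
        if self.depth == 0:
            self.acquired += 1      # pretend to take the real OS-level lock
        self.depth += 1
        return self
    def release(self):
        self.depth -= 1
        # the real lock is released only when the outermost holder lets go

def update(repo):
    l = repo.wlock()                # safe even if the caller already holds it
    try:
        pass                        # ... merge/update work ...
    finally:
        l.release()

def pull_and_update(repo):
    l = repo.wlock()                # outermost acquisition
    try:
        update(repo)                # no wlock parameter threaded through
    finally:
        l.release()

r = toyrepo()
pull_and_update(r)
assert r.acquired == 1 and r.depth == 0
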
@@ -1,1319 +1,1319
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import *
10 from node import *
11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
12 import cStringIO, email.Parser, os, popen2, re, sha
12 import cStringIO, email.Parser, os, popen2, re, sha
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
15 class PatchError(Exception):
15 class PatchError(Exception):
16 pass
16 pass
17
17
18 class NoHunks(PatchError):
18 class NoHunks(PatchError):
19 pass
19 pass
20
20
21 # helper functions
21 # helper functions
22
22
23 def copyfile(src, dst, basedir=None):
23 def copyfile(src, dst, basedir=None):
24 if not basedir:
24 if not basedir:
25 basedir = os.getcwd()
25 basedir = os.getcwd()
26
26
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
28 if os.path.exists(absdst):
28 if os.path.exists(absdst):
29 raise util.Abort(_("cannot create %s: destination already exists") %
29 raise util.Abort(_("cannot create %s: destination already exists") %
30 dst)
30 dst)
31
31
32 targetdir = os.path.dirname(absdst)
32 targetdir = os.path.dirname(absdst)
33 if not os.path.isdir(targetdir):
33 if not os.path.isdir(targetdir):
34 os.makedirs(targetdir)
34 os.makedirs(targetdir)
35
35
36 util.copyfile(abssrc, absdst)
36 util.copyfile(abssrc, absdst)
37
37
38 # public functions
38 # public functions
39
39
40 def extract(ui, fileobj):
40 def extract(ui, fileobj):
41 '''extract patch from data read from fileobj.
41 '''extract patch from data read from fileobj.
42
42
43 patch can be a normal patch or contained in an email message.
43 patch can be a normal patch or contained in an email message.
44
44
45 return tuple (filename, message, user, date, node, p1, p2).
45 return tuple (filename, message, user, date, node, p1, p2).
46 Any item in the returned tuple can be None. If filename is None,
46 Any item in the returned tuple can be None. If filename is None,
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
48
48
49 # attempt to detect the start of a patch
49 # attempt to detect the start of a patch
50 # (this heuristic is borrowed from quilt)
50 # (this heuristic is borrowed from quilt)
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
54
54
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
56 tmpfp = os.fdopen(fd, 'w')
56 tmpfp = os.fdopen(fd, 'w')
57 try:
57 try:
58 msg = email.Parser.Parser().parse(fileobj)
58 msg = email.Parser.Parser().parse(fileobj)
59
59
60 subject = msg['Subject']
60 subject = msg['Subject']
61 user = msg['From']
61 user = msg['From']
62 # should try to parse msg['Date']
62 # should try to parse msg['Date']
63 date = None
63 date = None
64 nodeid = None
64 nodeid = None
65 branch = None
65 branch = None
66 parents = []
66 parents = []
67
67
68 if subject:
68 if subject:
69 if subject.startswith('[PATCH'):
69 if subject.startswith('[PATCH'):
70 pend = subject.find(']')
70 pend = subject.find(']')
71 if pend >= 0:
71 if pend >= 0:
72 subject = subject[pend+1:].lstrip()
72 subject = subject[pend+1:].lstrip()
73 subject = subject.replace('\n\t', ' ')
73 subject = subject.replace('\n\t', ' ')
74 ui.debug('Subject: %s\n' % subject)
74 ui.debug('Subject: %s\n' % subject)
75 if user:
75 if user:
76 ui.debug('From: %s\n' % user)
76 ui.debug('From: %s\n' % user)
77 diffs_seen = 0
77 diffs_seen = 0
78 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
78 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
79 message = ''
79 message = ''
80 for part in msg.walk():
80 for part in msg.walk():
81 content_type = part.get_content_type()
81 content_type = part.get_content_type()
82 ui.debug('Content-Type: %s\n' % content_type)
82 ui.debug('Content-Type: %s\n' % content_type)
83 if content_type not in ok_types:
83 if content_type not in ok_types:
84 continue
84 continue
85 payload = part.get_payload(decode=True)
85 payload = part.get_payload(decode=True)
86 m = diffre.search(payload)
86 m = diffre.search(payload)
87 if m:
87 if m:
88 hgpatch = False
88 hgpatch = False
89 ignoretext = False
89 ignoretext = False
90
90
91 ui.debug(_('found patch at byte %d\n') % m.start(0))
91 ui.debug(_('found patch at byte %d\n') % m.start(0))
92 diffs_seen += 1
92 diffs_seen += 1
93 cfp = cStringIO.StringIO()
93 cfp = cStringIO.StringIO()
94 for line in payload[:m.start(0)].splitlines():
94 for line in payload[:m.start(0)].splitlines():
95 if line.startswith('# HG changeset patch'):
95 if line.startswith('# HG changeset patch'):
96 ui.debug(_('patch generated by hg export\n'))
96 ui.debug(_('patch generated by hg export\n'))
97 hgpatch = True
97 hgpatch = True
98 # drop earlier commit message content
98 # drop earlier commit message content
99 cfp.seek(0)
99 cfp.seek(0)
100 cfp.truncate()
100 cfp.truncate()
101 subject = None
101 subject = None
102 elif hgpatch:
102 elif hgpatch:
103 if line.startswith('# User '):
103 if line.startswith('# User '):
104 user = line[7:]
104 user = line[7:]
105 ui.debug('From: %s\n' % user)
105 ui.debug('From: %s\n' % user)
106 elif line.startswith("# Date "):
106 elif line.startswith("# Date "):
107 date = line[7:]
107 date = line[7:]
108 elif line.startswith("# Branch "):
108 elif line.startswith("# Branch "):
109 branch = line[9:]
109 branch = line[9:]
110 elif line.startswith("# Node ID "):
110 elif line.startswith("# Node ID "):
111 nodeid = line[10:]
111 nodeid = line[10:]
112 elif line.startswith("# Parent "):
112 elif line.startswith("# Parent "):
113 parents.append(line[10:])
113 parents.append(line[10:])
114 elif line == '---' and 'git-send-email' in msg['X-Mailer']:
114 elif line == '---' and 'git-send-email' in msg['X-Mailer']:
115 ignoretext = True
115 ignoretext = True
116 if not line.startswith('# ') and not ignoretext:
116 if not line.startswith('# ') and not ignoretext:
117 cfp.write(line)
117 cfp.write(line)
118 cfp.write('\n')
118 cfp.write('\n')
119 message = cfp.getvalue()
119 message = cfp.getvalue()
120 if tmpfp:
120 if tmpfp:
121 tmpfp.write(payload)
121 tmpfp.write(payload)
122 if not payload.endswith('\n'):
122 if not payload.endswith('\n'):
123 tmpfp.write('\n')
123 tmpfp.write('\n')
124 elif not diffs_seen and message and content_type == 'text/plain':
124 elif not diffs_seen and message and content_type == 'text/plain':
125 message += '\n' + payload
125 message += '\n' + payload
126 except:
126 except:
127 tmpfp.close()
127 tmpfp.close()
128 os.unlink(tmpname)
128 os.unlink(tmpname)
129 raise
129 raise
130
130
131 if subject and not message.startswith(subject):
131 if subject and not message.startswith(subject):
132 message = '%s\n%s' % (subject, message)
132 message = '%s\n%s' % (subject, message)
133 tmpfp.close()
133 tmpfp.close()
134 if not diffs_seen:
134 if not diffs_seen:
135 os.unlink(tmpname)
135 os.unlink(tmpname)
136 return None, message, user, date, branch, None, None, None
136 return None, message, user, date, branch, None, None, None
137 p1 = parents and parents.pop(0) or None
137 p1 = parents and parents.pop(0) or None
138 p2 = parents and parents.pop(0) or None
138 p2 = parents and parents.pop(0) or None
139 return tmpname, message, user, date, branch, nodeid, p1, p2
139 return tmpname, message, user, date, branch, nodeid, p1, p2
140
140
141 GP_PATCH = 1 << 0 # we have to run patch
141 GP_PATCH = 1 << 0 # we have to run patch
142 GP_FILTER = 1 << 1 # there's some copy/rename operation
142 GP_FILTER = 1 << 1 # there's some copy/rename operation
143 GP_BINARY = 1 << 2 # there's a binary patch
143 GP_BINARY = 1 << 2 # there's a binary patch
144
144
145 def readgitpatch(fp, firstline):
145 def readgitpatch(fp, firstline):
146 """extract git-style metadata about patches from <patchname>"""
146 """extract git-style metadata about patches from <patchname>"""
147 class gitpatch:
147 class gitpatch:
148 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
148 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
149 def __init__(self, path):
149 def __init__(self, path):
150 self.path = path
150 self.path = path
151 self.oldpath = None
151 self.oldpath = None
152 self.mode = None
152 self.mode = None
153 self.op = 'MODIFY'
153 self.op = 'MODIFY'
154 self.copymod = False
154 self.copymod = False
155 self.lineno = 0
155 self.lineno = 0
156 self.binary = False
156 self.binary = False
157
157
158 def reader(fp, firstline):
158 def reader(fp, firstline):
159 yield firstline
159 yield firstline
160 for line in fp:
160 for line in fp:
161 yield line
161 yield line
162
162
163 # Filter patch for git information
163 # Filter patch for git information
164 gitre = re.compile('diff --git a/(.*) b/(.*)')
164 gitre = re.compile('diff --git a/(.*) b/(.*)')
165 gp = None
165 gp = None
166 gitpatches = []
166 gitpatches = []
167 # Can have a git patch with only metadata, causing patch to complain
167 # Can have a git patch with only metadata, causing patch to complain
168 dopatch = 0
168 dopatch = 0
169
169
170 lineno = 0
170 lineno = 0
171 for line in reader(fp, firstline):
171 for line in reader(fp, firstline):
172 lineno += 1
172 lineno += 1
173 if line.startswith('diff --git'):
173 if line.startswith('diff --git'):
174 m = gitre.match(line)
174 m = gitre.match(line)
175 if m:
175 if m:
176 if gp:
176 if gp:
177 gitpatches.append(gp)
177 gitpatches.append(gp)
178 src, dst = m.group(1, 2)
178 src, dst = m.group(1, 2)
179 gp = gitpatch(dst)
179 gp = gitpatch(dst)
180 gp.lineno = lineno
180 gp.lineno = lineno
181 elif gp:
181 elif gp:
182 if line.startswith('--- '):
182 if line.startswith('--- '):
183 if gp.op in ('COPY', 'RENAME'):
183 if gp.op in ('COPY', 'RENAME'):
184 gp.copymod = True
184 gp.copymod = True
185 dopatch |= GP_FILTER
185 dopatch |= GP_FILTER
186 gitpatches.append(gp)
186 gitpatches.append(gp)
187 gp = None
187 gp = None
188 dopatch |= GP_PATCH
188 dopatch |= GP_PATCH
189 continue
189 continue
190 if line.startswith('rename from '):
190 if line.startswith('rename from '):
191 gp.op = 'RENAME'
191 gp.op = 'RENAME'
192 gp.oldpath = line[12:].rstrip()
192 gp.oldpath = line[12:].rstrip()
193 elif line.startswith('rename to '):
193 elif line.startswith('rename to '):
194 gp.path = line[10:].rstrip()
194 gp.path = line[10:].rstrip()
195 elif line.startswith('copy from '):
195 elif line.startswith('copy from '):
196 gp.op = 'COPY'
196 gp.op = 'COPY'
197 gp.oldpath = line[10:].rstrip()
197 gp.oldpath = line[10:].rstrip()
198 elif line.startswith('copy to '):
198 elif line.startswith('copy to '):
199 gp.path = line[8:].rstrip()
199 gp.path = line[8:].rstrip()
200 elif line.startswith('deleted file'):
200 elif line.startswith('deleted file'):
201 gp.op = 'DELETE'
201 gp.op = 'DELETE'
202 elif line.startswith('new file mode '):
202 elif line.startswith('new file mode '):
203 gp.op = 'ADD'
203 gp.op = 'ADD'
204 gp.mode = int(line.rstrip()[-3:], 8)
204 gp.mode = int(line.rstrip()[-3:], 8)
205 elif line.startswith('new mode '):
205 elif line.startswith('new mode '):
206 gp.mode = int(line.rstrip()[-3:], 8)
206 gp.mode = int(line.rstrip()[-3:], 8)
207 elif line.startswith('GIT binary patch'):
207 elif line.startswith('GIT binary patch'):
208 dopatch |= GP_BINARY
208 dopatch |= GP_BINARY
209 gp.binary = True
209 gp.binary = True
210 if gp:
210 if gp:
211 gitpatches.append(gp)
211 gitpatches.append(gp)
212
212
213 if not gitpatches:
213 if not gitpatches:
214 dopatch = GP_PATCH
214 dopatch = GP_PATCH
215
215
216 return (dopatch, gitpatches)
216 return (dopatch, gitpatches)
217
217
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 """apply <patchname> to the working directory.
219 """apply <patchname> to the working directory.
220 returns whether patch was applied with fuzz factor."""
220 returns whether patch was applied with fuzz factor."""
221 patcher = ui.config('ui', 'patch')
221 patcher = ui.config('ui', 'patch')
222 args = []
222 args = []
223 try:
223 try:
224 if patcher:
224 if patcher:
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 files)
226 files)
227 else:
227 else:
228 try:
228 try:
229 return internalpatch(patchname, ui, strip, cwd, files)
229 return internalpatch(patchname, ui, strip, cwd, files)
230 except NoHunks:
230 except NoHunks:
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 ui.debug('no valid hunks found; trying with %r instead\n' %
232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 patcher)
233 patcher)
234 if util.needbinarypatch():
234 if util.needbinarypatch():
235 args.append('--binary')
235 args.append('--binary')
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 files)
237 files)
238 except PatchError, err:
238 except PatchError, err:
239 s = str(err)
239 s = str(err)
240 if s:
240 if s:
241 raise util.Abort(s)
241 raise util.Abort(s)
242 else:
242 else:
243 raise util.Abort(_('patch failed to apply'))
243 raise util.Abort(_('patch failed to apply'))
244
244
245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
246 """use <patcher> to apply <patchname> to the working directory.
246 """use <patcher> to apply <patchname> to the working directory.
247 returns whether patch was applied with fuzz factor."""
247 returns whether patch was applied with fuzz factor."""
248
248
249 fuzz = False
249 fuzz = False
250 if cwd:
250 if cwd:
251 args.append('-d %s' % util.shellquote(cwd))
251 args.append('-d %s' % util.shellquote(cwd))
252 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
252 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
253 util.shellquote(patchname)))
253 util.shellquote(patchname)))
254
254
255 for line in fp:
255 for line in fp:
256 line = line.rstrip()
256 line = line.rstrip()
257 ui.note(line + '\n')
257 ui.note(line + '\n')
258 if line.startswith('patching file '):
258 if line.startswith('patching file '):
259 pf = util.parse_patch_output(line)
259 pf = util.parse_patch_output(line)
260 printed_file = False
260 printed_file = False
261 files.setdefault(pf, (None, None))
261 files.setdefault(pf, (None, None))
262 elif line.find('with fuzz') >= 0:
262 elif line.find('with fuzz') >= 0:
263 fuzz = True
263 fuzz = True
264 if not printed_file:
264 if not printed_file:
265 ui.warn(pf + '\n')
265 ui.warn(pf + '\n')
266 printed_file = True
266 printed_file = True
267 ui.warn(line + '\n')
267 ui.warn(line + '\n')
268 elif line.find('saving rejects to file') >= 0:
268 elif line.find('saving rejects to file') >= 0:
269 ui.warn(line + '\n')
269 ui.warn(line + '\n')
270 elif line.find('FAILED') >= 0:
270 elif line.find('FAILED') >= 0:
271 if not printed_file:
271 if not printed_file:
272 ui.warn(pf + '\n')
272 ui.warn(pf + '\n')
273 printed_file = True
273 printed_file = True
274 ui.warn(line + '\n')
274 ui.warn(line + '\n')
275 code = fp.close()
275 code = fp.close()
276 if code:
276 if code:
277 raise PatchError(_("patch command failed: %s") %
277 raise PatchError(_("patch command failed: %s") %
278 util.explain_exit(code)[0])
278 util.explain_exit(code)[0])
279 return fuzz
279 return fuzz
280
280
281 def internalpatch(patchname, ui, strip, cwd, files):
281 def internalpatch(patchname, ui, strip, cwd, files):
282 """use builtin patch to apply <patchname> to the working directory.
282 """use builtin patch to apply <patchname> to the working directory.
283 returns whether patch was applied with fuzz factor."""
283 returns whether patch was applied with fuzz factor."""
284 fp = file(patchname)
284 fp = file(patchname)
285 if cwd:
285 if cwd:
286 curdir = os.getcwd()
286 curdir = os.getcwd()
287 os.chdir(cwd)
287 os.chdir(cwd)
288 try:
288 try:
289 ret = applydiff(ui, fp, files, strip=strip)
289 ret = applydiff(ui, fp, files, strip=strip)
290 finally:
290 finally:
291 if cwd:
291 if cwd:
292 os.chdir(curdir)
292 os.chdir(curdir)
293 if ret < 0:
293 if ret < 0:
294 raise PatchError
294 raise PatchError
295 return ret > 0
295 return ret > 0
296
296
297 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
297 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
298 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
298 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
299 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
299 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
300
300
301 class patchfile:
301 class patchfile:
302 def __init__(self, ui, fname):
302 def __init__(self, ui, fname):
303 self.fname = fname
303 self.fname = fname
304 self.ui = ui
304 self.ui = ui
305 try:
305 try:
306 fp = file(fname, 'r')
306 fp = file(fname, 'r')
307 self.lines = fp.readlines()
307 self.lines = fp.readlines()
308 self.exists = True
308 self.exists = True
309 except IOError:
309 except IOError:
310 dirname = os.path.dirname(fname)
310 dirname = os.path.dirname(fname)
311 if dirname and not os.path.isdir(dirname):
311 if dirname and not os.path.isdir(dirname):
312 dirs = dirname.split(os.path.sep)
312 dirs = dirname.split(os.path.sep)
313 d = ""
313 d = ""
314 for x in dirs:
314 for x in dirs:
315 d = os.path.join(d, x)
315 d = os.path.join(d, x)
316 if not os.path.isdir(d):
316 if not os.path.isdir(d):
317 os.mkdir(d)
317 os.mkdir(d)
318 self.lines = []
318 self.lines = []
319 self.exists = False
319 self.exists = False
320
320
321 self.hash = {}
321 self.hash = {}
322 self.dirty = 0
322 self.dirty = 0
323 self.offset = 0
323 self.offset = 0
324 self.rej = []
324 self.rej = []
325 self.fileprinted = False
325 self.fileprinted = False
326 self.printfile(False)
326 self.printfile(False)
327 self.hunks = 0
327 self.hunks = 0
328
328
329 def printfile(self, warn):
329 def printfile(self, warn):
330 if self.fileprinted:
330 if self.fileprinted:
331 return
331 return
332 if warn or self.ui.verbose:
332 if warn or self.ui.verbose:
333 self.fileprinted = True
333 self.fileprinted = True
334 s = _("patching file %s\n") % self.fname
334 s = _("patching file %s\n") % self.fname
335 if warn:
335 if warn:
336 self.ui.warn(s)
336 self.ui.warn(s)
337 else:
337 else:
338 self.ui.note(s)
338 self.ui.note(s)
339
339
340
340
341 def findlines(self, l, linenum):
341 def findlines(self, l, linenum):
342 # looks through the hash and finds candidate lines. The
342 # looks through the hash and finds candidate lines. The
343 # result is a list of line numbers sorted based on distance
343 # result is a list of line numbers sorted based on distance
344 # from linenum
344 # from linenum
345 def sorter(a, b):
345 def sorter(a, b):
346 vala = abs(a - linenum)
346 vala = abs(a - linenum)
347 valb = abs(b - linenum)
347 valb = abs(b - linenum)
348 return cmp(vala, valb)
348 return cmp(vala, valb)
349
349
350 try:
350 try:
351 cand = self.hash[l]
351 cand = self.hash[l]
352 except:
352 except:
353 return []
353 return []
354
354
355 if len(cand) > 1:
355 if len(cand) > 1:
356 # resort our list of potentials forward then back.
356 # resort our list of potentials forward then back.
357 cand.sort(cmp=sorter)
357 cand.sort(cmp=sorter)
358 return cand
358 return cand
359
359
360 def hashlines(self):
360 def hashlines(self):
361 self.hash = {}
361 self.hash = {}
362 for x in xrange(len(self.lines)):
362 for x in xrange(len(self.lines)):
363 s = self.lines[x]
363 s = self.lines[x]
364 self.hash.setdefault(s, []).append(x)
364 self.hash.setdefault(s, []).append(x)
365
365
366 def write_rej(self):
366 def write_rej(self):
367 # our rejects are a little different from patch(1). This always
367 # our rejects are a little different from patch(1). This always
368 # creates rejects in the same form as the original patch. A file
368 # creates rejects in the same form as the original patch. A file
369 # header is inserted so that you can run the reject through patch again
369 # header is inserted so that you can run the reject through patch again
370 # without having to type the filename.
370 # without having to type the filename.
371
371
372 if not self.rej:
372 if not self.rej:
373 return
373 return
374 if self.hunks != 1:
374 if self.hunks != 1:
375 hunkstr = "s"
375 hunkstr = "s"
376 else:
376 else:
377 hunkstr = ""
377 hunkstr = ""
378
378
379 fname = self.fname + ".rej"
379 fname = self.fname + ".rej"
380 self.ui.warn(
380 self.ui.warn(
381 _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
381 _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
382 (len(self.rej), self.hunks, hunkstr, fname))
382 (len(self.rej), self.hunks, hunkstr, fname))
383 try: os.unlink(fname)
383 try: os.unlink(fname)
384 except:
384 except:
385 pass
385 pass
386 fp = file(fname, 'w')
386 fp = file(fname, 'w')
387 base = os.path.basename(self.fname)
387 base = os.path.basename(self.fname)
388 fp.write("--- %s\n+++ %s\n" % (base, base))
388 fp.write("--- %s\n+++ %s\n" % (base, base))
389 for x in self.rej:
389 for x in self.rej:
390 for l in x.hunk:
390 for l in x.hunk:
391 fp.write(l)
391 fp.write(l)
392 if l[-1] != '\n':
392 if l[-1] != '\n':
393 fp.write("\n\ No newline at end of file\n")
393 fp.write("\n\ No newline at end of file\n")
394
394
395 def write(self, dest=None):
395 def write(self, dest=None):
396 if self.dirty:
396 if self.dirty:
397 if not dest:
397 if not dest:
398 dest = self.fname
398 dest = self.fname
399 st = None
399 st = None
400 try:
400 try:
401 st = os.lstat(dest)
401 st = os.lstat(dest)
402 if st.st_nlink > 1:
402 if st.st_nlink > 1:
403 os.unlink(dest)
403 os.unlink(dest)
404 except: pass
404 except: pass
405 fp = file(dest, 'w')
405 fp = file(dest, 'w')
406 if st:
406 if st:
407 os.chmod(dest, st.st_mode)
407 os.chmod(dest, st.st_mode)
408 fp.writelines(self.lines)
408 fp.writelines(self.lines)
409 fp.close()
409 fp.close()
410
410
411 def close(self):
411 def close(self):
412 self.write()
412 self.write()
413 self.write_rej()
413 self.write_rej()
414
414
415 def apply(self, h, reverse):
415 def apply(self, h, reverse):
416 if not h.complete():
416 if not h.complete():
417 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
417 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
418 (h.number, h.desc, len(h.a), h.lena, len(h.b),
418 (h.number, h.desc, len(h.a), h.lena, len(h.b),
419 h.lenb))
419 h.lenb))
420
420
421 self.hunks += 1
421 self.hunks += 1
422 if reverse:
422 if reverse:
423 h.reverse()
423 h.reverse()
424
424
425 if self.exists and h.createfile():
425 if self.exists and h.createfile():
426 self.ui.warn(_("file %s already exists\n") % self.fname)
426 self.ui.warn(_("file %s already exists\n") % self.fname)
427 self.rej.append(h)
427 self.rej.append(h)
428 return -1
428 return -1
429
429
430 if isinstance(h, binhunk):
430 if isinstance(h, binhunk):
431 if h.rmfile():
431 if h.rmfile():
432 os.unlink(self.fname)
432 os.unlink(self.fname)
433 else:
433 else:
434 self.lines[:] = h.new()
434 self.lines[:] = h.new()
435 self.offset += len(h.new())
435 self.offset += len(h.new())
436 self.dirty = 1
436 self.dirty = 1
437 return 0
437 return 0
438
438
439 # fast case first, no offsets, no fuzz
439 # fast case first, no offsets, no fuzz
440 old = h.old()
440 old = h.old()
441 # patch starts counting at 1 unless we are adding the file
441 # patch starts counting at 1 unless we are adding the file
442 if h.starta == 0:
442 if h.starta == 0:
443 start = 0
443 start = 0
444 else:
444 else:
445 start = h.starta + self.offset - 1
445 start = h.starta + self.offset - 1
446 orig_start = start
446 orig_start = start
447 if diffhelpers.testhunk(old, self.lines, start) == 0:
447 if diffhelpers.testhunk(old, self.lines, start) == 0:
448 if h.rmfile():
448 if h.rmfile():
449 os.unlink(self.fname)
449 os.unlink(self.fname)
450 else:
450 else:
451 self.lines[start : start + h.lena] = h.new()
451 self.lines[start : start + h.lena] = h.new()
452 self.offset += h.lenb - h.lena
452 self.offset += h.lenb - h.lena
453 self.dirty = 1
453 self.dirty = 1
454 return 0
454 return 0
455
455
456 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
456 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
457 self.hashlines()
457 self.hashlines()
458 if h.hunk[-1][0] != ' ':
458 if h.hunk[-1][0] != ' ':
459 # if the hunk tried to put something at the bottom of the file
459 # if the hunk tried to put something at the bottom of the file
460 # override the start line and use eof here
460 # override the start line and use eof here
461 search_start = len(self.lines)
461 search_start = len(self.lines)
462 else:
462 else:
463 search_start = orig_start
463 search_start = orig_start
464
464
465 for fuzzlen in xrange(3):
465 for fuzzlen in xrange(3):
466 for toponly in [ True, False ]:
466 for toponly in [ True, False ]:
467 old = h.old(fuzzlen, toponly)
467 old = h.old(fuzzlen, toponly)
468
468
469 cand = self.findlines(old[0][1:], search_start)
469 cand = self.findlines(old[0][1:], search_start)
470 for l in cand:
470 for l in cand:
471 if diffhelpers.testhunk(old, self.lines, l) == 0:
471 if diffhelpers.testhunk(old, self.lines, l) == 0:
472 newlines = h.new(fuzzlen, toponly)
472 newlines = h.new(fuzzlen, toponly)
473 self.lines[l : l + len(old)] = newlines
473 self.lines[l : l + len(old)] = newlines
474 self.offset += len(newlines) - len(old)
474 self.offset += len(newlines) - len(old)
475 self.dirty = 1
475 self.dirty = 1
476 if fuzzlen:
476 if fuzzlen:
477 fuzzstr = "with fuzz %d " % fuzzlen
477 fuzzstr = "with fuzz %d " % fuzzlen
478 f = self.ui.warn
478 f = self.ui.warn
479 self.printfile(True)
479 self.printfile(True)
480 else:
480 else:
481 fuzzstr = ""
481 fuzzstr = ""
482 f = self.ui.note
482 f = self.ui.note
483 offset = l - orig_start - fuzzlen
483 offset = l - orig_start - fuzzlen
484 if offset == 1:
484 if offset == 1:
485 linestr = "line"
485 linestr = "line"
486 else:
486 else:
487 linestr = "lines"
487 linestr = "lines"
488 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
488 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
489 (h.number, l+1, fuzzstr, offset, linestr))
489 (h.number, l+1, fuzzstr, offset, linestr))
490 return fuzzlen
490 return fuzzlen
491 self.printfile(True)
491 self.printfile(True)
492 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
492 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
493 self.rej.append(h)
493 self.rej.append(h)
494 return -1
494 return -1
495
495
496 class hunk:
496 class hunk:
497 def __init__(self, desc, num, lr, context):
497 def __init__(self, desc, num, lr, context):
498 self.number = num
498 self.number = num
499 self.desc = desc
499 self.desc = desc
500 self.hunk = [ desc ]
500 self.hunk = [ desc ]
501 self.a = []
501 self.a = []
502 self.b = []
502 self.b = []
503 if context:
503 if context:
504 self.read_context_hunk(lr)
504 self.read_context_hunk(lr)
505 else:
505 else:
506 self.read_unified_hunk(lr)
506 self.read_unified_hunk(lr)
507
507
508 def read_unified_hunk(self, lr):
508 def read_unified_hunk(self, lr):
509 m = unidesc.match(self.desc)
509 m = unidesc.match(self.desc)
510 if not m:
510 if not m:
511 raise PatchError(_("bad hunk #%d") % self.number)
511 raise PatchError(_("bad hunk #%d") % self.number)
512 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
512 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
513 if self.lena == None:
513 if self.lena == None:
514 self.lena = 1
514 self.lena = 1
515 else:
515 else:
516 self.lena = int(self.lena)
516 self.lena = int(self.lena)
517 if self.lenb == None:
517 if self.lenb == None:
518 self.lenb = 1
518 self.lenb = 1
519 else:
519 else:
520 self.lenb = int(self.lenb)
520 self.lenb = int(self.lenb)
521 self.starta = int(self.starta)
521 self.starta = int(self.starta)
522 self.startb = int(self.startb)
522 self.startb = int(self.startb)
523 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
523 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
524 # if we hit eof before finishing out the hunk, the last line will
524 # if we hit eof before finishing out the hunk, the last line will
525 # be zero length. Let's try to fix it up.
525 # be zero length. Let's try to fix it up.
526 while len(self.hunk[-1]) == 0:
526 while len(self.hunk[-1]) == 0:
527 del self.hunk[-1]
527 del self.hunk[-1]
528 del self.a[-1]
528 del self.a[-1]
529 del self.b[-1]
529 del self.b[-1]
530 self.lena -= 1
530 self.lena -= 1
531 self.lenb -= 1
531 self.lenb -= 1
532
532
533 def read_context_hunk(self, lr):
533 def read_context_hunk(self, lr):
534 self.desc = lr.readline()
534 self.desc = lr.readline()
535 m = contextdesc.match(self.desc)
535 m = contextdesc.match(self.desc)
536 if not m:
536 if not m:
537 raise PatchError(_("bad hunk #%d") % self.number)
537 raise PatchError(_("bad hunk #%d") % self.number)
538 foo, self.starta, foo2, aend, foo3 = m.groups()
538 foo, self.starta, foo2, aend, foo3 = m.groups()
539 self.starta = int(self.starta)
539 self.starta = int(self.starta)
540 if aend == None:
540 if aend == None:
541 aend = self.starta
541 aend = self.starta
542 self.lena = int(aend) - self.starta
542 self.lena = int(aend) - self.starta
543 if self.starta:
543 if self.starta:
544 self.lena += 1
544 self.lena += 1
545 for x in xrange(self.lena):
545 for x in xrange(self.lena):
546 l = lr.readline()
546 l = lr.readline()
547 if l.startswith('---'):
547 if l.startswith('---'):
548 lr.push(l)
548 lr.push(l)
549 break
549 break
550 s = l[2:]
550 s = l[2:]
551 if l.startswith('- ') or l.startswith('! '):
551 if l.startswith('- ') or l.startswith('! '):
552 u = '-' + s
552 u = '-' + s
553 elif l.startswith(' '):
553 elif l.startswith(' '):
554 u = ' ' + s
554 u = ' ' + s
555 else:
555 else:
556 raise PatchError(_("bad hunk #%d old text line %d") %
556 raise PatchError(_("bad hunk #%d old text line %d") %
557 (self.number, x))
557 (self.number, x))
558 self.a.append(u)
558 self.a.append(u)
559 self.hunk.append(u)
559 self.hunk.append(u)
560
560
561 l = lr.readline()
561 l = lr.readline()
562 if l.startswith('\ '):
562 if l.startswith('\ '):
563 s = self.a[-1][:-1]
563 s = self.a[-1][:-1]
564 self.a[-1] = s
564 self.a[-1] = s
565 self.hunk[-1] = s
565 self.hunk[-1] = s
566 l = lr.readline()
566 l = lr.readline()
567 m = contextdesc.match(l)
567 m = contextdesc.match(l)
568 if not m:
568 if not m:
569 raise PatchError(_("bad hunk #%d") % self.number)
569 raise PatchError(_("bad hunk #%d") % self.number)
570 foo, self.startb, foo2, bend, foo3 = m.groups()
570 foo, self.startb, foo2, bend, foo3 = m.groups()
571 self.startb = int(self.startb)
571 self.startb = int(self.startb)
572 if bend == None:
572 if bend == None:
573 bend = self.startb
573 bend = self.startb
574 self.lenb = int(bend) - self.startb
574 self.lenb = int(bend) - self.startb
575 if self.startb:
575 if self.startb:
576 self.lenb += 1
576 self.lenb += 1
577 hunki = 1
577 hunki = 1
578 for x in xrange(self.lenb):
578 for x in xrange(self.lenb):
579 l = lr.readline()
579 l = lr.readline()
580 if l.startswith('\ '):
580 if l.startswith('\ '):
581 s = self.b[-1][:-1]
581 s = self.b[-1][:-1]
582 self.b[-1] = s
582 self.b[-1] = s
583 self.hunk[hunki-1] = s
583 self.hunk[hunki-1] = s
584 continue
584 continue
585 if not l:
585 if not l:
586 lr.push(l)
586 lr.push(l)
587 break
587 break
588 s = l[2:]
588 s = l[2:]
589 if l.startswith('+ ') or l.startswith('! '):
589 if l.startswith('+ ') or l.startswith('! '):
590 u = '+' + s
590 u = '+' + s
591 elif l.startswith(' '):
591 elif l.startswith(' '):
592 u = ' ' + s
592 u = ' ' + s
593 elif len(self.b) == 0:
593 elif len(self.b) == 0:
594 # this can happen when the hunk does not add any lines
594 # this can happen when the hunk does not add any lines
595 lr.push(l)
595 lr.push(l)
596 break
596 break
597 else:
597 else:
598 raise PatchError(_("bad hunk #%d old text line %d") %
598 raise PatchError(_("bad hunk #%d old text line %d") %
599 (self.number, x))
599 (self.number, x))
600 self.b.append(s)
600 self.b.append(s)
601 while True:
601 while True:
602 if hunki >= len(self.hunk):
602 if hunki >= len(self.hunk):
603 h = ""
603 h = ""
604 else:
604 else:
605 h = self.hunk[hunki]
605 h = self.hunk[hunki]
606 hunki += 1
606 hunki += 1
607 if h == u:
607 if h == u:
608 break
608 break
609 elif h.startswith('-'):
609 elif h.startswith('-'):
610 continue
610 continue
611 else:
611 else:
612 self.hunk.insert(hunki-1, u)
612 self.hunk.insert(hunki-1, u)
613 break
613 break
614
614
615 if not self.a:
615 if not self.a:
616 # this happens when lines were only added to the hunk
616 # this happens when lines were only added to the hunk
617 for x in self.hunk:
617 for x in self.hunk:
618 if x.startswith('-') or x.startswith(' '):
618 if x.startswith('-') or x.startswith(' '):
619 self.a.append(x)
619 self.a.append(x)
620 if not self.b:
620 if not self.b:
621 # this happens when lines were only deleted from the hunk
621 # this happens when lines were only deleted from the hunk
622 for x in self.hunk:
622 for x in self.hunk:
623 if x.startswith('+') or x.startswith(' '):
623 if x.startswith('+') or x.startswith(' '):
624 self.b.append(x[1:])
624 self.b.append(x[1:])
625 # @@ -start,len +start,len @@
625 # @@ -start,len +start,len @@
626 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
626 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
627 self.startb, self.lenb)
627 self.startb, self.lenb)
628 self.hunk[0] = self.desc
628 self.hunk[0] = self.desc
629
629
630 def reverse(self):
630 def reverse(self):
631 origlena = self.lena
631 origlena = self.lena
632 origstarta = self.starta
632 origstarta = self.starta
633 self.lena = self.lenb
633 self.lena = self.lenb
634 self.starta = self.startb
634 self.starta = self.startb
635 self.lenb = origlena
635 self.lenb = origlena
636 self.startb = origstarta
636 self.startb = origstarta
637 self.a = []
637 self.a = []
638 self.b = []
638 self.b = []
639 # self.hunk[0] is the @@ description
639 # self.hunk[0] is the @@ description
640 for x in xrange(1, len(self.hunk)):
640 for x in xrange(1, len(self.hunk)):
641 o = self.hunk[x]
641 o = self.hunk[x]
642 if o.startswith('-'):
642 if o.startswith('-'):
643 n = '+' + o[1:]
643 n = '+' + o[1:]
644 self.b.append(o[1:])
644 self.b.append(o[1:])
645 elif o.startswith('+'):
645 elif o.startswith('+'):
646 n = '-' + o[1:]
646 n = '-' + o[1:]
647 self.a.append(n)
647 self.a.append(n)
648 else:
648 else:
649 n = o
649 n = o
650 self.b.append(o[1:])
650 self.b.append(o[1:])
651 self.a.append(o)
651 self.a.append(o)
652 self.hunk[x] = n
652 self.hunk[x] = n
653
653
654 def fix_newline(self):
654 def fix_newline(self):
655 diffhelpers.fix_newline(self.hunk, self.a, self.b)
655 diffhelpers.fix_newline(self.hunk, self.a, self.b)
656
656
657 def complete(self):
657 def complete(self):
658 return len(self.a) == self.lena and len(self.b) == self.lenb
658 return len(self.a) == self.lena and len(self.b) == self.lenb
659
659
660 def createfile(self):
660 def createfile(self):
661 return self.starta == 0 and self.lena == 0
661 return self.starta == 0 and self.lena == 0
662
662
663 def rmfile(self):
663 def rmfile(self):
664 return self.startb == 0 and self.lenb == 0
664 return self.startb == 0 and self.lenb == 0
665
665
666 def fuzzit(self, l, fuzz, toponly):
666 def fuzzit(self, l, fuzz, toponly):
667 # this removes context lines from the top and bottom of list 'l'. It
667 # this removes context lines from the top and bottom of list 'l'. It
668 # checks the hunk to make sure only context lines are removed, and then
668 # checks the hunk to make sure only context lines are removed, and then
669 # returns a new shortened list of lines.
669 # returns a new shortened list of lines.
670 fuzz = min(fuzz, len(l)-1)
670 fuzz = min(fuzz, len(l)-1)
671 if fuzz:
671 if fuzz:
672 top = 0
672 top = 0
673 bot = 0
673 bot = 0
674 hlen = len(self.hunk)
674 hlen = len(self.hunk)
675 for x in xrange(hlen-1):
675 for x in xrange(hlen-1):
676 # the hunk starts with the @@ line, so use x+1
676 # the hunk starts with the @@ line, so use x+1
677 if self.hunk[x+1][0] == ' ':
677 if self.hunk[x+1][0] == ' ':
678 top += 1
678 top += 1
679 else:
679 else:
680 break
680 break
681 if not toponly:
681 if not toponly:
682 for x in xrange(hlen-1):
682 for x in xrange(hlen-1):
683 if self.hunk[hlen-bot-1][0] == ' ':
683 if self.hunk[hlen-bot-1][0] == ' ':
684 bot += 1
684 bot += 1
685 else:
685 else:
686 break
686 break
687
687
688 # top and bot now count context in the hunk
688 # top and bot now count context in the hunk
689 # adjust them if either one is short
689 # adjust them if either one is short
690 context = max(top, bot, 3)
690 context = max(top, bot, 3)
691 if bot < context:
691 if bot < context:
692 bot = max(0, fuzz - (context - bot))
692 bot = max(0, fuzz - (context - bot))
693 else:
693 else:
694 bot = min(fuzz, bot)
694 bot = min(fuzz, bot)
695 if top < context:
695 if top < context:
696 top = max(0, fuzz - (context - top))
696 top = max(0, fuzz - (context - top))
697 else:
697 else:
698 top = min(fuzz, top)
698 top = min(fuzz, top)
699
699
700 return l[top:len(l)-bot]
700 return l[top:len(l)-bot]
701 return l
701 return l
702
702
703 def old(self, fuzz=0, toponly=False):
703 def old(self, fuzz=0, toponly=False):
704 return self.fuzzit(self.a, fuzz, toponly)
704 return self.fuzzit(self.a, fuzz, toponly)
705
705
706 def newctrl(self):
706 def newctrl(self):
707 res = []
707 res = []
708 for x in self.hunk:
708 for x in self.hunk:
709 c = x[0]
709 c = x[0]
710 if c == ' ' or c == '+':
710 if c == ' ' or c == '+':
711 res.append(x)
711 res.append(x)
712 return res
712 return res
713
713
714 def new(self, fuzz=0, toponly=False):
714 def new(self, fuzz=0, toponly=False):
715 return self.fuzzit(self.b, fuzz, toponly)
715 return self.fuzzit(self.b, fuzz, toponly)
716
716
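The fuzzit/old/new trio above implements patch(1)-style fuzz: when a hunk no longer matches exactly, up to `fuzz` leading and trailing context lines are dropped before the match is retried (apply() tries fuzz levels 0 through 2). A minimal standalone sketch of that trimming idea, with a hypothetical helper name and none of patch.py's bookkeeping:

def trim_context(hunklines, fuzz, toponly=False):
    """Drop up to `fuzz` context lines (' ' prefix) from each end of a hunk."""
    top = 0
    for line in hunklines:
        if line.startswith(' '):
            top += 1
        else:
            break
    bot = 0
    if not toponly:
        for line in reversed(hunklines):
            if line.startswith(' '):
                bot += 1
            else:
                break
    top = min(top, fuzz)
    bot = min(bot, fuzz)
    return hunklines[top:len(hunklines) - bot]

# a hunk whose surrounding context drifted still matches once trimmed at fuzz 1
hunk = [' ctx1', '-old', '+new', ' ctx2']
assert trim_context(hunk, 0) == hunk
assert trim_context(hunk, 1) == ['-old', '+new']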
717 class binhunk:
717 class binhunk:
718 'A binary patch file. Only understands literals so far.'
718 'A binary patch file. Only understands literals so far.'
719 def __init__(self, gitpatch):
719 def __init__(self, gitpatch):
720 self.gitpatch = gitpatch
720 self.gitpatch = gitpatch
721 self.text = None
721 self.text = None
722 self.hunk = ['GIT binary patch\n']
722 self.hunk = ['GIT binary patch\n']
723
723
724 def createfile(self):
724 def createfile(self):
725 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
725 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
726
726
727 def rmfile(self):
727 def rmfile(self):
728 return self.gitpatch.op == 'DELETE'
728 return self.gitpatch.op == 'DELETE'
729
729
730 def complete(self):
730 def complete(self):
731 return self.text is not None
731 return self.text is not None
732
732
733 def new(self):
733 def new(self):
734 return [self.text]
734 return [self.text]
735
735
736 def extract(self, fp):
736 def extract(self, fp):
737 line = fp.readline()
737 line = fp.readline()
738 self.hunk.append(line)
738 self.hunk.append(line)
739 while line and not line.startswith('literal '):
739 while line and not line.startswith('literal '):
740 line = fp.readline()
740 line = fp.readline()
741 self.hunk.append(line)
741 self.hunk.append(line)
742 if not line:
742 if not line:
743 raise PatchError(_('could not extract binary patch'))
743 raise PatchError(_('could not extract binary patch'))
744 size = int(line[8:].rstrip())
744 size = int(line[8:].rstrip())
745 dec = []
745 dec = []
746 line = fp.readline()
746 line = fp.readline()
747 self.hunk.append(line)
747 self.hunk.append(line)
748 while len(line) > 1:
748 while len(line) > 1:
749 l = line[0]
749 l = line[0]
750 if l <= 'Z' and l >= 'A':
750 if l <= 'Z' and l >= 'A':
751 l = ord(l) - ord('A') + 1
751 l = ord(l) - ord('A') + 1
752 else:
752 else:
753 l = ord(l) - ord('a') + 27
753 l = ord(l) - ord('a') + 27
754 dec.append(base85.b85decode(line[1:-1])[:l])
754 dec.append(base85.b85decode(line[1:-1])[:l])
755 line = fp.readline()
755 line = fp.readline()
756 self.hunk.append(line)
756 self.hunk.append(line)
757 text = zlib.decompress(''.join(dec))
757 text = zlib.decompress(''.join(dec))
758 if len(text) != size:
758 if len(text) != size:
759 raise PatchError(_('binary patch is %d bytes, not %d') %
759 raise PatchError(_('binary patch is %d bytes, not %d') %
760 (len(text), size))
760 (len(text), size))
761 self.text = text
761 self.text = text
762
762
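binhunk.extract decodes the body of a "GIT binary patch" literal: every data line carries a length character ('A'-'Z' for 1-26 payload bytes, 'a'-'z' for 27-52) followed by base85 text, and the joined, truncated chunks zlib-decompress to the size declared on the "literal" header. A self-contained sketch of that decoding (Python 3 standard library; base64's base85 uses the same alphabet as git/mercurial, and decode_literal is an illustrative name, not part of this module):

import base64, zlib

def decode_literal(lines):
    # "literal <size>" declares the uncompressed length of the payload
    assert lines[0].startswith('literal ')
    size = int(lines[0][8:].rstrip())
    dec = []
    for line in lines[1:]:
        if len(line) <= 1:                       # blank line ends the binary hunk
            break
        c = line[0]
        if 'A' <= c <= 'Z':
            n = ord(c) - ord('A') + 1            # 1..26 payload bytes on this line
        else:
            n = ord(c) - ord('a') + 27           # 27..52 payload bytes on this line
        dec.append(base64.b85decode(line[1:-1])[:n])
    text = zlib.decompress(b''.join(dec))
    if len(text) != size:
        raise ValueError('binary patch is %d bytes, not %d' % (len(text), size))
    return text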
763 def parsefilename(str):
763 def parsefilename(str):
764 # --- filename \t|space stuff
764 # --- filename \t|space stuff
765 s = str[4:]
765 s = str[4:]
766 i = s.find('\t')
766 i = s.find('\t')
767 if i < 0:
767 if i < 0:
768 i = s.find(' ')
768 i = s.find(' ')
769 if i < 0:
769 if i < 0:
770 return s
770 return s
771 return s[:i]
771 return s[:i]
772
772
773 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
773 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
774 def pathstrip(path, count=1):
774 def pathstrip(path, count=1):
775 pathlen = len(path)
775 pathlen = len(path)
776 i = 0
776 i = 0
777 if count == 0:
777 if count == 0:
778 return path.rstrip()
778 return path.rstrip()
779 while count > 0:
779 while count > 0:
780 i = path.find(os.sep, i)
780 i = path.find(os.sep, i)
781 if i == -1:
781 if i == -1:
782 raise PatchError(_("unable to strip away %d dirs from %s") %
782 raise PatchError(_("unable to strip away %d dirs from %s") %
783 (count, path))
783 (count, path))
784 i += 1
784 i += 1
785 # consume '//' in the path
785 # consume '//' in the path
786 while i < pathlen - 1 and path[i] == os.sep:
786 while i < pathlen - 1 and path[i] == os.sep:
787 i += 1
787 i += 1
788 count -= 1
788 count -= 1
789 return path[i:].rstrip()
789 return path[i:].rstrip()
790
790
791 nulla = afile_orig == "/dev/null"
791 nulla = afile_orig == "/dev/null"
792 nullb = bfile_orig == "/dev/null"
792 nullb = bfile_orig == "/dev/null"
793 afile = pathstrip(afile_orig, strip)
793 afile = pathstrip(afile_orig, strip)
794 gooda = os.path.exists(afile) and not nulla
794 gooda = os.path.exists(afile) and not nulla
795 bfile = pathstrip(bfile_orig, strip)
795 bfile = pathstrip(bfile_orig, strip)
796 if afile == bfile:
796 if afile == bfile:
797 goodb = gooda
797 goodb = gooda
798 else:
798 else:
799 goodb = os.path.exists(bfile) and not nullb
799 goodb = os.path.exists(bfile) and not nullb
800 createfunc = hunk.createfile
800 createfunc = hunk.createfile
801 if reverse:
801 if reverse:
802 createfunc = hunk.rmfile
802 createfunc = hunk.rmfile
803 if not goodb and not gooda and not createfunc():
803 if not goodb and not gooda and not createfunc():
804 raise PatchError(_("unable to find %s or %s for patching") %
804 raise PatchError(_("unable to find %s or %s for patching") %
805 (afile, bfile))
805 (afile, bfile))
806 if gooda and goodb:
806 if gooda and goodb:
807 fname = bfile
807 fname = bfile
808 if afile in bfile:
808 if afile in bfile:
809 fname = afile
809 fname = afile
810 elif gooda:
810 elif gooda:
811 fname = afile
811 fname = afile
812 elif not nullb:
812 elif not nullb:
813 fname = bfile
813 fname = bfile
814 if afile in bfile:
814 if afile in bfile:
815 fname = afile
815 fname = afile
816 elif not nulla:
816 elif not nulla:
817 fname = afile
817 fname = afile
818 return fname
818 return fname
819
819
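selectfile's inner pathstrip implements the familiar `patch -pN` behaviour: drop N leading directory components, swallowing doubled separators, before deciding which file on disk to patch. A simplified illustration using '/' separators and a hypothetical helper name; it ignores edge cases (absolute paths, os.sep) that the real code handles:

def pathstrip(path, count=1):
    """Drop `count` leading directory components, like `patch -pN`."""
    if count == 0:
        return path.rstrip()
    parts = [p for p in path.split('/') if p]    # also swallows '//' runs
    if count >= len(parts):
        raise ValueError('unable to strip away %d dirs from %s' % (count, path))
    return '/'.join(parts[count:]).rstrip()

assert pathstrip('a/b/c.txt') == 'b/c.txt'       # default -p1, as for git a/ b/ prefixes
assert pathstrip('a//b/c.txt', 2) == 'c.txt'     # doubled separators are consumed
assert pathstrip('a/b/c.txt', 0) == 'a/b/c.txt'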
820 class linereader:
820 class linereader:
821 # simple class to allow pushing lines back into the input stream
821 # simple class to allow pushing lines back into the input stream
822 def __init__(self, fp):
822 def __init__(self, fp):
823 self.fp = fp
823 self.fp = fp
824 self.buf = []
824 self.buf = []
825
825
826 def push(self, line):
826 def push(self, line):
827 self.buf.append(line)
827 self.buf.append(line)
828
828
829 def readline(self):
829 def readline(self):
830 if self.buf:
830 if self.buf:
831 l = self.buf[0]
831 l = self.buf[0]
832 del self.buf[0]
832 del self.buf[0]
833 return l
833 return l
834 return self.fp.readline()
834 return self.fp.readline()
835
835
836 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
836 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
837 rejmerge=None, updatedir=None):
837 rejmerge=None, updatedir=None):
838 """reads a patch from fp and tries to apply it. The dict 'changed' is
838 """reads a patch from fp and tries to apply it. The dict 'changed' is
839 filled in with all of the filenames changed by the patch. Returns 0
839 filled in with all of the filenames changed by the patch. Returns 0
840 for a clean patch, -1 if any rejects were found and 1 if there was
840 for a clean patch, -1 if any rejects were found and 1 if there was
841 any fuzz."""
841 any fuzz."""
842
842
843 def scangitpatch(fp, firstline, cwd=None):
843 def scangitpatch(fp, firstline, cwd=None):
844 '''git patches can modify a file, then copy that file to
844 '''git patches can modify a file, then copy that file to
845 a new file, but expect the source to be the unmodified form.
845 a new file, but expect the source to be the unmodified form.
846 So we scan the patch looking for that case so we can do
846 So we scan the patch looking for that case so we can do
847 the copies ahead of time.'''
847 the copies ahead of time.'''
848
848
849 pos = 0
849 pos = 0
850 try:
850 try:
851 pos = fp.tell()
851 pos = fp.tell()
852 except IOError:
852 except IOError:
853 fp = cStringIO.StringIO(fp.read())
853 fp = cStringIO.StringIO(fp.read())
854
854
855 (dopatch, gitpatches) = readgitpatch(fp, firstline)
855 (dopatch, gitpatches) = readgitpatch(fp, firstline)
856 for gp in gitpatches:
856 for gp in gitpatches:
857 if gp.copymod:
857 if gp.copymod:
858 copyfile(gp.oldpath, gp.path, basedir=cwd)
858 copyfile(gp.oldpath, gp.path, basedir=cwd)
859
859
860 fp.seek(pos)
860 fp.seek(pos)
861
861
862 return fp, dopatch, gitpatches
862 return fp, dopatch, gitpatches
863
863
864 current_hunk = None
864 current_hunk = None
865 current_file = None
865 current_file = None
866 afile = ""
866 afile = ""
867 bfile = ""
867 bfile = ""
868 state = None
868 state = None
869 hunknum = 0
869 hunknum = 0
870 rejects = 0
870 rejects = 0
871
871
872 git = False
872 git = False
873 gitre = re.compile('diff --git (a/.*) (b/.*)')
873 gitre = re.compile('diff --git (a/.*) (b/.*)')
874
874
875 # our states
875 # our states
876 BFILE = 1
876 BFILE = 1
877 err = 0
877 err = 0
878 context = None
878 context = None
879 lr = linereader(fp)
879 lr = linereader(fp)
880 dopatch = True
880 dopatch = True
881 gitworkdone = False
881 gitworkdone = False
882
882
883 while True:
883 while True:
884 newfile = False
884 newfile = False
885 x = lr.readline()
885 x = lr.readline()
886 if not x:
886 if not x:
887 break
887 break
888 if current_hunk:
888 if current_hunk:
889 if x.startswith('\ '):
889 if x.startswith('\ '):
890 current_hunk.fix_newline()
890 current_hunk.fix_newline()
891 ret = current_file.apply(current_hunk, reverse)
891 ret = current_file.apply(current_hunk, reverse)
892 if ret >= 0:
892 if ret >= 0:
893 changed.setdefault(current_file.fname, (None, None))
893 changed.setdefault(current_file.fname, (None, None))
894 if ret > 0:
894 if ret > 0:
895 err = 1
895 err = 1
896 current_hunk = None
896 current_hunk = None
897 gitworkdone = False
897 gitworkdone = False
898 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
898 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
899 ((context or context == None) and x.startswith('***************')))):
899 ((context or context == None) and x.startswith('***************')))):
900 try:
900 try:
901 if context == None and x.startswith('***************'):
901 if context == None and x.startswith('***************'):
902 context = True
902 context = True
903 current_hunk = hunk(x, hunknum + 1, lr, context)
903 current_hunk = hunk(x, hunknum + 1, lr, context)
904 except PatchError, inst:
904 except PatchError, inst:
905 ui.debug(str(inst))
905 ui.debug(str(inst))
906 current_hunk = None
906 current_hunk = None
907 continue
907 continue
908 hunknum += 1
908 hunknum += 1
909 if not current_file:
909 if not current_file:
910 if sourcefile:
910 if sourcefile:
911 current_file = patchfile(ui, sourcefile)
911 current_file = patchfile(ui, sourcefile)
912 else:
912 else:
913 current_file = selectfile(afile, bfile, current_hunk,
913 current_file = selectfile(afile, bfile, current_hunk,
914 strip, reverse)
914 strip, reverse)
915 current_file = patchfile(ui, current_file)
915 current_file = patchfile(ui, current_file)
916 elif state == BFILE and x.startswith('GIT binary patch'):
916 elif state == BFILE and x.startswith('GIT binary patch'):
917 current_hunk = binhunk(changed[bfile[2:]][1])
917 current_hunk = binhunk(changed[bfile[2:]][1])
918 if not current_file:
918 if not current_file:
919 if sourcefile:
919 if sourcefile:
920 current_file = patchfile(ui, sourcefile)
920 current_file = patchfile(ui, sourcefile)
921 else:
921 else:
922 current_file = selectfile(afile, bfile, current_hunk,
922 current_file = selectfile(afile, bfile, current_hunk,
923 strip, reverse)
923 strip, reverse)
924 current_file = patchfile(ui, current_file)
924 current_file = patchfile(ui, current_file)
925 hunknum += 1
925 hunknum += 1
926 current_hunk.extract(fp)
926 current_hunk.extract(fp)
927 elif x.startswith('diff --git'):
927 elif x.startswith('diff --git'):
928 # check for git diff, scanning the whole patch file if needed
928 # check for git diff, scanning the whole patch file if needed
929 m = gitre.match(x)
929 m = gitre.match(x)
930 if m:
930 if m:
931 afile, bfile = m.group(1, 2)
931 afile, bfile = m.group(1, 2)
932 if not git:
932 if not git:
933 git = True
933 git = True
934 fp, dopatch, gitpatches = scangitpatch(fp, x)
934 fp, dopatch, gitpatches = scangitpatch(fp, x)
935 for gp in gitpatches:
935 for gp in gitpatches:
936 changed[gp.path] = (gp.op, gp)
936 changed[gp.path] = (gp.op, gp)
937 # else error?
937 # else error?
938 # copy/rename + modify should modify target, not source
938 # copy/rename + modify should modify target, not source
939 if changed.get(bfile[2:], (None, None))[0] in ('COPY',
939 if changed.get(bfile[2:], (None, None))[0] in ('COPY',
940 'RENAME'):
940 'RENAME'):
941 afile = bfile
941 afile = bfile
942 gitworkdone = True
942 gitworkdone = True
943 newfile = True
943 newfile = True
944 elif x.startswith('---'):
944 elif x.startswith('---'):
945 # check for a unified diff
945 # check for a unified diff
946 l2 = lr.readline()
946 l2 = lr.readline()
947 if not l2.startswith('+++'):
947 if not l2.startswith('+++'):
948 lr.push(l2)
948 lr.push(l2)
949 continue
949 continue
950 newfile = True
950 newfile = True
951 context = False
951 context = False
952 afile = parsefilename(x)
952 afile = parsefilename(x)
953 bfile = parsefilename(l2)
953 bfile = parsefilename(l2)
954 elif x.startswith('***'):
954 elif x.startswith('***'):
955 # check for a context diff
955 # check for a context diff
956 l2 = lr.readline()
956 l2 = lr.readline()
957 if not l2.startswith('---'):
957 if not l2.startswith('---'):
958 lr.push(l2)
958 lr.push(l2)
959 continue
959 continue
960 l3 = lr.readline()
960 l3 = lr.readline()
961 lr.push(l3)
961 lr.push(l3)
962 if not l3.startswith("***************"):
962 if not l3.startswith("***************"):
963 lr.push(l2)
963 lr.push(l2)
964 continue
964 continue
965 newfile = True
965 newfile = True
966 context = True
966 context = True
967 afile = parsefilename(x)
967 afile = parsefilename(x)
968 bfile = parsefilename(l2)
968 bfile = parsefilename(l2)
969
969
970 if newfile:
970 if newfile:
971 if current_file:
971 if current_file:
972 current_file.close()
972 current_file.close()
973 if rejmerge:
973 if rejmerge:
974 rejmerge(current_file)
974 rejmerge(current_file)
975 rejects += len(current_file.rej)
975 rejects += len(current_file.rej)
976 state = BFILE
976 state = BFILE
977 current_file = None
977 current_file = None
978 hunknum = 0
978 hunknum = 0
979 if current_hunk:
979 if current_hunk:
980 if current_hunk.complete():
980 if current_hunk.complete():
981 ret = current_file.apply(current_hunk, reverse)
981 ret = current_file.apply(current_hunk, reverse)
982 if ret >= 0:
982 if ret >= 0:
983 changed.setdefault(current_file.fname, (None, None))
983 changed.setdefault(current_file.fname, (None, None))
984 if ret > 0:
984 if ret > 0:
985 err = 1
985 err = 1
986 else:
986 else:
987 fname = current_file and current_file.fname or None
987 fname = current_file and current_file.fname or None
988 raise PatchError(_("malformed patch %s %s") % (fname,
988 raise PatchError(_("malformed patch %s %s") % (fname,
989 current_hunk.desc))
989 current_hunk.desc))
990 if current_file:
990 if current_file:
991 current_file.close()
991 current_file.close()
992 if rejmerge:
992 if rejmerge:
993 rejmerge(current_file)
993 rejmerge(current_file)
994 rejects += len(current_file.rej)
994 rejects += len(current_file.rej)
995 if updatedir and git:
995 if updatedir and git:
996 updatedir(gitpatches)
996 updatedir(gitpatches)
997 if rejects:
997 if rejects:
998 return -1
998 return -1
999 if hunknum == 0 and dopatch and not gitworkdone:
999 if hunknum == 0 and dopatch and not gitworkdone:
1000 raise NoHunks
1000 raise NoHunks
1001 return err
1001 return err
1002
1002
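For callers, applydiff's contract is: it fills `changed` with every filename the patch touches, returns 0 for a clean apply, 1 if any hunk needed fuzz, -1 if rejects were written, and raises NoHunks when the input contained nothing applicable. A minimal driver sketch (apply_patch_file is a hypothetical name; the real callers in this module also wire in rejmerge and updatedir):

def apply_patch_file(ui, patchpath, strip=1):
    changed = {}              # filled with {filename: (git op or None, gitpatch or None)}
    fp = open(patchpath, 'rb')
    try:
        ret = applydiff(ui, fp, changed, strip=strip)
    finally:
        fp.close()
    if ret < 0:
        ui.warn('patch produced rejects; see the *.rej files\n')
    elif ret > 0:
        ui.status('patch applied with fuzz\n')
    return ret, changed.keys()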
1003 def diffopts(ui, opts={}, untrusted=False):
1003 def diffopts(ui, opts={}, untrusted=False):
1004 def get(key, name=None):
1004 def get(key, name=None):
1005 return (opts.get(key) or
1005 return (opts.get(key) or
1006 ui.configbool('diff', name or key, None, untrusted=untrusted))
1006 ui.configbool('diff', name or key, None, untrusted=untrusted))
1007 return mdiff.diffopts(
1007 return mdiff.diffopts(
1008 text=opts.get('text'),
1008 text=opts.get('text'),
1009 git=get('git'),
1009 git=get('git'),
1010 nodates=get('nodates'),
1010 nodates=get('nodates'),
1011 showfunc=get('show_function', 'showfunc'),
1011 showfunc=get('show_function', 'showfunc'),
1012 ignorews=get('ignore_all_space', 'ignorews'),
1012 ignorews=get('ignore_all_space', 'ignorews'),
1013 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1013 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1014 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
1014 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
1015
1015
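diffopts gives an explicitly passed (truthy) option precedence and otherwise falls back to ui.configbool on the [diff] section, with a few key aliases (show_function maps to showfunc, ignore_all_space to ignorews, and so on). Illustrative calls; the configuration shown in the comments is an example, not a default:

# with a configuration such as:
#   [diff]
#   git = True
#   showfunc = True
opts = diffopts(ui)                     # git and showfunc come from the [diff] section
opts = diffopts(ui, {'text': True})     # an explicit option is used as-is; everything
                                        # else still falls back to the configuration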
1016 def updatedir(ui, repo, patches, wlock=None):
1016 def updatedir(ui, repo, patches):
1017 '''Update dirstate after patch application according to metadata'''
1017 '''Update dirstate after patch application according to metadata'''
1018 if not patches:
1018 if not patches:
1019 return
1019 return
1020 copies = []
1020 copies = []
1021 removes = {}
1021 removes = {}
1022 cfiles = patches.keys()
1022 cfiles = patches.keys()
1023 cwd = repo.getcwd()
1023 cwd = repo.getcwd()
1024 if cwd:
1024 if cwd:
1025 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1025 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1026 for f in patches:
1026 for f in patches:
1027 ctype, gp = patches[f]
1027 ctype, gp = patches[f]
1028 if ctype == 'RENAME':
1028 if ctype == 'RENAME':
1029 copies.append((gp.oldpath, gp.path, gp.copymod))
1029 copies.append((gp.oldpath, gp.path, gp.copymod))
1030 removes[gp.oldpath] = 1
1030 removes[gp.oldpath] = 1
1031 elif ctype == 'COPY':
1031 elif ctype == 'COPY':
1032 copies.append((gp.oldpath, gp.path, gp.copymod))
1032 copies.append((gp.oldpath, gp.path, gp.copymod))
1033 elif ctype == 'DELETE':
1033 elif ctype == 'DELETE':
1034 removes[gp.path] = 1
1034 removes[gp.path] = 1
1035 for src, dst, after in copies:
1035 for src, dst, after in copies:
1036 if not after:
1036 if not after:
1037 copyfile(src, dst, repo.root)
1037 copyfile(src, dst, repo.root)
1038 repo.copy(src, dst, wlock=wlock)
1038 repo.copy(src, dst)
1039 removes = removes.keys()
1039 removes = removes.keys()
1040 if removes:
1040 if removes:
1041 removes.sort()
1041 removes.sort()
1042 repo.remove(removes, True, wlock=wlock)
1042 repo.remove(removes, True)
1043 for f in patches:
1043 for f in patches:
1044 ctype, gp = patches[f]
1044 ctype, gp = patches[f]
1045 if gp and gp.mode:
1045 if gp and gp.mode:
1046 x = gp.mode & 0100 != 0
1046 x = gp.mode & 0100 != 0
1047 dst = os.path.join(repo.root, gp.path)
1047 dst = os.path.join(repo.root, gp.path)
1048 # patch won't create empty files
1048 # patch won't create empty files
1049 if ctype == 'ADD' and not os.path.exists(dst):
1049 if ctype == 'ADD' and not os.path.exists(dst):
1050 repo.wwrite(gp.path, '', x and 'x' or '')
1050 repo.wwrite(gp.path, '', x and 'x' or '')
1051 else:
1051 else:
1052 util.set_exec(dst, x)
1052 util.set_exec(dst, x)
1053 cmdutil.addremove(repo, cfiles, wlock=wlock)
1053 cmdutil.addremove(repo, cfiles)
1054 files = patches.keys()
1054 files = patches.keys()
1055 files.extend([r for r in removes if r not in files])
1055 files.extend([r for r in removes if r not in files])
1056 files.sort()
1056 files.sort()
1057
1057
1058 return files
1058 return files
1059
1059
1060 def b85diff(fp, to, tn):
1060 def b85diff(fp, to, tn):
1061 '''print base85-encoded binary diff'''
1061 '''print base85-encoded binary diff'''
1062 def gitindex(text):
1062 def gitindex(text):
1063 if not text:
1063 if not text:
1064 return '0' * 40
1064 return '0' * 40
1065 l = len(text)
1065 l = len(text)
1066 s = sha.new('blob %d\0' % l)
1066 s = sha.new('blob %d\0' % l)
1067 s.update(text)
1067 s.update(text)
1068 return s.hexdigest()
1068 return s.hexdigest()
1069
1069
1070 def fmtline(line):
1070 def fmtline(line):
1071 l = len(line)
1071 l = len(line)
1072 if l <= 26:
1072 if l <= 26:
1073 l = chr(ord('A') + l - 1)
1073 l = chr(ord('A') + l - 1)
1074 else:
1074 else:
1075 l = chr(l - 26 + ord('a') - 1)
1075 l = chr(l - 26 + ord('a') - 1)
1076 return '%c%s\n' % (l, base85.b85encode(line, True))
1076 return '%c%s\n' % (l, base85.b85encode(line, True))
1077
1077
1078 def chunk(text, csize=52):
1078 def chunk(text, csize=52):
1079 l = len(text)
1079 l = len(text)
1080 i = 0
1080 i = 0
1081 while i < l:
1081 while i < l:
1082 yield text[i:i+csize]
1082 yield text[i:i+csize]
1083 i += csize
1083 i += csize
1084
1084
1085 tohash = gitindex(to)
1085 tohash = gitindex(to)
1086 tnhash = gitindex(tn)
1086 tnhash = gitindex(tn)
1087 if tohash == tnhash:
1087 if tohash == tnhash:
1088 return ""
1088 return ""
1089
1089
1090 # TODO: deltas
1090 # TODO: deltas
1091 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1091 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1092 (tohash, tnhash, len(tn))]
1092 (tohash, tnhash, len(tn))]
1093 for l in chunk(zlib.compress(tn)):
1093 for l in chunk(zlib.compress(tn)):
1094 ret.append(fmtline(l))
1094 ret.append(fmtline(l))
1095 ret.append('\n')
1095 ret.append('\n')
1096 return ''.join(ret)
1096 return ''.join(ret)
1097
1097
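b85diff's chunk/fmtline pair produce the other direction of the format sketched after binhunk above: zlib-compress the new contents, slice the result into 52-byte chunks, and prefix each base85-encoded chunk with its length character. A hedged stand-alone mirror (Python 3 standard library; literal_lines is an illustrative name, and its output round-trips through the decode_literal sketch shown earlier):

import base64, zlib

def literal_lines(data, csize=52):
    """Frame `data` (bytes) as the body of a 'GIT binary patch' literal."""
    out = ['literal %d\n' % len(data)]
    comp = zlib.compress(data)
    for i in range(0, len(comp), csize):
        chunk = comp[i:i + csize]
        n = len(chunk)
        if n <= 26:
            lenchar = chr(ord('A') + n - 1)       # 'A'..'Z' for 1..26 bytes
        else:
            lenchar = chr(n - 26 + ord('a') - 1)  # 'a'..'z' for 27..52 bytes
        out.append(lenchar +
                   base64.b85encode(chunk, pad=True).decode('ascii') + '\n')
    out.append('\n')                              # blank line terminates the hunk
    return out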
1098 def diff(repo, node1=None, node2=None, files=None, match=util.always,
1098 def diff(repo, node1=None, node2=None, files=None, match=util.always,
1099 fp=None, changes=None, opts=None):
1099 fp=None, changes=None, opts=None):
1100 '''print diff of changes to files between two nodes, or node and
1100 '''print diff of changes to files between two nodes, or node and
1101 working directory.
1101 working directory.
1102
1102
1103 if node1 is None, use first dirstate parent instead.
1103 if node1 is None, use first dirstate parent instead.
1104 if node2 is None, compare node1 with working directory.'''
1104 if node2 is None, compare node1 with working directory.'''
1105
1105
1106 if opts is None:
1106 if opts is None:
1107 opts = mdiff.defaultopts
1107 opts = mdiff.defaultopts
1108 if fp is None:
1108 if fp is None:
1109 fp = repo.ui
1109 fp = repo.ui
1110
1110
1111 if not node1:
1111 if not node1:
1112 node1 = repo.dirstate.parents()[0]
1112 node1 = repo.dirstate.parents()[0]
1113
1113
1114 ccache = {}
1114 ccache = {}
1115 def getctx(r):
1115 def getctx(r):
1116 if r not in ccache:
1116 if r not in ccache:
1117 ccache[r] = context.changectx(repo, r)
1117 ccache[r] = context.changectx(repo, r)
1118 return ccache[r]
1118 return ccache[r]
1119
1119
1120 flcache = {}
1120 flcache = {}
1121 def getfilectx(f, ctx):
1121 def getfilectx(f, ctx):
1122 flctx = ctx.filectx(f, filelog=flcache.get(f))
1122 flctx = ctx.filectx(f, filelog=flcache.get(f))
1123 if f not in flcache:
1123 if f not in flcache:
1124 flcache[f] = flctx._filelog
1124 flcache[f] = flctx._filelog
1125 return flctx
1125 return flctx
1126
1126
1127 # reading the data for node1 early allows it to play nicely
1127 # reading the data for node1 early allows it to play nicely
1128 # with repo.status and the revlog cache.
1128 # with repo.status and the revlog cache.
1129 ctx1 = context.changectx(repo, node1)
1129 ctx1 = context.changectx(repo, node1)
1130 # force manifest reading
1130 # force manifest reading
1131 man1 = ctx1.manifest()
1131 man1 = ctx1.manifest()
1132 date1 = util.datestr(ctx1.date())
1132 date1 = util.datestr(ctx1.date())
1133
1133
1134 if not changes:
1134 if not changes:
1135 changes = repo.status(node1, node2, files, match=match)[:5]
1135 changes = repo.status(node1, node2, files, match=match)[:5]
1136 modified, added, removed, deleted, unknown = changes
1136 modified, added, removed, deleted, unknown = changes
1137
1137
1138 if not modified and not added and not removed:
1138 if not modified and not added and not removed:
1139 return
1139 return
1140
1140
1141 if node2:
1141 if node2:
1142 ctx2 = context.changectx(repo, node2)
1142 ctx2 = context.changectx(repo, node2)
1143 execf2 = ctx2.manifest().execf
1143 execf2 = ctx2.manifest().execf
1144 else:
1144 else:
1145 ctx2 = context.workingctx(repo)
1145 ctx2 = context.workingctx(repo)
1146 execf2 = util.execfunc(repo.root, None)
1146 execf2 = util.execfunc(repo.root, None)
1147 if execf2 is None:
1147 if execf2 is None:
1148 execf2 = ctx2.parents()[0].manifest().copy().execf
1148 execf2 = ctx2.parents()[0].manifest().copy().execf
1149
1149
1150 # returns False if there was no rename between ctx1 and ctx2
1150 # returns False if there was no rename between ctx1 and ctx2
1151 # returns None if the file was created between ctx1 and ctx2
1151 # returns None if the file was created between ctx1 and ctx2
1152 # returns the (file, node) present in ctx1 that was renamed to f in ctx2
1152 # returns the (file, node) present in ctx1 that was renamed to f in ctx2
1153 def renamed(f):
1153 def renamed(f):
1154 startrev = ctx1.rev()
1154 startrev = ctx1.rev()
1155 c = ctx2
1155 c = ctx2
1156 crev = c.rev()
1156 crev = c.rev()
1157 if crev is None:
1157 if crev is None:
1158 crev = repo.changelog.count()
1158 crev = repo.changelog.count()
1159 orig = f
1159 orig = f
1160 while crev > startrev:
1160 while crev > startrev:
1161 if f in c.files():
1161 if f in c.files():
1162 try:
1162 try:
1163 src = getfilectx(f, c).renamed()
1163 src = getfilectx(f, c).renamed()
1164 except revlog.LookupError:
1164 except revlog.LookupError:
1165 return None
1165 return None
1166 if src:
1166 if src:
1167 f = src[0]
1167 f = src[0]
1168 crev = c.parents()[0].rev()
1168 crev = c.parents()[0].rev()
1169 # try to reuse
1169 # try to reuse
1170 c = getctx(crev)
1170 c = getctx(crev)
1171 if f not in man1:
1171 if f not in man1:
1172 return None
1172 return None
1173 if f == orig:
1173 if f == orig:
1174 return False
1174 return False
1175 return f
1175 return f
1176
1176
1177 if repo.ui.quiet:
1177 if repo.ui.quiet:
1178 r = None
1178 r = None
1179 else:
1179 else:
1180 hexfunc = repo.ui.debugflag and hex or short
1180 hexfunc = repo.ui.debugflag and hex or short
1181 r = [hexfunc(node) for node in [node1, node2] if node]
1181 r = [hexfunc(node) for node in [node1, node2] if node]
1182
1182
1183 if opts.git:
1183 if opts.git:
1184 copied = {}
1184 copied = {}
1185 for f in added:
1185 for f in added:
1186 src = renamed(f)
1186 src = renamed(f)
1187 if src:
1187 if src:
1188 copied[f] = src
1188 copied[f] = src
1189 srcs = [x[1] for x in copied.items()]
1189 srcs = [x[1] for x in copied.items()]
1190
1190
1191 all = modified + added + removed
1191 all = modified + added + removed
1192 all.sort()
1192 all.sort()
1193 gone = {}
1193 gone = {}
1194
1194
1195 for f in all:
1195 for f in all:
1196 to = None
1196 to = None
1197 tn = None
1197 tn = None
1198 dodiff = True
1198 dodiff = True
1199 header = []
1199 header = []
1200 if f in man1:
1200 if f in man1:
1201 to = getfilectx(f, ctx1).data()
1201 to = getfilectx(f, ctx1).data()
1202 if f not in removed:
1202 if f not in removed:
1203 tn = getfilectx(f, ctx2).data()
1203 tn = getfilectx(f, ctx2).data()
1204 if opts.git:
1204 if opts.git:
1205 def gitmode(x):
1205 def gitmode(x):
1206 return x and '100755' or '100644'
1206 return x and '100755' or '100644'
1207 def addmodehdr(header, omode, nmode):
1207 def addmodehdr(header, omode, nmode):
1208 if omode != nmode:
1208 if omode != nmode:
1209 header.append('old mode %s\n' % omode)
1209 header.append('old mode %s\n' % omode)
1210 header.append('new mode %s\n' % nmode)
1210 header.append('new mode %s\n' % nmode)
1211
1211
1212 a, b = f, f
1212 a, b = f, f
1213 if f in added:
1213 if f in added:
1214 mode = gitmode(execf2(f))
1214 mode = gitmode(execf2(f))
1215 if f in copied:
1215 if f in copied:
1216 a = copied[f]
1216 a = copied[f]
1217 omode = gitmode(man1.execf(a))
1217 omode = gitmode(man1.execf(a))
1218 addmodehdr(header, omode, mode)
1218 addmodehdr(header, omode, mode)
1219 if a in removed and a not in gone:
1219 if a in removed and a not in gone:
1220 op = 'rename'
1220 op = 'rename'
1221 gone[a] = 1
1221 gone[a] = 1
1222 else:
1222 else:
1223 op = 'copy'
1223 op = 'copy'
1224 header.append('%s from %s\n' % (op, a))
1224 header.append('%s from %s\n' % (op, a))
1225 header.append('%s to %s\n' % (op, f))
1225 header.append('%s to %s\n' % (op, f))
1226 to = getfilectx(a, ctx1).data()
1226 to = getfilectx(a, ctx1).data()
1227 else:
1227 else:
1228 header.append('new file mode %s\n' % mode)
1228 header.append('new file mode %s\n' % mode)
1229 if util.binary(tn):
1229 if util.binary(tn):
1230 dodiff = 'binary'
1230 dodiff = 'binary'
1231 elif f in removed:
1231 elif f in removed:
1232 if f in srcs:
1232 if f in srcs:
1233 dodiff = False
1233 dodiff = False
1234 else:
1234 else:
1235 mode = gitmode(man1.execf(f))
1235 mode = gitmode(man1.execf(f))
1236 header.append('deleted file mode %s\n' % mode)
1236 header.append('deleted file mode %s\n' % mode)
1237 else:
1237 else:
1238 omode = gitmode(man1.execf(f))
1238 omode = gitmode(man1.execf(f))
1239 nmode = gitmode(execf2(f))
1239 nmode = gitmode(execf2(f))
1240 addmodehdr(header, omode, nmode)
1240 addmodehdr(header, omode, nmode)
1241 if util.binary(to) or util.binary(tn):
1241 if util.binary(to) or util.binary(tn):
1242 dodiff = 'binary'
1242 dodiff = 'binary'
1243 r = None
1243 r = None
1244 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
1244 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
1245 if dodiff:
1245 if dodiff:
1246 if dodiff == 'binary':
1246 if dodiff == 'binary':
1247 text = b85diff(fp, to, tn)
1247 text = b85diff(fp, to, tn)
1248 else:
1248 else:
1249 text = mdiff.unidiff(to, date1,
1249 text = mdiff.unidiff(to, date1,
1250 # ctx2 date may be dynamic
1250 # ctx2 date may be dynamic
1251 tn, util.datestr(ctx2.date()),
1251 tn, util.datestr(ctx2.date()),
1252 f, r, opts=opts)
1252 f, r, opts=opts)
1253 if text or len(header) > 1:
1253 if text or len(header) > 1:
1254 fp.write(''.join(header))
1254 fp.write(''.join(header))
1255 fp.write(text)
1255 fp.write(text)
1256
1256
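A hedged usage sketch for diff() against the Mercurial API of this vintage: with no nodes it compares the working directory to its first dirstate parent; passing node1 and node2 compares two changesets, and output goes to `fp` (defaulting to repo.ui). The path, filename and revision numbers below are placeholders:

from mercurial import hg, ui as uimod
from mercurial import patch

u = uimod.ui()
repo = hg.repository(u, '.')               # repository in the current directory

# working directory against its first parent, written to the ui:
patch.diff(repo, opts=patch.diffopts(u))

# two specific revisions as a git-style diff, written to a file:
fp = open('r3-vs-r5.patch', 'w')
patch.diff(repo, repo.lookup('3'), repo.lookup('5'), fp=fp,
           opts=patch.diffopts(u, {'git': True}))
fp.close()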
1257 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1257 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1258 opts=None):
1258 opts=None):
1259 '''export changesets as hg patches.'''
1259 '''export changesets as hg patches.'''
1260
1260
1261 total = len(revs)
1261 total = len(revs)
1262 revwidth = max([len(str(rev)) for rev in revs])
1262 revwidth = max([len(str(rev)) for rev in revs])
1263
1263
1264 def single(rev, seqno, fp):
1264 def single(rev, seqno, fp):
1265 ctx = repo.changectx(rev)
1265 ctx = repo.changectx(rev)
1266 node = ctx.node()
1266 node = ctx.node()
1267 parents = [p.node() for p in ctx.parents() if p]
1267 parents = [p.node() for p in ctx.parents() if p]
1268 branch = ctx.branch()
1268 branch = ctx.branch()
1269 if switch_parent:
1269 if switch_parent:
1270 parents.reverse()
1270 parents.reverse()
1271 prev = (parents and parents[0]) or nullid
1271 prev = (parents and parents[0]) or nullid
1272
1272
1273 if not fp:
1273 if not fp:
1274 fp = cmdutil.make_file(repo, template, node, total=total,
1274 fp = cmdutil.make_file(repo, template, node, total=total,
1275 seqno=seqno, revwidth=revwidth)
1275 seqno=seqno, revwidth=revwidth)
1276 if fp != sys.stdout and hasattr(fp, 'name'):
1276 if fp != sys.stdout and hasattr(fp, 'name'):
1277 repo.ui.note("%s\n" % fp.name)
1277 repo.ui.note("%s\n" % fp.name)
1278
1278
1279 fp.write("# HG changeset patch\n")
1279 fp.write("# HG changeset patch\n")
1280 fp.write("# User %s\n" % ctx.user())
1280 fp.write("# User %s\n" % ctx.user())
1281 fp.write("# Date %d %d\n" % ctx.date())
1281 fp.write("# Date %d %d\n" % ctx.date())
1282 if branch and (branch != 'default'):
1282 if branch and (branch != 'default'):
1283 fp.write("# Branch %s\n" % branch)
1283 fp.write("# Branch %s\n" % branch)
1284 fp.write("# Node ID %s\n" % hex(node))
1284 fp.write("# Node ID %s\n" % hex(node))
1285 fp.write("# Parent %s\n" % hex(prev))
1285 fp.write("# Parent %s\n" % hex(prev))
1286 if len(parents) > 1:
1286 if len(parents) > 1:
1287 fp.write("# Parent %s\n" % hex(parents[1]))
1287 fp.write("# Parent %s\n" % hex(parents[1]))
1288 fp.write(ctx.description().rstrip())
1288 fp.write(ctx.description().rstrip())
1289 fp.write("\n\n")
1289 fp.write("\n\n")
1290
1290
1291 diff(repo, prev, node, fp=fp, opts=opts)
1291 diff(repo, prev, node, fp=fp, opts=opts)
1292 if fp not in (sys.stdout, repo.ui):
1292 if fp not in (sys.stdout, repo.ui):
1293 fp.close()
1293 fp.close()
1294
1294
1295 for seqno, rev in enumerate(revs):
1295 for seqno, rev in enumerate(revs):
1296 single(rev, seqno+1, fp)
1296 single(rev, seqno+1, fp)
1297
1297
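single() writes the conventional "HG changeset patch" header before the diff body. For reference, the emitted output has the following shape; every value is a placeholder, the Branch line appears only for non-default branches, and a second Parent line is added only for merges:

# HG changeset patch
# User <committer name and address>
# Date <seconds since epoch> <timezone offset>
# Branch <branch name>
# Node ID <40-character hex node id>
# Parent <40-character hex id of the first (or switched) parent>
<commit message>

<output of diff(repo, prev, node)>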
1298 def diffstat(patchlines):
1298 def diffstat(patchlines):
1299 if not util.find_exe('diffstat'):
1299 if not util.find_exe('diffstat'):
1300 return
1300 return
1301 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1301 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1302 try:
1302 try:
1303 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1303 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1304 try:
1304 try:
1305 for line in patchlines: print >> p.tochild, line
1305 for line in patchlines: print >> p.tochild, line
1306 p.tochild.close()
1306 p.tochild.close()
1307 if p.wait(): return
1307 if p.wait(): return
1308 fp = os.fdopen(fd, 'r')
1308 fp = os.fdopen(fd, 'r')
1309 stat = []
1309 stat = []
1310 for line in fp: stat.append(line.lstrip())
1310 for line in fp: stat.append(line.lstrip())
1311 last = stat.pop()
1311 last = stat.pop()
1312 stat.insert(0, last)
1312 stat.insert(0, last)
1313 stat = ''.join(stat)
1313 stat = ''.join(stat)
1314 if stat.startswith('0 files'): raise ValueError
1314 if stat.startswith('0 files'): raise ValueError
1315 return stat
1315 return stat
1316 except: raise
1316 except: raise
1317 finally:
1317 finally:
1318 try: os.unlink(name)
1318 try: os.unlink(name)
1319 except: pass
1319 except: pass