##// END OF EJS Templates
Use try/finally pattern to cleanup locks and transactions
Matt Mackall -
r4915:97b734fb default
parent child Browse files
Show More
@@ -1,98 +1,98 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial.node import *
9 from mercurial.node import *
10 from mercurial import commands, cmdutil, hg, node, util
10 from mercurial import commands, cmdutil, hg, node, util
11
11
12 def fetch(ui, repo, source='default', **opts):
12 def fetch(ui, repo, source='default', **opts):
13 '''Pull changes from a remote repository, merge new changes if needed.
13 '''Pull changes from a remote repository, merge new changes if needed.
14
14
15 This finds all changes from the repository at the specified path
15 This finds all changes from the repository at the specified path
16 or URL and adds them to the local repository.
16 or URL and adds them to the local repository.
17
17
18 If the pulled changes add a new head, the head is automatically
18 If the pulled changes add a new head, the head is automatically
19 merged, and the result of the merge is committed. Otherwise, the
19 merged, and the result of the merge is committed. Otherwise, the
20 working directory is updated.'''
20 working directory is updated.'''
21
21
22 def postincoming(other, modheads):
22 def postincoming(other, modheads, lock, wlock):
23 if modheads == 0:
23 if modheads == 0:
24 return 0
24 return 0
25 if modheads == 1:
25 if modheads == 1:
26 return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
26 return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
27 newheads = repo.heads(parent)
27 newheads = repo.heads(parent)
28 newchildren = [n for n in repo.heads(parent) if n != parent]
28 newchildren = [n for n in repo.heads(parent) if n != parent]
29 newparent = parent
29 newparent = parent
30 if newchildren:
30 if newchildren:
31 newparent = newchildren[0]
31 newparent = newchildren[0]
32 hg.clean(repo, newparent, wlock=wlock)
32 hg.clean(repo, newparent, wlock=wlock)
33 newheads = [n for n in repo.heads() if n != newparent]
33 newheads = [n for n in repo.heads() if n != newparent]
34 err = False
34 err = False
35 if newheads:
35 if newheads:
36 ui.status(_('merging with new head %d:%s\n') %
36 ui.status(_('merging with new head %d:%s\n') %
37 (repo.changelog.rev(newheads[0]), short(newheads[0])))
37 (repo.changelog.rev(newheads[0]), short(newheads[0])))
38 err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
38 err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
39 if not err and len(newheads) > 1:
39 if not err and len(newheads) > 1:
40 ui.status(_('not merging with %d other new heads '
40 ui.status(_('not merging with %d other new heads '
41 '(use "hg heads" and "hg merge" to merge them)') %
41 '(use "hg heads" and "hg merge" to merge them)') %
42 (len(newheads) - 1))
42 (len(newheads) - 1))
43 if not err:
43 if not err:
44 mod, add, rem = repo.status(wlock=wlock)[:3]
44 mod, add, rem = repo.status(wlock=wlock)[:3]
45 message = (cmdutil.logmessage(opts) or
45 message = (cmdutil.logmessage(opts) or
46 (_('Automated merge with %s') % other.url()))
46 (_('Automated merge with %s') % other.url()))
47 n = repo.commit(mod + add + rem, message,
47 n = repo.commit(mod + add + rem, message,
48 opts['user'], opts['date'], lock=lock, wlock=wlock,
48 opts['user'], opts['date'], lock=lock, wlock=wlock,
49 force_editor=opts.get('force_editor'))
49 force_editor=opts.get('force_editor'))
50 ui.status(_('new changeset %d:%s merges remote changes '
50 ui.status(_('new changeset %d:%s merges remote changes '
51 'with local\n') % (repo.changelog.rev(n),
51 'with local\n') % (repo.changelog.rev(n),
52 short(n)))
52 short(n)))
53 def pull():
53 def pull(lock, wlock):
54 cmdutil.setremoteconfig(ui, opts)
54 cmdutil.setremoteconfig(ui, opts)
55
55
56 other = hg.repository(ui, ui.expandpath(source))
56 other = hg.repository(ui, ui.expandpath(source))
57 ui.status(_('pulling from %s\n') % ui.expandpath(source))
57 ui.status(_('pulling from %s\n') % ui.expandpath(source))
58 revs = None
58 revs = None
59 if opts['rev'] and not other.local():
59 if opts['rev'] and not other.local():
60 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
60 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
61 elif opts['rev']:
61 elif opts['rev']:
62 revs = [other.lookup(rev) for rev in opts['rev']]
62 revs = [other.lookup(rev) for rev in opts['rev']]
63 modheads = repo.pull(other, heads=revs, lock=lock)
63 modheads = repo.pull(other, heads=revs, lock=lock)
64 return postincoming(other, modheads)
64 return postincoming(other, modheads, lock, wlock)
65
65
66 parent, p2 = repo.dirstate.parents()
66 parent, p2 = repo.dirstate.parents()
67 if parent != repo.changelog.tip():
67 if parent != repo.changelog.tip():
68 raise util.Abort(_('working dir not at tip '
68 raise util.Abort(_('working dir not at tip '
69 '(use "hg update" to check out tip)'))
69 '(use "hg update" to check out tip)'))
70 if p2 != nullid:
70 if p2 != nullid:
71 raise util.Abort(_('outstanding uncommitted merge'))
71 raise util.Abort(_('outstanding uncommitted merge'))
72 wlock = repo.wlock()
72 wlock = lock = None
73 lock = repo.lock()
74 try:
73 try:
74 wlock = repo.wlock()
75 lock = repo.lock()
75 mod, add, rem = repo.status(wlock=wlock)[:3]
76 mod, add, rem = repo.status(wlock=wlock)[:3]
76 if mod or add or rem:
77 if mod or add or rem:
77 raise util.Abort(_('outstanding uncommitted changes'))
78 raise util.Abort(_('outstanding uncommitted changes'))
78 if len(repo.heads()) > 1:
79 if len(repo.heads()) > 1:
79 raise util.Abort(_('multiple heads in this repository '
80 raise util.Abort(_('multiple heads in this repository '
80 '(use "hg heads" and "hg merge" to merge)'))
81 '(use "hg heads" and "hg merge" to merge)'))
81 return pull()
82 return pull(lock, wlock)
82 finally:
83 finally:
83 lock.release()
84 del lock, wlock
84 wlock.release()
85
85
86 cmdtable = {
86 cmdtable = {
87 'fetch':
87 'fetch':
88 (fetch,
88 (fetch,
89 [('e', 'ssh', '', _('specify ssh command to use')),
89 [('e', 'ssh', '', _('specify ssh command to use')),
90 ('m', 'message', '', _('use <text> as commit message')),
90 ('m', 'message', '', _('use <text> as commit message')),
91 ('l', 'logfile', '', _('read the commit message from <file>')),
91 ('l', 'logfile', '', _('read the commit message from <file>')),
92 ('d', 'date', '', _('record datecode as commit date')),
92 ('d', 'date', '', _('record datecode as commit date')),
93 ('u', 'user', '', _('record user as commiter')),
93 ('u', 'user', '', _('record user as commiter')),
94 ('r', 'rev', [], _('a specific revision you would like to pull')),
94 ('r', 'rev', [], _('a specific revision you would like to pull')),
95 ('f', 'force-editor', None, _('edit commit message')),
95 ('f', 'force-editor', None, _('edit commit message')),
96 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
96 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
97 _('hg fetch [SOURCE]')),
97 _('hg fetch [SOURCE]')),
98 }
98 }
This diff has been collapsed as it changes many lines, (721 lines changed) Show them Hide them
@@ -1,2235 +1,2262 b''
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 from mercurial import repair
34 from mercurial import repair
35 import os, sys, re, errno
35 import os, sys, re, errno
36
36
37 commands.norepo += " qclone qversion"
37 commands.norepo += " qclone qversion"
38
38
39 # Patch names looks like unix-file names.
39 # Patch names looks like unix-file names.
40 # They must be joinable with queue directory and result in the patch path.
40 # They must be joinable with queue directory and result in the patch path.
41 normname = util.normpath
41 normname = util.normpath
42
42
43 class statusentry:
43 class statusentry:
44 def __init__(self, rev, name=None):
44 def __init__(self, rev, name=None):
45 if not name:
45 if not name:
46 fields = rev.split(':', 1)
46 fields = rev.split(':', 1)
47 if len(fields) == 2:
47 if len(fields) == 2:
48 self.rev, self.name = fields
48 self.rev, self.name = fields
49 else:
49 else:
50 self.rev, self.name = None, None
50 self.rev, self.name = None, None
51 else:
51 else:
52 self.rev, self.name = rev, name
52 self.rev, self.name = rev, name
53
53
54 def __str__(self):
54 def __str__(self):
55 return self.rev + ':' + self.name
55 return self.rev + ':' + self.name
56
56
57 class queue:
57 class queue:
58 def __init__(self, ui, path, patchdir=None):
58 def __init__(self, ui, path, patchdir=None):
59 self.basepath = path
59 self.basepath = path
60 self.path = patchdir or os.path.join(path, "patches")
60 self.path = patchdir or os.path.join(path, "patches")
61 self.opener = util.opener(self.path)
61 self.opener = util.opener(self.path)
62 self.ui = ui
62 self.ui = ui
63 self.applied = []
63 self.applied = []
64 self.full_series = []
64 self.full_series = []
65 self.applied_dirty = 0
65 self.applied_dirty = 0
66 self.series_dirty = 0
66 self.series_dirty = 0
67 self.series_path = "series"
67 self.series_path = "series"
68 self.status_path = "status"
68 self.status_path = "status"
69 self.guards_path = "guards"
69 self.guards_path = "guards"
70 self.active_guards = None
70 self.active_guards = None
71 self.guards_dirty = False
71 self.guards_dirty = False
72 self._diffopts = None
72 self._diffopts = None
73
73
74 if os.path.exists(self.join(self.series_path)):
74 if os.path.exists(self.join(self.series_path)):
75 self.full_series = self.opener(self.series_path).read().splitlines()
75 self.full_series = self.opener(self.series_path).read().splitlines()
76 self.parse_series()
76 self.parse_series()
77
77
78 if os.path.exists(self.join(self.status_path)):
78 if os.path.exists(self.join(self.status_path)):
79 lines = self.opener(self.status_path).read().splitlines()
79 lines = self.opener(self.status_path).read().splitlines()
80 self.applied = [statusentry(l) for l in lines]
80 self.applied = [statusentry(l) for l in lines]
81
81
82 def diffopts(self):
82 def diffopts(self):
83 if self._diffopts is None:
83 if self._diffopts is None:
84 self._diffopts = patch.diffopts(self.ui)
84 self._diffopts = patch.diffopts(self.ui)
85 return self._diffopts
85 return self._diffopts
86
86
87 def join(self, *p):
87 def join(self, *p):
88 return os.path.join(self.path, *p)
88 return os.path.join(self.path, *p)
89
89
90 def find_series(self, patch):
90 def find_series(self, patch):
91 pre = re.compile("(\s*)([^#]+)")
91 pre = re.compile("(\s*)([^#]+)")
92 index = 0
92 index = 0
93 for l in self.full_series:
93 for l in self.full_series:
94 m = pre.match(l)
94 m = pre.match(l)
95 if m:
95 if m:
96 s = m.group(2)
96 s = m.group(2)
97 s = s.rstrip()
97 s = s.rstrip()
98 if s == patch:
98 if s == patch:
99 return index
99 return index
100 index += 1
100 index += 1
101 return None
101 return None
102
102
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104
104
105 def parse_series(self):
105 def parse_series(self):
106 self.series = []
106 self.series = []
107 self.series_guards = []
107 self.series_guards = []
108 for l in self.full_series:
108 for l in self.full_series:
109 h = l.find('#')
109 h = l.find('#')
110 if h == -1:
110 if h == -1:
111 patch = l
111 patch = l
112 comment = ''
112 comment = ''
113 elif h == 0:
113 elif h == 0:
114 continue
114 continue
115 else:
115 else:
116 patch = l[:h]
116 patch = l[:h]
117 comment = l[h:]
117 comment = l[h:]
118 patch = patch.strip()
118 patch = patch.strip()
119 if patch:
119 if patch:
120 if patch in self.series:
120 if patch in self.series:
121 raise util.Abort(_('%s appears more than once in %s') %
121 raise util.Abort(_('%s appears more than once in %s') %
122 (patch, self.join(self.series_path)))
122 (patch, self.join(self.series_path)))
123 self.series.append(patch)
123 self.series.append(patch)
124 self.series_guards.append(self.guard_re.findall(comment))
124 self.series_guards.append(self.guard_re.findall(comment))
125
125
126 def check_guard(self, guard):
126 def check_guard(self, guard):
127 bad_chars = '# \t\r\n\f'
127 bad_chars = '# \t\r\n\f'
128 first = guard[0]
128 first = guard[0]
129 for c in '-+':
129 for c in '-+':
130 if first == c:
130 if first == c:
131 return (_('guard %r starts with invalid character: %r') %
131 return (_('guard %r starts with invalid character: %r') %
132 (guard, c))
132 (guard, c))
133 for c in bad_chars:
133 for c in bad_chars:
134 if c in guard:
134 if c in guard:
135 return _('invalid character in guard %r: %r') % (guard, c)
135 return _('invalid character in guard %r: %r') % (guard, c)
136
136
137 def set_active(self, guards):
137 def set_active(self, guards):
138 for guard in guards:
138 for guard in guards:
139 bad = self.check_guard(guard)
139 bad = self.check_guard(guard)
140 if bad:
140 if bad:
141 raise util.Abort(bad)
141 raise util.Abort(bad)
142 guards = dict.fromkeys(guards).keys()
142 guards = dict.fromkeys(guards).keys()
143 guards.sort()
143 guards.sort()
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 self.active_guards = guards
145 self.active_guards = guards
146 self.guards_dirty = True
146 self.guards_dirty = True
147
147
148 def active(self):
148 def active(self):
149 if self.active_guards is None:
149 if self.active_guards is None:
150 self.active_guards = []
150 self.active_guards = []
151 try:
151 try:
152 guards = self.opener(self.guards_path).read().split()
152 guards = self.opener(self.guards_path).read().split()
153 except IOError, err:
153 except IOError, err:
154 if err.errno != errno.ENOENT: raise
154 if err.errno != errno.ENOENT: raise
155 guards = []
155 guards = []
156 for i, guard in enumerate(guards):
156 for i, guard in enumerate(guards):
157 bad = self.check_guard(guard)
157 bad = self.check_guard(guard)
158 if bad:
158 if bad:
159 self.ui.warn('%s:%d: %s\n' %
159 self.ui.warn('%s:%d: %s\n' %
160 (self.join(self.guards_path), i + 1, bad))
160 (self.join(self.guards_path), i + 1, bad))
161 else:
161 else:
162 self.active_guards.append(guard)
162 self.active_guards.append(guard)
163 return self.active_guards
163 return self.active_guards
164
164
165 def set_guards(self, idx, guards):
165 def set_guards(self, idx, guards):
166 for g in guards:
166 for g in guards:
167 if len(g) < 2:
167 if len(g) < 2:
168 raise util.Abort(_('guard %r too short') % g)
168 raise util.Abort(_('guard %r too short') % g)
169 if g[0] not in '-+':
169 if g[0] not in '-+':
170 raise util.Abort(_('guard %r starts with invalid char') % g)
170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 bad = self.check_guard(g[1:])
171 bad = self.check_guard(g[1:])
172 if bad:
172 if bad:
173 raise util.Abort(bad)
173 raise util.Abort(bad)
174 drop = self.guard_re.sub('', self.full_series[idx])
174 drop = self.guard_re.sub('', self.full_series[idx])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 self.parse_series()
176 self.parse_series()
177 self.series_dirty = True
177 self.series_dirty = True
178
178
179 def pushable(self, idx):
179 def pushable(self, idx):
180 if isinstance(idx, str):
180 if isinstance(idx, str):
181 idx = self.series.index(idx)
181 idx = self.series.index(idx)
182 patchguards = self.series_guards[idx]
182 patchguards = self.series_guards[idx]
183 if not patchguards:
183 if not patchguards:
184 return True, None
184 return True, None
185 default = False
185 default = False
186 guards = self.active()
186 guards = self.active()
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 if exactneg:
188 if exactneg:
189 return False, exactneg[0]
189 return False, exactneg[0]
190 pos = [g for g in patchguards if g[0] == '+']
190 pos = [g for g in patchguards if g[0] == '+']
191 exactpos = [g for g in pos if g[1:] in guards]
191 exactpos = [g for g in pos if g[1:] in guards]
192 if pos:
192 if pos:
193 if exactpos:
193 if exactpos:
194 return True, exactpos[0]
194 return True, exactpos[0]
195 return False, pos
195 return False, pos
196 return True, ''
196 return True, ''
197
197
198 def explain_pushable(self, idx, all_patches=False):
198 def explain_pushable(self, idx, all_patches=False):
199 write = all_patches and self.ui.write or self.ui.warn
199 write = all_patches and self.ui.write or self.ui.warn
200 if all_patches or self.ui.verbose:
200 if all_patches or self.ui.verbose:
201 if isinstance(idx, str):
201 if isinstance(idx, str):
202 idx = self.series.index(idx)
202 idx = self.series.index(idx)
203 pushable, why = self.pushable(idx)
203 pushable, why = self.pushable(idx)
204 if all_patches and pushable:
204 if all_patches and pushable:
205 if why is None:
205 if why is None:
206 write(_('allowing %s - no guards in effect\n') %
206 write(_('allowing %s - no guards in effect\n') %
207 self.series[idx])
207 self.series[idx])
208 else:
208 else:
209 if not why:
209 if not why:
210 write(_('allowing %s - no matching negative guards\n') %
210 write(_('allowing %s - no matching negative guards\n') %
211 self.series[idx])
211 self.series[idx])
212 else:
212 else:
213 write(_('allowing %s - guarded by %r\n') %
213 write(_('allowing %s - guarded by %r\n') %
214 (self.series[idx], why))
214 (self.series[idx], why))
215 if not pushable:
215 if not pushable:
216 if why:
216 if why:
217 write(_('skipping %s - guarded by %r\n') %
217 write(_('skipping %s - guarded by %r\n') %
218 (self.series[idx], why))
218 (self.series[idx], why))
219 else:
219 else:
220 write(_('skipping %s - no matching guards\n') %
220 write(_('skipping %s - no matching guards\n') %
221 self.series[idx])
221 self.series[idx])
222
222
223 def save_dirty(self):
223 def save_dirty(self):
224 def write_list(items, path):
224 def write_list(items, path):
225 fp = self.opener(path, 'w')
225 fp = self.opener(path, 'w')
226 for i in items:
226 for i in items:
227 print >> fp, i
227 print >> fp, i
228 fp.close()
228 fp.close()
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232
232
233 def readheaders(self, patch):
233 def readheaders(self, patch):
234 def eatdiff(lines):
234 def eatdiff(lines):
235 while lines:
235 while lines:
236 l = lines[-1]
236 l = lines[-1]
237 if (l.startswith("diff -") or
237 if (l.startswith("diff -") or
238 l.startswith("Index:") or
238 l.startswith("Index:") or
239 l.startswith("===========")):
239 l.startswith("===========")):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243 def eatempty(lines):
243 def eatempty(lines):
244 while lines:
244 while lines:
245 l = lines[-1]
245 l = lines[-1]
246 if re.match('\s*$', l):
246 if re.match('\s*$', l):
247 del lines[-1]
247 del lines[-1]
248 else:
248 else:
249 break
249 break
250
250
251 pf = self.join(patch)
251 pf = self.join(patch)
252 message = []
252 message = []
253 comments = []
253 comments = []
254 user = None
254 user = None
255 date = None
255 date = None
256 format = None
256 format = None
257 subject = None
257 subject = None
258 diffstart = 0
258 diffstart = 0
259
259
260 for line in file(pf):
260 for line in file(pf):
261 line = line.rstrip()
261 line = line.rstrip()
262 if line.startswith('diff --git'):
262 if line.startswith('diff --git'):
263 diffstart = 2
263 diffstart = 2
264 break
264 break
265 if diffstart:
265 if diffstart:
266 if line.startswith('+++ '):
266 if line.startswith('+++ '):
267 diffstart = 2
267 diffstart = 2
268 break
268 break
269 if line.startswith("--- "):
269 if line.startswith("--- "):
270 diffstart = 1
270 diffstart = 1
271 continue
271 continue
272 elif format == "hgpatch":
272 elif format == "hgpatch":
273 # parse values when importing the result of an hg export
273 # parse values when importing the result of an hg export
274 if line.startswith("# User "):
274 if line.startswith("# User "):
275 user = line[7:]
275 user = line[7:]
276 elif line.startswith("# Date "):
276 elif line.startswith("# Date "):
277 date = line[7:]
277 date = line[7:]
278 elif not line.startswith("# ") and line:
278 elif not line.startswith("# ") and line:
279 message.append(line)
279 message.append(line)
280 format = None
280 format = None
281 elif line == '# HG changeset patch':
281 elif line == '# HG changeset patch':
282 format = "hgpatch"
282 format = "hgpatch"
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 line.startswith("subject: "))):
284 line.startswith("subject: "))):
285 subject = line[9:]
285 subject = line[9:]
286 format = "tag"
286 format = "tag"
287 elif (format != "tagdone" and (line.startswith("From: ") or
287 elif (format != "tagdone" and (line.startswith("From: ") or
288 line.startswith("from: "))):
288 line.startswith("from: "))):
289 user = line[6:]
289 user = line[6:]
290 format = "tag"
290 format = "tag"
291 elif format == "tag" and line == "":
291 elif format == "tag" and line == "":
292 # when looking for tags (subject: from: etc) they
292 # when looking for tags (subject: from: etc) they
293 # end once you find a blank line in the source
293 # end once you find a blank line in the source
294 format = "tagdone"
294 format = "tagdone"
295 elif message or line:
295 elif message or line:
296 message.append(line)
296 message.append(line)
297 comments.append(line)
297 comments.append(line)
298
298
299 eatdiff(message)
299 eatdiff(message)
300 eatdiff(comments)
300 eatdiff(comments)
301 eatempty(message)
301 eatempty(message)
302 eatempty(comments)
302 eatempty(comments)
303
303
304 # make sure message isn't empty
304 # make sure message isn't empty
305 if format and format.startswith("tag") and subject:
305 if format and format.startswith("tag") and subject:
306 message.insert(0, "")
306 message.insert(0, "")
307 message.insert(0, subject)
307 message.insert(0, subject)
308 return (message, comments, user, date, diffstart > 1)
308 return (message, comments, user, date, diffstart > 1)
309
309
310 def removeundo(self, repo):
310 def removeundo(self, repo):
311 undo = repo.sjoin('undo')
311 undo = repo.sjoin('undo')
312 if not os.path.exists(undo):
312 if not os.path.exists(undo):
313 return
313 return
314 try:
314 try:
315 os.unlink(undo)
315 os.unlink(undo)
316 except OSError, inst:
316 except OSError, inst:
317 self.ui.warn('error removing undo: %s\n' % str(inst))
317 self.ui.warn('error removing undo: %s\n' % str(inst))
318
318
319 def printdiff(self, repo, node1, node2=None, files=None,
319 def printdiff(self, repo, node1, node2=None, files=None,
320 fp=None, changes=None, opts={}):
320 fp=None, changes=None, opts={}):
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322
322
323 patch.diff(repo, node1, node2, fns, match=matchfn,
323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 fp=fp, changes=changes, opts=self.diffopts())
324 fp=fp, changes=changes, opts=self.diffopts())
325
325
326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
327 # first try just applying the patch
327 # first try just applying the patch
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 strict=True, merge=rev, wlock=wlock)
329 strict=True, merge=rev, wlock=wlock)
330
330
331 if err == 0:
331 if err == 0:
332 return (err, n)
332 return (err, n)
333
333
334 if n is None:
334 if n is None:
335 raise util.Abort(_("apply failed for patch %s") % patch)
335 raise util.Abort(_("apply failed for patch %s") % patch)
336
336
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338
338
339 # apply failed, strip away that rev and merge.
339 # apply failed, strip away that rev and merge.
340 hg.clean(repo, head, wlock=wlock)
340 hg.clean(repo, head, wlock=wlock)
341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
342
342
343 ctx = repo.changectx(rev)
343 ctx = repo.changectx(rev)
344 ret = hg.merge(repo, rev, wlock=wlock)
344 ret = hg.merge(repo, rev, wlock=wlock)
345 if ret:
345 if ret:
346 raise util.Abort(_("update returned %d") % ret)
346 raise util.Abort(_("update returned %d") % ret)
347 n = repo.commit(None, ctx.description(), ctx.user(),
347 n = repo.commit(None, ctx.description(), ctx.user(),
348 force=1, wlock=wlock)
348 force=1, wlock=wlock)
349 if n == None:
349 if n == None:
350 raise util.Abort(_("repo commit failed"))
350 raise util.Abort(_("repo commit failed"))
351 try:
351 try:
352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 except:
353 except:
354 raise util.Abort(_("unable to read %s") % patch)
354 raise util.Abort(_("unable to read %s") % patch)
355
355
356 patchf = self.opener(patch, "w")
356 patchf = self.opener(patch, "w")
357 if comments:
357 if comments:
358 comments = "\n".join(comments) + '\n\n'
358 comments = "\n".join(comments) + '\n\n'
359 patchf.write(comments)
359 patchf.write(comments)
360 self.printdiff(repo, head, n, fp=patchf)
360 self.printdiff(repo, head, n, fp=patchf)
361 patchf.close()
361 patchf.close()
362 self.removeundo(repo)
362 self.removeundo(repo)
363 return (0, n)
363 return (0, n)
364
364
365 def qparents(self, repo, rev=None):
365 def qparents(self, repo, rev=None):
366 if rev is None:
366 if rev is None:
367 (p1, p2) = repo.dirstate.parents()
367 (p1, p2) = repo.dirstate.parents()
368 if p2 == revlog.nullid:
368 if p2 == revlog.nullid:
369 return p1
369 return p1
370 if len(self.applied) == 0:
370 if len(self.applied) == 0:
371 return None
371 return None
372 return revlog.bin(self.applied[-1].rev)
372 return revlog.bin(self.applied[-1].rev)
373 pp = repo.changelog.parents(rev)
373 pp = repo.changelog.parents(rev)
374 if pp[1] != revlog.nullid:
374 if pp[1] != revlog.nullid:
375 arevs = [ x.rev for x in self.applied ]
375 arevs = [ x.rev for x in self.applied ]
376 p0 = revlog.hex(pp[0])
376 p0 = revlog.hex(pp[0])
377 p1 = revlog.hex(pp[1])
377 p1 = revlog.hex(pp[1])
378 if p0 in arevs:
378 if p0 in arevs:
379 return pp[0]
379 return pp[0]
380 if p1 in arevs:
380 if p1 in arevs:
381 return pp[1]
381 return pp[1]
382 return pp[0]
382 return pp[0]
383
383
384 def mergepatch(self, repo, mergeq, series, wlock):
384 def mergepatch(self, repo, mergeq, series, wlock):
385 if len(self.applied) == 0:
385 if len(self.applied) == 0:
386 # each of the patches merged in will have two parents. This
386 # each of the patches merged in will have two parents. This
387 # can confuse the qrefresh, qdiff, and strip code because it
387 # can confuse the qrefresh, qdiff, and strip code because it
388 # needs to know which parent is actually in the patch queue.
388 # needs to know which parent is actually in the patch queue.
389 # so, we insert a merge marker with only one parent. This way
389 # so, we insert a merge marker with only one parent. This way
390 # the first patch in the queue is never a merge patch
390 # the first patch in the queue is never a merge patch
391 #
391 #
392 pname = ".hg.patches.merge.marker"
392 pname = ".hg.patches.merge.marker"
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
394 wlock=wlock)
394 wlock=wlock)
395 self.removeundo(repo)
395 self.removeundo(repo)
396 self.applied.append(statusentry(revlog.hex(n), pname))
396 self.applied.append(statusentry(revlog.hex(n), pname))
397 self.applied_dirty = 1
397 self.applied_dirty = 1
398
398
399 head = self.qparents(repo)
399 head = self.qparents(repo)
400
400
401 for patch in series:
401 for patch in series:
402 patch = mergeq.lookup(patch, strict=True)
402 patch = mergeq.lookup(patch, strict=True)
403 if not patch:
403 if not patch:
404 self.ui.warn("patch %s does not exist\n" % patch)
404 self.ui.warn("patch %s does not exist\n" % patch)
405 return (1, None)
405 return (1, None)
406 pushable, reason = self.pushable(patch)
406 pushable, reason = self.pushable(patch)
407 if not pushable:
407 if not pushable:
408 self.explain_pushable(patch, all_patches=True)
408 self.explain_pushable(patch, all_patches=True)
409 continue
409 continue
410 info = mergeq.isapplied(patch)
410 info = mergeq.isapplied(patch)
411 if not info:
411 if not info:
412 self.ui.warn("patch %s is not applied\n" % patch)
412 self.ui.warn("patch %s is not applied\n" % patch)
413 return (1, None)
413 return (1, None)
414 rev = revlog.bin(info[1])
414 rev = revlog.bin(info[1])
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
416 if head:
416 if head:
417 self.applied.append(statusentry(revlog.hex(head), patch))
417 self.applied.append(statusentry(revlog.hex(head), patch))
418 self.applied_dirty = 1
418 self.applied_dirty = 1
419 if err:
419 if err:
420 return (err, head)
420 return (err, head)
421 self.save_dirty()
421 self.save_dirty()
422 return (0, head)
422 return (0, head)
423
423
424 def patch(self, repo, patchfile):
424 def patch(self, repo, patchfile):
425 '''Apply patchfile to the working directory.
425 '''Apply patchfile to the working directory.
426 patchfile: file name of patch'''
426 patchfile: file name of patch'''
427 files = {}
427 files = {}
428 try:
428 try:
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 files=files)
430 files=files)
431 except Exception, inst:
431 except Exception, inst:
432 self.ui.note(str(inst) + '\n')
432 self.ui.note(str(inst) + '\n')
433 if not self.ui.verbose:
433 if not self.ui.verbose:
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 return (False, files, False)
435 return (False, files, False)
436
436
437 return (True, files, fuzz)
437 return (True, files, fuzz)
438
438
439 def apply(self, repo, series, list=False, update_status=True,
439 def apply(self, repo, series, list=False, update_status=True,
440 strict=False, patchdir=None, merge=None, wlock=None,
440 strict=False, patchdir=None, merge=None, wlock=None,
441 all_files={}):
441 all_files={}):
442 if not wlock:
442 lock = tr = None
443 wlock = repo.wlock()
444 lock = repo.lock()
445 tr = repo.transaction()
446 try:
443 try:
447 ret = self._apply(tr, repo, series, list, update_status,
444 if not wlock:
448 strict, patchdir, merge, wlock,
445 wlock = repo.wlock()
449 lock=lock, all_files=all_files)
446 lock = repo.lock()
450 tr.close()
447 tr = repo.transaction()
451 self.save_dirty()
452 return ret
453 except:
454 try:
448 try:
455 tr.abort()
449 ret = self._apply(tr, repo, series, list, update_status,
456 finally:
450 strict, patchdir, merge, wlock,
457 repo.invalidate()
451 lock=lock, all_files=all_files)
458 repo.dirstate.invalidate()
452 tr.close()
459 raise
453 self.save_dirty()
454 return ret
455 except:
456 try:
457 tr.abort()
458 finally:
459 repo.invalidate()
460 repo.dirstate.invalidate()
461 raise
462 finally:
463 del lock, wlock, tr
460
464
461 def _apply(self, tr, repo, series, list=False, update_status=True,
465 def _apply(self, tr, repo, series, list=False, update_status=True,
462 strict=False, patchdir=None, merge=None, wlock=None,
466 strict=False, patchdir=None, merge=None, wlock=None,
463 lock=None, all_files={}):
467 lock=None, all_files={}):
464 # TODO unify with commands.py
468 # TODO unify with commands.py
465 if not patchdir:
469 if not patchdir:
466 patchdir = self.path
470 patchdir = self.path
467 err = 0
471 err = 0
468 n = None
472 n = None
469 for patchname in series:
473 for patchname in series:
470 pushable, reason = self.pushable(patchname)
474 pushable, reason = self.pushable(patchname)
471 if not pushable:
475 if not pushable:
472 self.explain_pushable(patchname, all_patches=True)
476 self.explain_pushable(patchname, all_patches=True)
473 continue
477 continue
474 self.ui.warn("applying %s\n" % patchname)
478 self.ui.warn("applying %s\n" % patchname)
475 pf = os.path.join(patchdir, patchname)
479 pf = os.path.join(patchdir, patchname)
476
480
477 try:
481 try:
478 message, comments, user, date, patchfound = self.readheaders(patchname)
482 message, comments, user, date, patchfound = self.readheaders(patchname)
479 except:
483 except:
480 self.ui.warn("Unable to read %s\n" % patchname)
484 self.ui.warn("Unable to read %s\n" % patchname)
481 err = 1
485 err = 1
482 break
486 break
483
487
484 if not message:
488 if not message:
485 message = "imported patch %s\n" % patchname
489 message = "imported patch %s\n" % patchname
486 else:
490 else:
487 if list:
491 if list:
488 message.append("\nimported patch %s" % patchname)
492 message.append("\nimported patch %s" % patchname)
489 message = '\n'.join(message)
493 message = '\n'.join(message)
490
494
491 (patcherr, files, fuzz) = self.patch(repo, pf)
495 (patcherr, files, fuzz) = self.patch(repo, pf)
492 all_files.update(files)
496 all_files.update(files)
493 patcherr = not patcherr
497 patcherr = not patcherr
494
498
495 if merge and files:
499 if merge and files:
496 # Mark as removed/merged and update dirstate parent info
500 # Mark as removed/merged and update dirstate parent info
497 removed = []
501 removed = []
498 merged = []
502 merged = []
499 for f in files:
503 for f in files:
500 if os.path.exists(repo.wjoin(f)):
504 if os.path.exists(repo.wjoin(f)):
501 merged.append(f)
505 merged.append(f)
502 else:
506 else:
503 removed.append(f)
507 removed.append(f)
504 for f in removed:
508 for f in removed:
505 repo.dirstate.remove(f)
509 repo.dirstate.remove(f)
506 for f in merged:
510 for f in merged:
507 repo.dirstate.merge(f)
511 repo.dirstate.merge(f)
508 p1, p2 = repo.dirstate.parents()
512 p1, p2 = repo.dirstate.parents()
509 repo.dirstate.setparents(p1, merge)
513 repo.dirstate.setparents(p1, merge)
510 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
514 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
511 n = repo.commit(files, message, user, date, force=1, lock=lock,
515 n = repo.commit(files, message, user, date, force=1, lock=lock,
512 wlock=wlock)
516 wlock=wlock)
513
517
514 if n == None:
518 if n == None:
515 raise util.Abort(_("repo commit failed"))
519 raise util.Abort(_("repo commit failed"))
516
520
517 if update_status:
521 if update_status:
518 self.applied.append(statusentry(revlog.hex(n), patchname))
522 self.applied.append(statusentry(revlog.hex(n), patchname))
519
523
520 if patcherr:
524 if patcherr:
521 if not patchfound:
525 if not patchfound:
522 self.ui.warn("patch %s is empty\n" % patchname)
526 self.ui.warn("patch %s is empty\n" % patchname)
523 err = 0
527 err = 0
524 else:
528 else:
525 self.ui.warn("patch failed, rejects left in working dir\n")
529 self.ui.warn("patch failed, rejects left in working dir\n")
526 err = 1
530 err = 1
527 break
531 break
528
532
529 if fuzz and strict:
533 if fuzz and strict:
530 self.ui.warn("fuzz found when applying patch, stopping\n")
534 self.ui.warn("fuzz found when applying patch, stopping\n")
531 err = 1
535 err = 1
532 break
536 break
533 self.removeundo(repo)
537 self.removeundo(repo)
534 return (err, n)
538 return (err, n)
535
539
536 def delete(self, repo, patches, opts):
540 def delete(self, repo, patches, opts):
537 if not patches and not opts.get('rev'):
541 if not patches and not opts.get('rev'):
538 raise util.Abort(_('qdelete requires at least one revision or '
542 raise util.Abort(_('qdelete requires at least one revision or '
539 'patch name'))
543 'patch name'))
540
544
541 realpatches = []
545 realpatches = []
542 for patch in patches:
546 for patch in patches:
543 patch = self.lookup(patch, strict=True)
547 patch = self.lookup(patch, strict=True)
544 info = self.isapplied(patch)
548 info = self.isapplied(patch)
545 if info:
549 if info:
546 raise util.Abort(_("cannot delete applied patch %s") % patch)
550 raise util.Abort(_("cannot delete applied patch %s") % patch)
547 if patch not in self.series:
551 if patch not in self.series:
548 raise util.Abort(_("patch %s not in series file") % patch)
552 raise util.Abort(_("patch %s not in series file") % patch)
549 realpatches.append(patch)
553 realpatches.append(patch)
550
554
551 appliedbase = 0
555 appliedbase = 0
552 if opts.get('rev'):
556 if opts.get('rev'):
553 if not self.applied:
557 if not self.applied:
554 raise util.Abort(_('no patches applied'))
558 raise util.Abort(_('no patches applied'))
555 revs = cmdutil.revrange(repo, opts['rev'])
559 revs = cmdutil.revrange(repo, opts['rev'])
556 if len(revs) > 1 and revs[0] > revs[1]:
560 if len(revs) > 1 and revs[0] > revs[1]:
557 revs.reverse()
561 revs.reverse()
558 for rev in revs:
562 for rev in revs:
559 if appliedbase >= len(self.applied):
563 if appliedbase >= len(self.applied):
560 raise util.Abort(_("revision %d is not managed") % rev)
564 raise util.Abort(_("revision %d is not managed") % rev)
561
565
562 base = revlog.bin(self.applied[appliedbase].rev)
566 base = revlog.bin(self.applied[appliedbase].rev)
563 node = repo.changelog.node(rev)
567 node = repo.changelog.node(rev)
564 if node != base:
568 if node != base:
565 raise util.Abort(_("cannot delete revision %d above "
569 raise util.Abort(_("cannot delete revision %d above "
566 "applied patches") % rev)
570 "applied patches") % rev)
567 realpatches.append(self.applied[appliedbase].name)
571 realpatches.append(self.applied[appliedbase].name)
568 appliedbase += 1
572 appliedbase += 1
569
573
570 if not opts.get('keep'):
574 if not opts.get('keep'):
571 r = self.qrepo()
575 r = self.qrepo()
572 if r:
576 if r:
573 r.remove(realpatches, True)
577 r.remove(realpatches, True)
574 else:
578 else:
575 for p in realpatches:
579 for p in realpatches:
576 os.unlink(self.join(p))
580 os.unlink(self.join(p))
577
581
578 if appliedbase:
582 if appliedbase:
579 del self.applied[:appliedbase]
583 del self.applied[:appliedbase]
580 self.applied_dirty = 1
584 self.applied_dirty = 1
581 indices = [self.find_series(p) for p in realpatches]
585 indices = [self.find_series(p) for p in realpatches]
582 indices.sort()
586 indices.sort()
583 for i in indices[-1::-1]:
587 for i in indices[-1::-1]:
584 del self.full_series[i]
588 del self.full_series[i]
585 self.parse_series()
589 self.parse_series()
586 self.series_dirty = 1
590 self.series_dirty = 1
587
591
588 def check_toppatch(self, repo):
592 def check_toppatch(self, repo):
589 if len(self.applied) > 0:
593 if len(self.applied) > 0:
590 top = revlog.bin(self.applied[-1].rev)
594 top = revlog.bin(self.applied[-1].rev)
591 pp = repo.dirstate.parents()
595 pp = repo.dirstate.parents()
592 if top not in pp:
596 if top not in pp:
593 raise util.Abort(_("queue top not at same revision as working directory"))
597 raise util.Abort(_("queue top not at same revision as working directory"))
594 return top
598 return top
595 return None
599 return None
596 def check_localchanges(self, repo, force=False, refresh=True):
600 def check_localchanges(self, repo, force=False, refresh=True):
597 m, a, r, d = repo.status()[:4]
601 m, a, r, d = repo.status()[:4]
598 if m or a or r or d:
602 if m or a or r or d:
599 if not force:
603 if not force:
600 if refresh:
604 if refresh:
601 raise util.Abort(_("local changes found, refresh first"))
605 raise util.Abort(_("local changes found, refresh first"))
602 else:
606 else:
603 raise util.Abort(_("local changes found"))
607 raise util.Abort(_("local changes found"))
604 return m, a, r, d
608 return m, a, r, d
605
609
606 def new(self, repo, patch, *pats, **opts):
610 def new(self, repo, patch, *pats, **opts):
607 msg = opts.get('msg')
611 msg = opts.get('msg')
608 force = opts.get('force')
612 force = opts.get('force')
609 if os.path.exists(self.join(patch)):
613 if os.path.exists(self.join(patch)):
610 raise util.Abort(_('patch "%s" already exists') % patch)
614 raise util.Abort(_('patch "%s" already exists') % patch)
611 if opts.get('include') or opts.get('exclude') or pats:
615 if opts.get('include') or opts.get('exclude') or pats:
612 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
616 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
613 m, a, r, d = repo.status(files=fns, match=match)[:4]
617 m, a, r, d = repo.status(files=fns, match=match)[:4]
614 else:
618 else:
615 m, a, r, d = self.check_localchanges(repo, force)
619 m, a, r, d = self.check_localchanges(repo, force)
616 commitfiles = m + a + r
620 commitfiles = m + a + r
617 self.check_toppatch(repo)
621 self.check_toppatch(repo)
618 wlock = repo.wlock()
622 wlock = repo.wlock()
619 insert = self.full_series_end()
623 try:
620 if msg:
624 insert = self.full_series_end()
621 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
625 if msg:
622 else:
626 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
623 n = repo.commit(commitfiles,
627 else:
624 "[mq]: %s" % patch, force=True, wlock=wlock)
628 n = repo.commit(commitfiles,
625 if n == None:
629 "[mq]: %s" % patch, force=True, wlock=wlock)
626 raise util.Abort(_("repo commit failed"))
630 if n == None:
627 self.full_series[insert:insert] = [patch]
631 raise util.Abort(_("repo commit failed"))
628 self.applied.append(statusentry(revlog.hex(n), patch))
632 self.full_series[insert:insert] = [patch]
629 self.parse_series()
633 self.applied.append(statusentry(revlog.hex(n), patch))
630 self.series_dirty = 1
634 self.parse_series()
631 self.applied_dirty = 1
635 self.series_dirty = 1
632 p = self.opener(patch, "w")
636 self.applied_dirty = 1
633 if msg:
637 p = self.opener(patch, "w")
634 msg = msg + "\n"
638 if msg:
635 p.write(msg)
639 msg = msg + "\n"
636 p.close()
640 p.write(msg)
637 wlock = None
641 p.close()
638 r = self.qrepo()
642 wlock = None
639 if r: r.add([patch])
643 r = self.qrepo()
640 if commitfiles:
644 if r: r.add([patch])
641 self.refresh(repo, short=True)
645 if commitfiles:
642 self.removeundo(repo)
646 self.refresh(repo, short=True)
647 self.removeundo(repo)
648 finally:
649 del wlock
643
650
644 def strip(self, repo, rev, update=True, backup="all", wlock=None):
651 def strip(self, repo, rev, update=True, backup="all", wlock=None):
645 if not wlock:
652 lock = None
646 wlock = repo.wlock()
653 try:
647 lock = repo.lock()
654 if not wlock:
655 wlock = repo.wlock()
656 lock = repo.lock()
648
657
649 if update:
658 if update:
650 self.check_localchanges(repo, refresh=False)
659 self.check_localchanges(repo, refresh=False)
651 urev = self.qparents(repo, rev)
660 urev = self.qparents(repo, rev)
652 hg.clean(repo, urev, wlock=wlock)
661 hg.clean(repo, urev, wlock=wlock)
653 repo.dirstate.write()
662 repo.dirstate.write()
654
663
655 self.removeundo(repo)
664 self.removeundo(repo)
656 repair.strip(self.ui, repo, rev, backup)
665 repair.strip(self.ui, repo, rev, backup)
666 finally:
667 del lock, wlock
657
668
658 def isapplied(self, patch):
669 def isapplied(self, patch):
659 """returns (index, rev, patch)"""
670 """returns (index, rev, patch)"""
660 for i in xrange(len(self.applied)):
671 for i in xrange(len(self.applied)):
661 a = self.applied[i]
672 a = self.applied[i]
662 if a.name == patch:
673 if a.name == patch:
663 return (i, a.rev, a.name)
674 return (i, a.rev, a.name)
664 return None
675 return None
665
676
666 # if the exact patch name does not exist, we try a few
677 # if the exact patch name does not exist, we try a few
667 # variations. If strict is passed, we try only #1
678 # variations. If strict is passed, we try only #1
668 #
679 #
669 # 1) a number to indicate an offset in the series file
680 # 1) a number to indicate an offset in the series file
670 # 2) a unique substring of the patch name was given
681 # 2) a unique substring of the patch name was given
671 # 3) patchname[-+]num to indicate an offset in the series file
682 # 3) patchname[-+]num to indicate an offset in the series file
672 def lookup(self, patch, strict=False):
683 def lookup(self, patch, strict=False):
673 patch = patch and str(patch)
684 patch = patch and str(patch)
674
685
675 def partial_name(s):
686 def partial_name(s):
676 if s in self.series:
687 if s in self.series:
677 return s
688 return s
678 matches = [x for x in self.series if s in x]
689 matches = [x for x in self.series if s in x]
679 if len(matches) > 1:
690 if len(matches) > 1:
680 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
691 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
681 for m in matches:
692 for m in matches:
682 self.ui.warn(' %s\n' % m)
693 self.ui.warn(' %s\n' % m)
683 return None
694 return None
684 if matches:
695 if matches:
685 return matches[0]
696 return matches[0]
686 if len(self.series) > 0 and len(self.applied) > 0:
697 if len(self.series) > 0 and len(self.applied) > 0:
687 if s == 'qtip':
698 if s == 'qtip':
688 return self.series[self.series_end(True)-1]
699 return self.series[self.series_end(True)-1]
689 if s == 'qbase':
700 if s == 'qbase':
690 return self.series[0]
701 return self.series[0]
691 return None
702 return None
692 if patch == None:
703 if patch == None:
693 return None
704 return None
694
705
695 # we don't want to return a partial match until we make
706 # we don't want to return a partial match until we make
696 # sure the file name passed in does not exist (checked below)
707 # sure the file name passed in does not exist (checked below)
697 res = partial_name(patch)
708 res = partial_name(patch)
698 if res and res == patch:
709 if res and res == patch:
699 return res
710 return res
700
711
701 if not os.path.isfile(self.join(patch)):
712 if not os.path.isfile(self.join(patch)):
702 try:
713 try:
703 sno = int(patch)
714 sno = int(patch)
704 except(ValueError, OverflowError):
715 except(ValueError, OverflowError):
705 pass
716 pass
706 else:
717 else:
707 if sno < len(self.series):
718 if sno < len(self.series):
708 return self.series[sno]
719 return self.series[sno]
709 if not strict:
720 if not strict:
710 # return any partial match made above
721 # return any partial match made above
711 if res:
722 if res:
712 return res
723 return res
713 minus = patch.rfind('-')
724 minus = patch.rfind('-')
714 if minus >= 0:
725 if minus >= 0:
715 res = partial_name(patch[:minus])
726 res = partial_name(patch[:minus])
716 if res:
727 if res:
717 i = self.series.index(res)
728 i = self.series.index(res)
718 try:
729 try:
719 off = int(patch[minus+1:] or 1)
730 off = int(patch[minus+1:] or 1)
720 except(ValueError, OverflowError):
731 except(ValueError, OverflowError):
721 pass
732 pass
722 else:
733 else:
723 if i - off >= 0:
734 if i - off >= 0:
724 return self.series[i - off]
735 return self.series[i - off]
725 plus = patch.rfind('+')
736 plus = patch.rfind('+')
726 if plus >= 0:
737 if plus >= 0:
727 res = partial_name(patch[:plus])
738 res = partial_name(patch[:plus])
728 if res:
739 if res:
729 i = self.series.index(res)
740 i = self.series.index(res)
730 try:
741 try:
731 off = int(patch[plus+1:] or 1)
742 off = int(patch[plus+1:] or 1)
732 except(ValueError, OverflowError):
743 except(ValueError, OverflowError):
733 pass
744 pass
734 else:
745 else:
735 if i + off < len(self.series):
746 if i + off < len(self.series):
736 return self.series[i + off]
747 return self.series[i + off]
737 raise util.Abort(_("patch %s not in series") % patch)
748 raise util.Abort(_("patch %s not in series") % patch)
738
749
739 def push(self, repo, patch=None, force=False, list=False,
750 def push(self, repo, patch=None, force=False, list=False,
740 mergeq=None, wlock=None):
751 mergeq=None, wlock=None):
741 if not wlock:
752 if not wlock:
742 wlock = repo.wlock()
753 wlock = repo.wlock()
743 patch = self.lookup(patch)
754 try:
744 # Suppose our series file is: A B C and the current 'top' patch is B.
755 patch = self.lookup(patch)
745 # qpush C should be performed (moving forward)
756 # Suppose our series file is: A B C and the current 'top'
746 # qpush B is a NOP (no change)
757 # patch is B. qpush C should be performed (moving forward)
747 # qpush A is an error (can't go backwards with qpush)
758 # qpush B is a NOP (no change) qpush A is an error (can't
748 if patch:
759 # go backwards with qpush)
749 info = self.isapplied(patch)
760 if patch:
750 if info:
761 info = self.isapplied(patch)
751 if info[0] < len(self.applied) - 1:
762 if info:
752 raise util.Abort(_("cannot push to a previous patch: %s") %
763 if info[0] < len(self.applied) - 1:
753 patch)
764 raise util.Abort(
754 if info[0] < len(self.series) - 1:
765 _("cannot push to a previous patch: %s") % patch)
755 self.ui.warn(_('qpush: %s is already at the top\n') % patch)
766 if info[0] < len(self.series) - 1:
756 else:
767 self.ui.warn(
757 self.ui.warn(_('all patches are currently applied\n'))
768 _('qpush: %s is already at the top\n') % patch)
758 return
769 else:
770 self.ui.warn(_('all patches are currently applied\n'))
771 return
759
772
760 # Following the above example, starting at 'top' of B:
773 # Following the above example, starting at 'top' of B:
761 # qpush should be performed (pushes C), but a subsequent qpush without
774 # qpush should be performed (pushes C), but a subsequent
762 # an argument is an error (nothing to apply). This allows a loop
775 # qpush without an argument is an error (nothing to
763 # of "...while hg qpush..." to work as it detects an error when done
776 # apply). This allows a loop of "...while hg qpush..." to
764 if self.series_end() == len(self.series):
777 # work as it detects an error when done
765 self.ui.warn(_('patch series already fully applied\n'))
778 if self.series_end() == len(self.series):
766 return 1
779 self.ui.warn(_('patch series already fully applied\n'))
767 if not force:
780 return 1
768 self.check_localchanges(repo)
781 if not force:
782 self.check_localchanges(repo)
769
783
770 self.applied_dirty = 1;
784 self.applied_dirty = 1;
771 start = self.series_end()
785 start = self.series_end()
772 if start > 0:
786 if start > 0:
773 self.check_toppatch(repo)
787 self.check_toppatch(repo)
774 if not patch:
788 if not patch:
775 patch = self.series[start]
789 patch = self.series[start]
776 end = start + 1
790 end = start + 1
777 else:
778 end = self.series.index(patch, start) + 1
779 s = self.series[start:end]
780 all_files = {}
781 try:
782 if mergeq:
783 ret = self.mergepatch(repo, mergeq, s, wlock)
784 else:
791 else:
785 ret = self.apply(repo, s, list, wlock=wlock,
792 end = self.series.index(patch, start) + 1
786 all_files=all_files)
793 s = self.series[start:end]
787 except:
794 all_files = {}
788 self.ui.warn(_('cleaning up working directory...'))
795 try:
789 node = repo.dirstate.parents()[0]
796 if mergeq:
790 hg.revert(repo, node, None, wlock)
797 ret = self.mergepatch(repo, mergeq, s, wlock)
791 unknown = repo.status(wlock=wlock)[4]
798 else:
792 # only remove unknown files that we know we touched or
799 ret = self.apply(repo, s, list, wlock=wlock,
793 # created while patching
800 all_files=all_files)
794 for f in unknown:
801 except:
795 if f in all_files:
802 self.ui.warn(_('cleaning up working directory...'))
796 util.unlink(repo.wjoin(f))
803 node = repo.dirstate.parents()[0]
797 self.ui.warn(_('done\n'))
804 hg.revert(repo, node, None, wlock)
798 raise
805 unknown = repo.status(wlock=wlock)[4]
799 top = self.applied[-1].name
806 # only remove unknown files that we know we touched or
800 if ret[0]:
807 # created while patching
801 self.ui.write("Errors during apply, please fix and refresh %s\n" %
808 for f in unknown:
802 top)
809 if f in all_files:
803 else:
810 util.unlink(repo.wjoin(f))
804 self.ui.write("Now at: %s\n" % top)
811 self.ui.warn(_('done\n'))
805 return ret[0]
812 raise
813 top = self.applied[-1].name
814 if ret[0]:
815 self.ui.write(
816 "Errors during apply, please fix and refresh %s\n" % top)
817 else:
818 self.ui.write("Now at: %s\n" % top)
819 return ret[0]
820 finally:
821 del wlock
806
822
807 def pop(self, repo, patch=None, force=False, update=True, all=False,
823 def pop(self, repo, patch=None, force=False, update=True, all=False,
808 wlock=None):
824 wlock=None):
809 def getfile(f, rev):
825 def getfile(f, rev):
810 t = repo.file(f).read(rev)
826 t = repo.file(f).read(rev)
811 repo.wfile(f, "w").write(t)
827 repo.wfile(f, "w").write(t)
812
828
813 if not wlock:
829 if not wlock:
814 wlock = repo.wlock()
830 wlock = repo.wlock()
815 if patch:
831 try:
816 # index, rev, patch
832 if patch:
817 info = self.isapplied(patch)
833 # index, rev, patch
818 if not info:
834 info = self.isapplied(patch)
819 patch = self.lookup(patch)
835 if not info:
820 info = self.isapplied(patch)
836 patch = self.lookup(patch)
821 if not info:
837 info = self.isapplied(patch)
822 raise util.Abort(_("patch %s is not applied") % patch)
838 if not info:
839 raise util.Abort(_("patch %s is not applied") % patch)
823
840
824 if len(self.applied) == 0:
841 if len(self.applied) == 0:
825 # Allow qpop -a to work repeatedly,
842 # Allow qpop -a to work repeatedly,
826 # but not qpop without an argument
843 # but not qpop without an argument
827 self.ui.warn(_("no patches applied\n"))
844 self.ui.warn(_("no patches applied\n"))
828 return not all
845 return not all
829
846
830 if not update:
847 if not update:
831 parents = repo.dirstate.parents()
848 parents = repo.dirstate.parents()
832 rr = [ revlog.bin(x.rev) for x in self.applied ]
849 rr = [ revlog.bin(x.rev) for x in self.applied ]
833 for p in parents:
850 for p in parents:
834 if p in rr:
851 if p in rr:
835 self.ui.warn("qpop: forcing dirstate update\n")
852 self.ui.warn("qpop: forcing dirstate update\n")
836 update = True
853 update = True
837
854
838 if not force and update:
855 if not force and update:
839 self.check_localchanges(repo)
856 self.check_localchanges(repo)
840
857
841 self.applied_dirty = 1;
858 self.applied_dirty = 1;
842 end = len(self.applied)
859 end = len(self.applied)
843 if not patch:
860 if not patch:
844 if all:
861 if all:
845 popi = 0
862 popi = 0
863 else:
864 popi = len(self.applied) - 1
846 else:
865 else:
847 popi = len(self.applied) - 1
866 popi = info[0] + 1
848 else:
867 if popi >= end:
849 popi = info[0] + 1
868 self.ui.warn("qpop: %s is already at the top\n" % patch)
850 if popi >= end:
869 return
851 self.ui.warn("qpop: %s is already at the top\n" % patch)
870 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
852 return
853 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
854
871
855 start = info[0]
872 start = info[0]
856 rev = revlog.bin(info[1])
873 rev = revlog.bin(info[1])
857
874
858 # we know there are no local changes, so we can make a simplified
875 # we know there are no local changes, so we can make a simplified
859 # form of hg.update.
876 # form of hg.update.
860 if update:
877 if update:
861 top = self.check_toppatch(repo)
878 top = self.check_toppatch(repo)
862 qp = self.qparents(repo, rev)
879 qp = self.qparents(repo, rev)
863 changes = repo.changelog.read(qp)
880 changes = repo.changelog.read(qp)
864 mmap = repo.manifest.read(changes[0])
881 mmap = repo.manifest.read(changes[0])
865 m, a, r, d, u = repo.status(qp, top)[:5]
882 m, a, r, d, u = repo.status(qp, top)[:5]
866 if d:
883 if d:
867 raise util.Abort("deletions found between repo revs")
884 raise util.Abort("deletions found between repo revs")
868 for f in m:
885 for f in m:
869 getfile(f, mmap[f])
886 getfile(f, mmap[f])
870 for f in r:
887 for f in r:
871 getfile(f, mmap[f])
888 getfile(f, mmap[f])
872 util.set_exec(repo.wjoin(f), mmap.execf(f))
889 util.set_exec(repo.wjoin(f), mmap.execf(f))
873 for f in m + r:
890 for f in m + r:
874 repo.dirstate.normal(f)
891 repo.dirstate.normal(f)
875 for f in a:
892 for f in a:
876 try:
893 try:
877 os.unlink(repo.wjoin(f))
894 os.unlink(repo.wjoin(f))
878 except OSError, e:
895 except OSError, e:
879 if e.errno != errno.ENOENT:
896 if e.errno != errno.ENOENT:
880 raise
897 raise
881 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
898 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
882 except: pass
899 except: pass
883 repo.dirstate.forget(f)
900 repo.dirstate.forget(f)
884 repo.dirstate.setparents(qp, revlog.nullid)
901 repo.dirstate.setparents(qp, revlog.nullid)
885 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
902 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
886 del self.applied[start:end]
903 del self.applied[start:end]
887 if len(self.applied):
904 if len(self.applied):
888 self.ui.write("Now at: %s\n" % self.applied[-1].name)
905 self.ui.write("Now at: %s\n" % self.applied[-1].name)
889 else:
906 else:
890 self.ui.write("Patch queue now empty\n")
907 self.ui.write("Patch queue now empty\n")
908 finally:
909 del wlock
891
910
892 def diff(self, repo, pats, opts):
911 def diff(self, repo, pats, opts):
893 top = self.check_toppatch(repo)
912 top = self.check_toppatch(repo)
894 if not top:
913 if not top:
895 self.ui.write("No patches applied\n")
914 self.ui.write("No patches applied\n")
896 return
915 return
897 qp = self.qparents(repo, top)
916 qp = self.qparents(repo, top)
898 if opts.get('git'):
917 if opts.get('git'):
899 self.diffopts().git = True
918 self.diffopts().git = True
900 self.printdiff(repo, qp, files=pats, opts=opts)
919 self.printdiff(repo, qp, files=pats, opts=opts)
901
920
902 def refresh(self, repo, pats=None, **opts):
921 def refresh(self, repo, pats=None, **opts):
903 if len(self.applied) == 0:
922 if len(self.applied) == 0:
904 self.ui.write("No patches applied\n")
923 self.ui.write("No patches applied\n")
905 return 1
924 return 1
906 wlock = repo.wlock()
925 wlock = repo.wlock()
907 self.check_toppatch(repo)
926 try:
908 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
927 self.check_toppatch(repo)
909 top = revlog.bin(top)
928 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
910 cparents = repo.changelog.parents(top)
929 top = revlog.bin(top)
911 patchparent = self.qparents(repo, top)
930 cparents = repo.changelog.parents(top)
912 message, comments, user, date, patchfound = self.readheaders(patchfn)
931 patchparent = self.qparents(repo, top)
913
932 message, comments, user, date, patchfound = self.readheaders(patchfn)
914 patchf = self.opener(patchfn, 'r+')
915
916 # if the patch was a git patch, refresh it as a git patch
917 for line in patchf:
918 if line.startswith('diff --git'):
919 self.diffopts().git = True
920 break
921 patchf.seek(0)
922 patchf.truncate()
923
933
924 msg = opts.get('msg', '').rstrip()
934 patchf = self.opener(patchfn, 'r+')
925 if msg:
935
926 if comments:
936 # if the patch was a git patch, refresh it as a git patch
927 # Remove existing message.
937 for line in patchf:
928 ci = 0
938 if line.startswith('diff --git'):
929 subj = None
939 self.diffopts().git = True
930 for mi in xrange(len(message)):
940 break
931 if comments[ci].lower().startswith('subject: '):
941 patchf.seek(0)
932 subj = comments[ci][9:]
942 patchf.truncate()
933 while message[mi] != comments[ci] and message[mi] != subj:
934 ci += 1
935 del comments[ci]
936 comments.append(msg)
937 if comments:
938 comments = "\n".join(comments) + '\n\n'
939 patchf.write(comments)
940
943
941 if opts.get('git'):
944 msg = opts.get('msg', '').rstrip()
942 self.diffopts().git = True
945 if msg:
943 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
946 if comments:
944 tip = repo.changelog.tip()
947 # Remove existing message.
945 if top == tip:
948 ci = 0
946 # if the top of our patch queue is also the tip, there is an
949 subj = None
947 # optimization here. We update the dirstate in place and strip
950 for mi in xrange(len(message)):
948 # off the tip commit. Then just commit the current directory
951 if comments[ci].lower().startswith('subject: '):
949 # tree. We can also send repo.commit the list of files
952 subj = comments[ci][9:]
950 # changed to speed up the diff
953 while message[mi] != comments[ci] and message[mi] != subj:
951 #
954 ci += 1
952 # in short mode, we only diff the files included in the
955 del comments[ci]
953 # patch already
956 comments.append(msg)
954 #
957 if comments:
955 # this should really read:
958 comments = "\n".join(comments) + '\n\n'
956 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
959 patchf.write(comments)
957 # but we do it backwards to take advantage of manifest/chlog
958 # caching against the next repo.status call
959 #
960 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
961 changes = repo.changelog.read(tip)
962 man = repo.manifest.read(changes[0])
963 aaa = aa[:]
964 if opts.get('short'):
965 filelist = mm + aa + dd
966 match = dict.fromkeys(filelist).__contains__
967 else:
968 filelist = None
969 match = util.always
970 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
971
960
972 # we might end up with files that were added between tip and
961 if opts.get('git'):
973 # the dirstate parent, but then changed in the local dirstate.
962 self.diffopts().git = True
974 # in this case, we want them to only show up in the added section
963 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
975 for x in m:
964 tip = repo.changelog.tip()
976 if x not in aa:
965 if top == tip:
977 mm.append(x)
966 # if the top of our patch queue is also the tip, there is an
978 # we might end up with files added by the local dirstate that
967 # optimization here. We update the dirstate in place and strip
979 # were deleted by the patch. In this case, they should only
968 # off the tip commit. Then just commit the current directory
980 # show up in the changed section.
969 # tree. We can also send repo.commit the list of files
981 for x in a:
970 # changed to speed up the diff
982 if x in dd:
971 #
983 del dd[dd.index(x)]
972 # in short mode, we only diff the files included in the
984 mm.append(x)
973 # patch already
974 #
975 # this should really read:
976 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
977 # but we do it backwards to take advantage of manifest/chlog
978 # caching against the next repo.status call
979 #
980 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
981 changes = repo.changelog.read(tip)
982 man = repo.manifest.read(changes[0])
983 aaa = aa[:]
984 if opts.get('short'):
985 filelist = mm + aa + dd
986 match = dict.fromkeys(filelist).__contains__
985 else:
987 else:
986 aa.append(x)
988 filelist = None
987 # make sure any files deleted in the local dirstate
989 match = util.always
988 # are not in the add or change column of the patch
990 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
989 forget = []
990 for x in d + r:
991 if x in aa:
992 del aa[aa.index(x)]
993 forget.append(x)
994 continue
995 elif x in mm:
996 del mm[mm.index(x)]
997 dd.append(x)
998
991
999 m = util.unique(mm)
992 # we might end up with files that were added between
1000 r = util.unique(dd)
993 # tip and the dirstate parent, but then changed in the
1001 a = util.unique(aa)
994 # local dirstate. in this case, we want them to only
1002 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
995 # show up in the added section
1003 filelist = util.unique(c[0] + c[1] + c[2])
996 for x in m:
1004 patch.diff(repo, patchparent, files=filelist, match=matchfn,
997 if x not in aa:
1005 fp=patchf, changes=c, opts=self.diffopts())
998 mm.append(x)
1006 patchf.close()
999 # we might end up with files added by the local dirstate that
1000 # were deleted by the patch. In this case, they should only
1001 # show up in the changed section.
1002 for x in a:
1003 if x in dd:
1004 del dd[dd.index(x)]
1005 mm.append(x)
1006 else:
1007 aa.append(x)
1008 # make sure any files deleted in the local dirstate
1009 # are not in the add or change column of the patch
1010 forget = []
1011 for x in d + r:
1012 if x in aa:
1013 del aa[aa.index(x)]
1014 forget.append(x)
1015 continue
1016 elif x in mm:
1017 del mm[mm.index(x)]
1018 dd.append(x)
1019
1020 m = util.unique(mm)
1021 r = util.unique(dd)
1022 a = util.unique(aa)
1023 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1024 filelist = util.unique(c[0] + c[1] + c[2])
1025 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1026 fp=patchf, changes=c, opts=self.diffopts())
1027 patchf.close()
1007
1028
1008 repo.dirstate.setparents(*cparents)
1029 repo.dirstate.setparents(*cparents)
1009 copies = {}
1030 copies = {}
1010 for dst in a:
1031 for dst in a:
1011 src = repo.dirstate.copied(dst)
1032 src = repo.dirstate.copied(dst)
1012 if src is None:
1033 if src is None:
1013 continue
1034 continue
1014 copies.setdefault(src, []).append(dst)
1035 copies.setdefault(src, []).append(dst)
1015 repo.dirstate.add(dst)
1036 repo.dirstate.add(dst)
1016 # remember the copies between patchparent and tip
1037 # remember the copies between patchparent and tip
1017 # this may be slow, so don't do it if we're not tracking copies
1038 # this may be slow, so don't do it if we're not tracking copies
1018 if self.diffopts().git:
1039 if self.diffopts().git:
1019 for dst in aaa:
1040 for dst in aaa:
1020 f = repo.file(dst)
1041 f = repo.file(dst)
1021 src = f.renamed(man[dst])
1042 src = f.renamed(man[dst])
1022 if src:
1043 if src:
1023 copies[src[0]] = copies.get(dst, [])
1044 copies[src[0]] = copies.get(dst, [])
1024 if dst in a:
1045 if dst in a:
1025 copies[src[0]].append(dst)
1046 copies[src[0]].append(dst)
1026 # we can't copy a file created by the patch itself
1047 # we can't copy a file created by the patch itself
1027 if dst in copies:
1048 if dst in copies:
1028 del copies[dst]
1049 del copies[dst]
1029 for src, dsts in copies.iteritems():
1050 for src, dsts in copies.iteritems():
1030 for dst in dsts:
1051 for dst in dsts:
1031 repo.dirstate.copy(src, dst)
1052 repo.dirstate.copy(src, dst)
1032 for f in r:
1053 for f in r:
1033 repo.dirstate.remove(f)
1054 repo.dirstate.remove(f)
1034 # if the patch excludes a modified file, mark that file with mtime=0
1055 # if the patch excludes a modified file, mark that
1035 # so status can see it.
1056 # file with mtime=0 so status can see it.
1036 mm = []
1057 mm = []
1037 for i in xrange(len(m)-1, -1, -1):
1058 for i in xrange(len(m)-1, -1, -1):
1038 if not matchfn(m[i]):
1059 if not matchfn(m[i]):
1039 mm.append(m[i])
1060 mm.append(m[i])
1040 del m[i]
1061 del m[i]
1041 for f in m:
1062 for f in m:
1042 repo.dirstate.normal(f)
1063 repo.dirstate.normal(f)
1043 for f in mm:
1064 for f in mm:
1044 repo.dirstate.normaldirty(f)
1065 repo.dirstate.normaldirty(f)
1045 for f in forget:
1066 for f in forget:
1046 repo.dirstate.forget(f)
1067 repo.dirstate.forget(f)
1047
1068
1048 if not msg:
1069 if not msg:
1049 if not message:
1070 if not message:
1050 message = "[mq]: %s\n" % patchfn
1071 message = "[mq]: %s\n" % patchfn
1072 else:
1073 message = "\n".join(message)
1051 else:
1074 else:
1052 message = "\n".join(message)
1075 message = msg
1053 else:
1054 message = msg
1055
1076
1056 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1077 self.strip(repo, top, update=False,
1057 n = repo.commit(filelist, message, changes[1], match=matchfn,
1078 backup='strip', wlock=wlock)
1058 force=1, wlock=wlock)
1079 n = repo.commit(filelist, message, changes[1], match=matchfn,
1059 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1080 force=1, wlock=wlock)
1060 self.applied_dirty = 1
1081 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1061 self.removeundo(repo)
1082 self.applied_dirty = 1
1062 else:
1083 self.removeundo(repo)
1063 self.printdiff(repo, patchparent, fp=patchf)
1084 else:
1064 patchf.close()
1085 self.printdiff(repo, patchparent, fp=patchf)
1065 added = repo.status()[1]
1086 patchf.close()
1066 for a in added:
1087 added = repo.status()[1]
1067 f = repo.wjoin(a)
1088 for a in added:
1068 try:
1089 f = repo.wjoin(a)
1069 os.unlink(f)
1090 try:
1070 except OSError, e:
1091 os.unlink(f)
1071 if e.errno != errno.ENOENT:
1092 except OSError, e:
1072 raise
1093 if e.errno != errno.ENOENT:
1073 try: os.removedirs(os.path.dirname(f))
1094 raise
1074 except: pass
1095 try: os.removedirs(os.path.dirname(f))
1075 # forget the file copies in the dirstate
1096 except: pass
1076 # push should readd the files later on
1097 # forget the file copies in the dirstate
1077 repo.dirstate.forget(a)
1098 # push should readd the files later on
1078 self.pop(repo, force=True, wlock=wlock)
1099 repo.dirstate.forget(a)
1079 self.push(repo, force=True, wlock=wlock)
1100 self.pop(repo, force=True, wlock=wlock)
1101 self.push(repo, force=True, wlock=wlock)
1102 finally:
1103 del wlock
1080
1104
1081 def init(self, repo, create=False):
1105 def init(self, repo, create=False):
1082 if not create and os.path.isdir(self.path):
1106 if not create and os.path.isdir(self.path):
1083 raise util.Abort(_("patch queue directory already exists"))
1107 raise util.Abort(_("patch queue directory already exists"))
1084 try:
1108 try:
1085 os.mkdir(self.path)
1109 os.mkdir(self.path)
1086 except OSError, inst:
1110 except OSError, inst:
1087 if inst.errno != errno.EEXIST or not create:
1111 if inst.errno != errno.EEXIST or not create:
1088 raise
1112 raise
1089 if create:
1113 if create:
1090 return self.qrepo(create=True)
1114 return self.qrepo(create=True)
1091
1115
1092 def unapplied(self, repo, patch=None):
1116 def unapplied(self, repo, patch=None):
1093 if patch and patch not in self.series:
1117 if patch and patch not in self.series:
1094 raise util.Abort(_("patch %s is not in series file") % patch)
1118 raise util.Abort(_("patch %s is not in series file") % patch)
1095 if not patch:
1119 if not patch:
1096 start = self.series_end()
1120 start = self.series_end()
1097 else:
1121 else:
1098 start = self.series.index(patch) + 1
1122 start = self.series.index(patch) + 1
1099 unapplied = []
1123 unapplied = []
1100 for i in xrange(start, len(self.series)):
1124 for i in xrange(start, len(self.series)):
1101 pushable, reason = self.pushable(i)
1125 pushable, reason = self.pushable(i)
1102 if pushable:
1126 if pushable:
1103 unapplied.append((i, self.series[i]))
1127 unapplied.append((i, self.series[i]))
1104 self.explain_pushable(i)
1128 self.explain_pushable(i)
1105 return unapplied
1129 return unapplied
1106
1130
1107 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1131 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1108 summary=False):
1132 summary=False):
1109 def displayname(patchname):
1133 def displayname(patchname):
1110 if summary:
1134 if summary:
1111 msg = self.readheaders(patchname)[0]
1135 msg = self.readheaders(patchname)[0]
1112 msg = msg and ': ' + msg[0] or ': '
1136 msg = msg and ': ' + msg[0] or ': '
1113 else:
1137 else:
1114 msg = ''
1138 msg = ''
1115 return '%s%s' % (patchname, msg)
1139 return '%s%s' % (patchname, msg)
1116
1140
1117 applied = dict.fromkeys([p.name for p in self.applied])
1141 applied = dict.fromkeys([p.name for p in self.applied])
1118 if length is None:
1142 if length is None:
1119 length = len(self.series) - start
1143 length = len(self.series) - start
1120 if not missing:
1144 if not missing:
1121 for i in xrange(start, start+length):
1145 for i in xrange(start, start+length):
1122 patch = self.series[i]
1146 patch = self.series[i]
1123 if patch in applied:
1147 if patch in applied:
1124 stat = 'A'
1148 stat = 'A'
1125 elif self.pushable(i)[0]:
1149 elif self.pushable(i)[0]:
1126 stat = 'U'
1150 stat = 'U'
1127 else:
1151 else:
1128 stat = 'G'
1152 stat = 'G'
1129 pfx = ''
1153 pfx = ''
1130 if self.ui.verbose:
1154 if self.ui.verbose:
1131 pfx = '%d %s ' % (i, stat)
1155 pfx = '%d %s ' % (i, stat)
1132 elif status and status != stat:
1156 elif status and status != stat:
1133 continue
1157 continue
1134 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1158 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1135 else:
1159 else:
1136 msng_list = []
1160 msng_list = []
1137 for root, dirs, files in os.walk(self.path):
1161 for root, dirs, files in os.walk(self.path):
1138 d = root[len(self.path) + 1:]
1162 d = root[len(self.path) + 1:]
1139 for f in files:
1163 for f in files:
1140 fl = os.path.join(d, f)
1164 fl = os.path.join(d, f)
1141 if (fl not in self.series and
1165 if (fl not in self.series and
1142 fl not in (self.status_path, self.series_path,
1166 fl not in (self.status_path, self.series_path,
1143 self.guards_path)
1167 self.guards_path)
1144 and not fl.startswith('.')):
1168 and not fl.startswith('.')):
1145 msng_list.append(fl)
1169 msng_list.append(fl)
1146 msng_list.sort()
1170 msng_list.sort()
1147 for x in msng_list:
1171 for x in msng_list:
1148 pfx = self.ui.verbose and ('D ') or ''
1172 pfx = self.ui.verbose and ('D ') or ''
1149 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1173 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1150
1174
1151 def issaveline(self, l):
1175 def issaveline(self, l):
1152 if l.name == '.hg.patches.save.line':
1176 if l.name == '.hg.patches.save.line':
1153 return True
1177 return True
1154
1178
1155 def qrepo(self, create=False):
1179 def qrepo(self, create=False):
1156 if create or os.path.isdir(self.join(".hg")):
1180 if create or os.path.isdir(self.join(".hg")):
1157 return hg.repository(self.ui, path=self.path, create=create)
1181 return hg.repository(self.ui, path=self.path, create=create)
1158
1182
1159 def restore(self, repo, rev, delete=None, qupdate=None):
1183 def restore(self, repo, rev, delete=None, qupdate=None):
1160 c = repo.changelog.read(rev)
1184 c = repo.changelog.read(rev)
1161 desc = c[4].strip()
1185 desc = c[4].strip()
1162 lines = desc.splitlines()
1186 lines = desc.splitlines()
1163 i = 0
1187 i = 0
1164 datastart = None
1188 datastart = None
1165 series = []
1189 series = []
1166 applied = []
1190 applied = []
1167 qpp = None
1191 qpp = None
1168 for i in xrange(0, len(lines)):
1192 for i in xrange(0, len(lines)):
1169 if lines[i] == 'Patch Data:':
1193 if lines[i] == 'Patch Data:':
1170 datastart = i + 1
1194 datastart = i + 1
1171 elif lines[i].startswith('Dirstate:'):
1195 elif lines[i].startswith('Dirstate:'):
1172 l = lines[i].rstrip()
1196 l = lines[i].rstrip()
1173 l = l[10:].split(' ')
1197 l = l[10:].split(' ')
1174 qpp = [ hg.bin(x) for x in l ]
1198 qpp = [ hg.bin(x) for x in l ]
1175 elif datastart != None:
1199 elif datastart != None:
1176 l = lines[i].rstrip()
1200 l = lines[i].rstrip()
1177 se = statusentry(l)
1201 se = statusentry(l)
1178 file_ = se.name
1202 file_ = se.name
1179 if se.rev:
1203 if se.rev:
1180 applied.append(se)
1204 applied.append(se)
1181 else:
1205 else:
1182 series.append(file_)
1206 series.append(file_)
1183 if datastart == None:
1207 if datastart == None:
1184 self.ui.warn("No saved patch data found\n")
1208 self.ui.warn("No saved patch data found\n")
1185 return 1
1209 return 1
1186 self.ui.warn("restoring status: %s\n" % lines[0])
1210 self.ui.warn("restoring status: %s\n" % lines[0])
1187 self.full_series = series
1211 self.full_series = series
1188 self.applied = applied
1212 self.applied = applied
1189 self.parse_series()
1213 self.parse_series()
1190 self.series_dirty = 1
1214 self.series_dirty = 1
1191 self.applied_dirty = 1
1215 self.applied_dirty = 1
1192 heads = repo.changelog.heads()
1216 heads = repo.changelog.heads()
1193 if delete:
1217 if delete:
1194 if rev not in heads:
1218 if rev not in heads:
1195 self.ui.warn("save entry has children, leaving it alone\n")
1219 self.ui.warn("save entry has children, leaving it alone\n")
1196 else:
1220 else:
1197 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1221 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1198 pp = repo.dirstate.parents()
1222 pp = repo.dirstate.parents()
1199 if rev in pp:
1223 if rev in pp:
1200 update = True
1224 update = True
1201 else:
1225 else:
1202 update = False
1226 update = False
1203 self.strip(repo, rev, update=update, backup='strip')
1227 self.strip(repo, rev, update=update, backup='strip')
1204 if qpp:
1228 if qpp:
1205 self.ui.warn("saved queue repository parents: %s %s\n" %
1229 self.ui.warn("saved queue repository parents: %s %s\n" %
1206 (hg.short(qpp[0]), hg.short(qpp[1])))
1230 (hg.short(qpp[0]), hg.short(qpp[1])))
1207 if qupdate:
1231 if qupdate:
1208 print "queue directory updating"
1232 print "queue directory updating"
1209 r = self.qrepo()
1233 r = self.qrepo()
1210 if not r:
1234 if not r:
1211 self.ui.warn("Unable to load queue repository\n")
1235 self.ui.warn("Unable to load queue repository\n")
1212 return 1
1236 return 1
1213 hg.clean(r, qpp[0])
1237 hg.clean(r, qpp[0])
1214
1238
1215 def save(self, repo, msg=None):
1239 def save(self, repo, msg=None):
1216 if len(self.applied) == 0:
1240 if len(self.applied) == 0:
1217 self.ui.warn("save: no patches applied, exiting\n")
1241 self.ui.warn("save: no patches applied, exiting\n")
1218 return 1
1242 return 1
1219 if self.issaveline(self.applied[-1]):
1243 if self.issaveline(self.applied[-1]):
1220 self.ui.warn("status is already saved\n")
1244 self.ui.warn("status is already saved\n")
1221 return 1
1245 return 1
1222
1246
1223 ar = [ ':' + x for x in self.full_series ]
1247 ar = [ ':' + x for x in self.full_series ]
1224 if not msg:
1248 if not msg:
1225 msg = "hg patches saved state"
1249 msg = "hg patches saved state"
1226 else:
1250 else:
1227 msg = "hg patches: " + msg.rstrip('\r\n')
1251 msg = "hg patches: " + msg.rstrip('\r\n')
1228 r = self.qrepo()
1252 r = self.qrepo()
1229 if r:
1253 if r:
1230 pp = r.dirstate.parents()
1254 pp = r.dirstate.parents()
1231 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1255 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1232 msg += "\n\nPatch Data:\n"
1256 msg += "\n\nPatch Data:\n"
1233 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1257 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1234 "\n".join(ar) + '\n' or "")
1258 "\n".join(ar) + '\n' or "")
1235 n = repo.commit(None, text, user=None, force=1)
1259 n = repo.commit(None, text, user=None, force=1)
1236 if not n:
1260 if not n:
1237 self.ui.warn("repo commit failed\n")
1261 self.ui.warn("repo commit failed\n")
1238 return 1
1262 return 1
1239 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1263 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1240 self.applied_dirty = 1
1264 self.applied_dirty = 1
1241 self.removeundo(repo)
1265 self.removeundo(repo)
1242
1266
1243 def full_series_end(self):
1267 def full_series_end(self):
1244 if len(self.applied) > 0:
1268 if len(self.applied) > 0:
1245 p = self.applied[-1].name
1269 p = self.applied[-1].name
1246 end = self.find_series(p)
1270 end = self.find_series(p)
1247 if end == None:
1271 if end == None:
1248 return len(self.full_series)
1272 return len(self.full_series)
1249 return end + 1
1273 return end + 1
1250 return 0
1274 return 0
1251
1275
1252 def series_end(self, all_patches=False):
1276 def series_end(self, all_patches=False):
1253 """If all_patches is False, return the index of the next pushable patch
1277 """If all_patches is False, return the index of the next pushable patch
1254 in the series, or the series length. If all_patches is True, return the
1278 in the series, or the series length. If all_patches is True, return the
1255 index of the first patch past the last applied one.
1279 index of the first patch past the last applied one.
1256 """
1280 """
1257 end = 0
1281 end = 0
1258 def next(start):
1282 def next(start):
1259 if all_patches:
1283 if all_patches:
1260 return start
1284 return start
1261 i = start
1285 i = start
1262 while i < len(self.series):
1286 while i < len(self.series):
1263 p, reason = self.pushable(i)
1287 p, reason = self.pushable(i)
1264 if p:
1288 if p:
1265 break
1289 break
1266 self.explain_pushable(i)
1290 self.explain_pushable(i)
1267 i += 1
1291 i += 1
1268 return i
1292 return i
1269 if len(self.applied) > 0:
1293 if len(self.applied) > 0:
1270 p = self.applied[-1].name
1294 p = self.applied[-1].name
1271 try:
1295 try:
1272 end = self.series.index(p)
1296 end = self.series.index(p)
1273 except ValueError:
1297 except ValueError:
1274 return 0
1298 return 0
1275 return next(end + 1)
1299 return next(end + 1)
1276 return next(end)
1300 return next(end)
1277
1301
1278 def appliedname(self, index):
1302 def appliedname(self, index):
1279 pname = self.applied[index].name
1303 pname = self.applied[index].name
1280 if not self.ui.verbose:
1304 if not self.ui.verbose:
1281 p = pname
1305 p = pname
1282 else:
1306 else:
1283 p = str(self.series.index(pname)) + " " + pname
1307 p = str(self.series.index(pname)) + " " + pname
1284 return p
1308 return p
1285
1309
1286 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1310 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1287 force=None, git=False):
1311 force=None, git=False):
1288 def checkseries(patchname):
1312 def checkseries(patchname):
1289 if patchname in self.series:
1313 if patchname in self.series:
1290 raise util.Abort(_('patch %s is already in the series file')
1314 raise util.Abort(_('patch %s is already in the series file')
1291 % patchname)
1315 % patchname)
1292 def checkfile(patchname):
1316 def checkfile(patchname):
1293 if not force and os.path.exists(self.join(patchname)):
1317 if not force and os.path.exists(self.join(patchname)):
1294 raise util.Abort(_('patch "%s" already exists')
1318 raise util.Abort(_('patch "%s" already exists')
1295 % patchname)
1319 % patchname)
1296
1320
1297 if rev:
1321 if rev:
1298 if files:
1322 if files:
1299 raise util.Abort(_('option "-r" not valid when importing '
1323 raise util.Abort(_('option "-r" not valid when importing '
1300 'files'))
1324 'files'))
1301 rev = cmdutil.revrange(repo, rev)
1325 rev = cmdutil.revrange(repo, rev)
1302 rev.sort(lambda x, y: cmp(y, x))
1326 rev.sort(lambda x, y: cmp(y, x))
1303 if (len(files) > 1 or len(rev) > 1) and patchname:
1327 if (len(files) > 1 or len(rev) > 1) and patchname:
1304 raise util.Abort(_('option "-n" not valid when importing multiple '
1328 raise util.Abort(_('option "-n" not valid when importing multiple '
1305 'patches'))
1329 'patches'))
1306 i = 0
1330 i = 0
1307 added = []
1331 added = []
1308 if rev:
1332 if rev:
1309 # If mq patches are applied, we can only import revisions
1333 # If mq patches are applied, we can only import revisions
1310 # that form a linear path to qbase.
1334 # that form a linear path to qbase.
1311 # Otherwise, they should form a linear path to a head.
1335 # Otherwise, they should form a linear path to a head.
1312 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1336 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1313 if len(heads) > 1:
1337 if len(heads) > 1:
1314 raise util.Abort(_('revision %d is the root of more than one '
1338 raise util.Abort(_('revision %d is the root of more than one '
1315 'branch') % rev[-1])
1339 'branch') % rev[-1])
1316 if self.applied:
1340 if self.applied:
1317 base = revlog.hex(repo.changelog.node(rev[0]))
1341 base = revlog.hex(repo.changelog.node(rev[0]))
1318 if base in [n.rev for n in self.applied]:
1342 if base in [n.rev for n in self.applied]:
1319 raise util.Abort(_('revision %d is already managed')
1343 raise util.Abort(_('revision %d is already managed')
1320 % rev[0])
1344 % rev[0])
1321 if heads != [revlog.bin(self.applied[-1].rev)]:
1345 if heads != [revlog.bin(self.applied[-1].rev)]:
1322 raise util.Abort(_('revision %d is not the parent of '
1346 raise util.Abort(_('revision %d is not the parent of '
1323 'the queue') % rev[0])
1347 'the queue') % rev[0])
1324 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1348 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1325 lastparent = repo.changelog.parentrevs(base)[0]
1349 lastparent = repo.changelog.parentrevs(base)[0]
1326 else:
1350 else:
1327 if heads != [repo.changelog.node(rev[0])]:
1351 if heads != [repo.changelog.node(rev[0])]:
1328 raise util.Abort(_('revision %d has unmanaged children')
1352 raise util.Abort(_('revision %d has unmanaged children')
1329 % rev[0])
1353 % rev[0])
1330 lastparent = None
1354 lastparent = None
1331
1355
1332 if git:
1356 if git:
1333 self.diffopts().git = True
1357 self.diffopts().git = True
1334
1358
1335 for r in rev:
1359 for r in rev:
1336 p1, p2 = repo.changelog.parentrevs(r)
1360 p1, p2 = repo.changelog.parentrevs(r)
1337 n = repo.changelog.node(r)
1361 n = repo.changelog.node(r)
1338 if p2 != revlog.nullrev:
1362 if p2 != revlog.nullrev:
1339 raise util.Abort(_('cannot import merge revision %d') % r)
1363 raise util.Abort(_('cannot import merge revision %d') % r)
1340 if lastparent and lastparent != r:
1364 if lastparent and lastparent != r:
1341 raise util.Abort(_('revision %d is not the parent of %d')
1365 raise util.Abort(_('revision %d is not the parent of %d')
1342 % (r, lastparent))
1366 % (r, lastparent))
1343 lastparent = p1
1367 lastparent = p1
1344
1368
1345 if not patchname:
1369 if not patchname:
1346 patchname = normname('%d.diff' % r)
1370 patchname = normname('%d.diff' % r)
1347 checkseries(patchname)
1371 checkseries(patchname)
1348 checkfile(patchname)
1372 checkfile(patchname)
1349 self.full_series.insert(0, patchname)
1373 self.full_series.insert(0, patchname)
1350
1374
1351 patchf = self.opener(patchname, "w")
1375 patchf = self.opener(patchname, "w")
1352 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1376 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1353 patchf.close()
1377 patchf.close()
1354
1378
1355 se = statusentry(revlog.hex(n), patchname)
1379 se = statusentry(revlog.hex(n), patchname)
1356 self.applied.insert(0, se)
1380 self.applied.insert(0, se)
1357
1381
1358 added.append(patchname)
1382 added.append(patchname)
1359 patchname = None
1383 patchname = None
1360 self.parse_series()
1384 self.parse_series()
1361 self.applied_dirty = 1
1385 self.applied_dirty = 1
1362
1386
1363 for filename in files:
1387 for filename in files:
1364 if existing:
1388 if existing:
1365 if filename == '-':
1389 if filename == '-':
1366 raise util.Abort(_('-e is incompatible with import from -'))
1390 raise util.Abort(_('-e is incompatible with import from -'))
1367 if not patchname:
1391 if not patchname:
1368 patchname = normname(filename)
1392 patchname = normname(filename)
1369 if not os.path.isfile(self.join(patchname)):
1393 if not os.path.isfile(self.join(patchname)):
1370 raise util.Abort(_("patch %s does not exist") % patchname)
1394 raise util.Abort(_("patch %s does not exist") % patchname)
1371 else:
1395 else:
1372 try:
1396 try:
1373 if filename == '-':
1397 if filename == '-':
1374 if not patchname:
1398 if not patchname:
1375 raise util.Abort(_('need --name to import a patch from -'))
1399 raise util.Abort(_('need --name to import a patch from -'))
1376 text = sys.stdin.read()
1400 text = sys.stdin.read()
1377 else:
1401 else:
1378 text = file(filename).read()
1402 text = file(filename).read()
1379 except IOError:
1403 except IOError:
1380 raise util.Abort(_("unable to read %s") % patchname)
1404 raise util.Abort(_("unable to read %s") % patchname)
1381 if not patchname:
1405 if not patchname:
1382 patchname = normname(os.path.basename(filename))
1406 patchname = normname(os.path.basename(filename))
1383 checkfile(patchname)
1407 checkfile(patchname)
1384 patchf = self.opener(patchname, "w")
1408 patchf = self.opener(patchname, "w")
1385 patchf.write(text)
1409 patchf.write(text)
1386 checkseries(patchname)
1410 checkseries(patchname)
1387 index = self.full_series_end() + i
1411 index = self.full_series_end() + i
1388 self.full_series[index:index] = [patchname]
1412 self.full_series[index:index] = [patchname]
1389 self.parse_series()
1413 self.parse_series()
1390 self.ui.warn("adding %s to series file\n" % patchname)
1414 self.ui.warn("adding %s to series file\n" % patchname)
1391 i += 1
1415 i += 1
1392 added.append(patchname)
1416 added.append(patchname)
1393 patchname = None
1417 patchname = None
1394 self.series_dirty = 1
1418 self.series_dirty = 1
1395 qrepo = self.qrepo()
1419 qrepo = self.qrepo()
1396 if qrepo:
1420 if qrepo:
1397 qrepo.add(added)
1421 qrepo.add(added)
1398
1422
1399 def delete(ui, repo, *patches, **opts):
1423 def delete(ui, repo, *patches, **opts):
1400 """remove patches from queue
1424 """remove patches from queue
1401
1425
1402 The patches must not be applied, unless they are arguments to
1426 The patches must not be applied, unless they are arguments to
1403 the --rev parameter. At least one patch or revision is required.
1427 the --rev parameter. At least one patch or revision is required.
1404
1428
1405 With --rev, mq will stop managing the named revisions (converting
1429 With --rev, mq will stop managing the named revisions (converting
1406 them to regular mercurial changesets). The patches must be applied
1430 them to regular mercurial changesets). The patches must be applied
1407 and at the base of the stack. This option is useful when the patches
1431 and at the base of the stack. This option is useful when the patches
1408 have been applied upstream.
1432 have been applied upstream.
1409
1433
1410 With --keep, the patch files are preserved in the patch directory."""
1434 With --keep, the patch files are preserved in the patch directory."""
1411 q = repo.mq
1435 q = repo.mq
1412 q.delete(repo, patches, opts)
1436 q.delete(repo, patches, opts)
1413 q.save_dirty()
1437 q.save_dirty()
1414 return 0
1438 return 0
1415
1439
1416 def applied(ui, repo, patch=None, **opts):
1440 def applied(ui, repo, patch=None, **opts):
1417 """print the patches already applied"""
1441 """print the patches already applied"""
1418 q = repo.mq
1442 q = repo.mq
1419 if patch:
1443 if patch:
1420 if patch not in q.series:
1444 if patch not in q.series:
1421 raise util.Abort(_("patch %s is not in series file") % patch)
1445 raise util.Abort(_("patch %s is not in series file") % patch)
1422 end = q.series.index(patch) + 1
1446 end = q.series.index(patch) + 1
1423 else:
1447 else:
1424 end = q.series_end(True)
1448 end = q.series_end(True)
1425 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1449 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1426
1450
1427 def unapplied(ui, repo, patch=None, **opts):
1451 def unapplied(ui, repo, patch=None, **opts):
1428 """print the patches not yet applied"""
1452 """print the patches not yet applied"""
1429 q = repo.mq
1453 q = repo.mq
1430 if patch:
1454 if patch:
1431 if patch not in q.series:
1455 if patch not in q.series:
1432 raise util.Abort(_("patch %s is not in series file") % patch)
1456 raise util.Abort(_("patch %s is not in series file") % patch)
1433 start = q.series.index(patch) + 1
1457 start = q.series.index(patch) + 1
1434 else:
1458 else:
1435 start = q.series_end(True)
1459 start = q.series_end(True)
1436 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1460 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1437
1461
1438 def qimport(ui, repo, *filename, **opts):
1462 def qimport(ui, repo, *filename, **opts):
1439 """import a patch
1463 """import a patch
1440
1464
1441 The patch will have the same name as its source file unless you
1465 The patch will have the same name as its source file unless you
1442 give it a new one with --name.
1466 give it a new one with --name.
1443
1467
1444 You can register an existing patch inside the patch directory
1468 You can register an existing patch inside the patch directory
1445 with the --existing flag.
1469 with the --existing flag.
1446
1470
1447 With --force, an existing patch of the same name will be overwritten.
1471 With --force, an existing patch of the same name will be overwritten.
1448
1472
1449 An existing changeset may be placed under mq control with --rev
1473 An existing changeset may be placed under mq control with --rev
1450 (e.g. qimport --rev tip -n patch will place tip under mq control).
1474 (e.g. qimport --rev tip -n patch will place tip under mq control).
1451 With --git, patches imported with --rev will use the git diff
1475 With --git, patches imported with --rev will use the git diff
1452 format.
1476 format.
1453 """
1477 """
1454 q = repo.mq
1478 q = repo.mq
1455 q.qimport(repo, filename, patchname=opts['name'],
1479 q.qimport(repo, filename, patchname=opts['name'],
1456 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1480 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1457 git=opts['git'])
1481 git=opts['git'])
1458 q.save_dirty()
1482 q.save_dirty()
1459 return 0
1483 return 0
1460
1484
1461 def init(ui, repo, **opts):
1485 def init(ui, repo, **opts):
1462 """init a new queue repository
1486 """init a new queue repository
1463
1487
1464 The queue repository is unversioned by default. If -c is
1488 The queue repository is unversioned by default. If -c is
1465 specified, qinit will create a separate nested repository
1489 specified, qinit will create a separate nested repository
1466 for patches (qinit -c may also be run later to convert
1490 for patches (qinit -c may also be run later to convert
1467 an unversioned patch repository into a versioned one).
1491 an unversioned patch repository into a versioned one).
1468 You can use qcommit to commit changes to this queue repository."""
1492 You can use qcommit to commit changes to this queue repository."""
1469 q = repo.mq
1493 q = repo.mq
1470 r = q.init(repo, create=opts['create_repo'])
1494 r = q.init(repo, create=opts['create_repo'])
1471 q.save_dirty()
1495 q.save_dirty()
1472 if r:
1496 if r:
1473 if not os.path.exists(r.wjoin('.hgignore')):
1497 if not os.path.exists(r.wjoin('.hgignore')):
1474 fp = r.wopener('.hgignore', 'w')
1498 fp = r.wopener('.hgignore', 'w')
1475 fp.write('syntax: glob\n')
1499 fp.write('syntax: glob\n')
1476 fp.write('status\n')
1500 fp.write('status\n')
1477 fp.write('guards\n')
1501 fp.write('guards\n')
1478 fp.close()
1502 fp.close()
1479 if not os.path.exists(r.wjoin('series')):
1503 if not os.path.exists(r.wjoin('series')):
1480 r.wopener('series', 'w').close()
1504 r.wopener('series', 'w').close()
1481 r.add(['.hgignore', 'series'])
1505 r.add(['.hgignore', 'series'])
1482 commands.add(ui, r)
1506 commands.add(ui, r)
1483 return 0
1507 return 0
1484
1508
1485 def clone(ui, source, dest=None, **opts):
1509 def clone(ui, source, dest=None, **opts):
1486 '''clone main and patch repository at same time
1510 '''clone main and patch repository at same time
1487
1511
1488 If source is local, destination will have no patches applied. If
1512 If source is local, destination will have no patches applied. If
1489 source is remote, this command can not check if patches are
1513 source is remote, this command can not check if patches are
1490 applied in source, so cannot guarantee that patches are not
1514 applied in source, so cannot guarantee that patches are not
1491 applied in destination. If you clone remote repository, be sure
1515 applied in destination. If you clone remote repository, be sure
1492 before that it has no patches applied.
1516 before that it has no patches applied.
1493
1517
1494 Source patch repository is looked for in <src>/.hg/patches by
1518 Source patch repository is looked for in <src>/.hg/patches by
1495 default. Use -p <url> to change.
1519 default. Use -p <url> to change.
1496
1520
1497 The patch directory must be a nested mercurial repository, as
1521 The patch directory must be a nested mercurial repository, as
1498 would be created by qinit -c.
1522 would be created by qinit -c.
1499 '''
1523 '''
1500 cmdutil.setremoteconfig(ui, opts)
1524 cmdutil.setremoteconfig(ui, opts)
1501 if dest is None:
1525 if dest is None:
1502 dest = hg.defaultdest(source)
1526 dest = hg.defaultdest(source)
1503 sr = hg.repository(ui, ui.expandpath(source))
1527 sr = hg.repository(ui, ui.expandpath(source))
1504 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1528 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1505 try:
1529 try:
1506 pr = hg.repository(ui, patchdir)
1530 pr = hg.repository(ui, patchdir)
1507 except hg.RepoError:
1531 except hg.RepoError:
1508 raise util.Abort(_('versioned patch repository not found'
1532 raise util.Abort(_('versioned patch repository not found'
1509 ' (see qinit -c)'))
1533 ' (see qinit -c)'))
1510 qbase, destrev = None, None
1534 qbase, destrev = None, None
1511 if sr.local():
1535 if sr.local():
1512 if sr.mq.applied:
1536 if sr.mq.applied:
1513 qbase = revlog.bin(sr.mq.applied[0].rev)
1537 qbase = revlog.bin(sr.mq.applied[0].rev)
1514 if not hg.islocal(dest):
1538 if not hg.islocal(dest):
1515 heads = dict.fromkeys(sr.heads())
1539 heads = dict.fromkeys(sr.heads())
1516 for h in sr.heads(qbase):
1540 for h in sr.heads(qbase):
1517 del heads[h]
1541 del heads[h]
1518 destrev = heads.keys()
1542 destrev = heads.keys()
1519 destrev.append(sr.changelog.parents(qbase)[0])
1543 destrev.append(sr.changelog.parents(qbase)[0])
1520 ui.note(_('cloning main repo\n'))
1544 ui.note(_('cloning main repo\n'))
1521 sr, dr = hg.clone(ui, sr.url(), dest,
1545 sr, dr = hg.clone(ui, sr.url(), dest,
1522 pull=opts['pull'],
1546 pull=opts['pull'],
1523 rev=destrev,
1547 rev=destrev,
1524 update=False,
1548 update=False,
1525 stream=opts['uncompressed'])
1549 stream=opts['uncompressed'])
1526 ui.note(_('cloning patch repo\n'))
1550 ui.note(_('cloning patch repo\n'))
1527 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1551 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1528 dr.url() + '/.hg/patches',
1552 dr.url() + '/.hg/patches',
1529 pull=opts['pull'],
1553 pull=opts['pull'],
1530 update=not opts['noupdate'],
1554 update=not opts['noupdate'],
1531 stream=opts['uncompressed'])
1555 stream=opts['uncompressed'])
1532 if dr.local():
1556 if dr.local():
1533 if qbase:
1557 if qbase:
1534 ui.note(_('stripping applied patches from destination repo\n'))
1558 ui.note(_('stripping applied patches from destination repo\n'))
1535 dr.mq.strip(dr, qbase, update=False, backup=None)
1559 dr.mq.strip(dr, qbase, update=False, backup=None)
1536 if not opts['noupdate']:
1560 if not opts['noupdate']:
1537 ui.note(_('updating destination repo\n'))
1561 ui.note(_('updating destination repo\n'))
1538 hg.update(dr, dr.changelog.tip())
1562 hg.update(dr, dr.changelog.tip())
1539
1563
1540 def commit(ui, repo, *pats, **opts):
1564 def commit(ui, repo, *pats, **opts):
1541 """commit changes in the queue repository"""
1565 """commit changes in the queue repository"""
1542 q = repo.mq
1566 q = repo.mq
1543 r = q.qrepo()
1567 r = q.qrepo()
1544 if not r: raise util.Abort('no queue repository')
1568 if not r: raise util.Abort('no queue repository')
1545 commands.commit(r.ui, r, *pats, **opts)
1569 commands.commit(r.ui, r, *pats, **opts)
1546
1570
1547 def series(ui, repo, **opts):
1571 def series(ui, repo, **opts):
1548 """print the entire series file"""
1572 """print the entire series file"""
1549 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1573 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1550 return 0
1574 return 0
1551
1575
1552 def top(ui, repo, **opts):
1576 def top(ui, repo, **opts):
1553 """print the name of the current patch"""
1577 """print the name of the current patch"""
1554 q = repo.mq
1578 q = repo.mq
1555 t = q.applied and q.series_end(True) or 0
1579 t = q.applied and q.series_end(True) or 0
1556 if t:
1580 if t:
1557 return q.qseries(repo, start=t-1, length=1, status='A',
1581 return q.qseries(repo, start=t-1, length=1, status='A',
1558 summary=opts.get('summary'))
1582 summary=opts.get('summary'))
1559 else:
1583 else:
1560 ui.write("No patches applied\n")
1584 ui.write("No patches applied\n")
1561 return 1
1585 return 1
1562
1586
1563 def next(ui, repo, **opts):
1587 def next(ui, repo, **opts):
1564 """print the name of the next patch"""
1588 """print the name of the next patch"""
1565 q = repo.mq
1589 q = repo.mq
1566 end = q.series_end()
1590 end = q.series_end()
1567 if end == len(q.series):
1591 if end == len(q.series):
1568 ui.write("All patches applied\n")
1592 ui.write("All patches applied\n")
1569 return 1
1593 return 1
1570 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1594 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1571
1595
1572 def prev(ui, repo, **opts):
1596 def prev(ui, repo, **opts):
1573 """print the name of the previous patch"""
1597 """print the name of the previous patch"""
1574 q = repo.mq
1598 q = repo.mq
1575 l = len(q.applied)
1599 l = len(q.applied)
1576 if l == 1:
1600 if l == 1:
1577 ui.write("Only one patch applied\n")
1601 ui.write("Only one patch applied\n")
1578 return 1
1602 return 1
1579 if not l:
1603 if not l:
1580 ui.write("No patches applied\n")
1604 ui.write("No patches applied\n")
1581 return 1
1605 return 1
1582 return q.qseries(repo, start=l-2, length=1, status='A',
1606 return q.qseries(repo, start=l-2, length=1, status='A',
1583 summary=opts.get('summary'))
1607 summary=opts.get('summary'))
1584
1608
1585 def new(ui, repo, patch, *args, **opts):
1609 def new(ui, repo, patch, *args, **opts):
1586 """create a new patch
1610 """create a new patch
1587
1611
1588 qnew creates a new patch on top of the currently-applied patch
1612 qnew creates a new patch on top of the currently-applied patch
1589 (if any). It will refuse to run if there are any outstanding
1613 (if any). It will refuse to run if there are any outstanding
1590 changes unless -f is specified, in which case the patch will
1614 changes unless -f is specified, in which case the patch will
1591 be initialised with them. You may also use -I, -X, and/or a list of
1615 be initialised with them. You may also use -I, -X, and/or a list of
1592 files after the patch name to add only changes to matching files
1616 files after the patch name to add only changes to matching files
1593 to the new patch, leaving the rest as uncommitted modifications.
1617 to the new patch, leaving the rest as uncommitted modifications.
1594
1618
1595 -e, -m or -l set the patch header as well as the commit message.
1619 -e, -m or -l set the patch header as well as the commit message.
1596 If none is specified, the patch header is empty and the
1620 If none is specified, the patch header is empty and the
1597 commit message is '[mq]: PATCH'"""
1621 commit message is '[mq]: PATCH'"""
1598 q = repo.mq
1622 q = repo.mq
1599 message = cmdutil.logmessage(opts)
1623 message = cmdutil.logmessage(opts)
1600 if opts['edit']:
1624 if opts['edit']:
1601 message = ui.edit(message, ui.username())
1625 message = ui.edit(message, ui.username())
1602 opts['msg'] = message
1626 opts['msg'] = message
1603 q.new(repo, patch, *args, **opts)
1627 q.new(repo, patch, *args, **opts)
1604 q.save_dirty()
1628 q.save_dirty()
1605 return 0
1629 return 0
1606
1630
1607 def refresh(ui, repo, *pats, **opts):
1631 def refresh(ui, repo, *pats, **opts):
1608 """update the current patch
1632 """update the current patch
1609
1633
1610 If any file patterns are provided, the refreshed patch will contain only
1634 If any file patterns are provided, the refreshed patch will contain only
1611 the modifications that match those patterns; the remaining modifications
1635 the modifications that match those patterns; the remaining modifications
1612 will remain in the working directory.
1636 will remain in the working directory.
1613
1637
1614 hg add/remove/copy/rename work as usual, though you might want to use
1638 hg add/remove/copy/rename work as usual, though you might want to use
1615 git-style patches (--git or [diff] git=1) to track copies and renames.
1639 git-style patches (--git or [diff] git=1) to track copies and renames.
1616 """
1640 """
1617 q = repo.mq
1641 q = repo.mq
1618 message = cmdutil.logmessage(opts)
1642 message = cmdutil.logmessage(opts)
1619 if opts['edit']:
1643 if opts['edit']:
1620 if message:
1644 if message:
1621 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1645 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1622 patch = q.applied[-1].name
1646 patch = q.applied[-1].name
1623 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1647 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1624 message = ui.edit('\n'.join(message), user or ui.username())
1648 message = ui.edit('\n'.join(message), user or ui.username())
1625 ret = q.refresh(repo, pats, msg=message, **opts)
1649 ret = q.refresh(repo, pats, msg=message, **opts)
1626 q.save_dirty()
1650 q.save_dirty()
1627 return ret
1651 return ret
1628
1652
1629 def diff(ui, repo, *pats, **opts):
1653 def diff(ui, repo, *pats, **opts):
1630 """diff of the current patch"""
1654 """diff of the current patch"""
1631 repo.mq.diff(repo, pats, opts)
1655 repo.mq.diff(repo, pats, opts)
1632 return 0
1656 return 0
1633
1657
1634 def fold(ui, repo, *files, **opts):
1658 def fold(ui, repo, *files, **opts):
1635 """fold the named patches into the current patch
1659 """fold the named patches into the current patch
1636
1660
1637 Patches must not yet be applied. Each patch will be successively
1661 Patches must not yet be applied. Each patch will be successively
1638 applied to the current patch in the order given. If all the
1662 applied to the current patch in the order given. If all the
1639 patches apply successfully, the current patch will be refreshed
1663 patches apply successfully, the current patch will be refreshed
1640 with the new cumulative patch, and the folded patches will
1664 with the new cumulative patch, and the folded patches will
1641 be deleted. With -k/--keep, the folded patch files will not
1665 be deleted. With -k/--keep, the folded patch files will not
1642 be removed afterwards.
1666 be removed afterwards.
1643
1667
1644 The header for each folded patch will be concatenated with
1668 The header for each folded patch will be concatenated with
1645 the current patch header, separated by a line of '* * *'."""
1669 the current patch header, separated by a line of '* * *'."""
1646
1670
1647 q = repo.mq
1671 q = repo.mq
1648
1672
1649 if not files:
1673 if not files:
1650 raise util.Abort(_('qfold requires at least one patch name'))
1674 raise util.Abort(_('qfold requires at least one patch name'))
1651 if not q.check_toppatch(repo):
1675 if not q.check_toppatch(repo):
1652 raise util.Abort(_('No patches applied'))
1676 raise util.Abort(_('No patches applied'))
1653
1677
1654 message = cmdutil.logmessage(opts)
1678 message = cmdutil.logmessage(opts)
1655 if opts['edit']:
1679 if opts['edit']:
1656 if message:
1680 if message:
1657 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1681 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1658
1682
1659 parent = q.lookup('qtip')
1683 parent = q.lookup('qtip')
1660 patches = []
1684 patches = []
1661 messages = []
1685 messages = []
1662 for f in files:
1686 for f in files:
1663 p = q.lookup(f)
1687 p = q.lookup(f)
1664 if p in patches or p == parent:
1688 if p in patches or p == parent:
1665 ui.warn(_('Skipping already folded patch %s') % p)
1689 ui.warn(_('Skipping already folded patch %s') % p)
1666 if q.isapplied(p):
1690 if q.isapplied(p):
1667 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1691 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1668 patches.append(p)
1692 patches.append(p)
1669
1693
1670 for p in patches:
1694 for p in patches:
1671 if not message:
1695 if not message:
1672 messages.append(q.readheaders(p)[0])
1696 messages.append(q.readheaders(p)[0])
1673 pf = q.join(p)
1697 pf = q.join(p)
1674 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1698 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1675 if not patchsuccess:
1699 if not patchsuccess:
1676 raise util.Abort(_('Error folding patch %s') % p)
1700 raise util.Abort(_('Error folding patch %s') % p)
1677 patch.updatedir(ui, repo, files)
1701 patch.updatedir(ui, repo, files)
1678
1702
1679 if not message:
1703 if not message:
1680 message, comments, user = q.readheaders(parent)[0:3]
1704 message, comments, user = q.readheaders(parent)[0:3]
1681 for msg in messages:
1705 for msg in messages:
1682 message.append('* * *')
1706 message.append('* * *')
1683 message.extend(msg)
1707 message.extend(msg)
1684 message = '\n'.join(message)
1708 message = '\n'.join(message)
1685
1709
1686 if opts['edit']:
1710 if opts['edit']:
1687 message = ui.edit(message, user or ui.username())
1711 message = ui.edit(message, user or ui.username())
1688
1712
1689 q.refresh(repo, msg=message)
1713 q.refresh(repo, msg=message)
1690 q.delete(repo, patches, opts)
1714 q.delete(repo, patches, opts)
1691 q.save_dirty()
1715 q.save_dirty()
1692
1716
1693 def goto(ui, repo, patch, **opts):
1717 def goto(ui, repo, patch, **opts):
1694 '''push or pop patches until named patch is at top of stack'''
1718 '''push or pop patches until named patch is at top of stack'''
1695 q = repo.mq
1719 q = repo.mq
1696 patch = q.lookup(patch)
1720 patch = q.lookup(patch)
1697 if q.isapplied(patch):
1721 if q.isapplied(patch):
1698 ret = q.pop(repo, patch, force=opts['force'])
1722 ret = q.pop(repo, patch, force=opts['force'])
1699 else:
1723 else:
1700 ret = q.push(repo, patch, force=opts['force'])
1724 ret = q.push(repo, patch, force=opts['force'])
1701 q.save_dirty()
1725 q.save_dirty()
1702 return ret
1726 return ret
1703
1727
1704 def guard(ui, repo, *args, **opts):
1728 def guard(ui, repo, *args, **opts):
1705 '''set or print guards for a patch
1729 '''set or print guards for a patch
1706
1730
1707 Guards control whether a patch can be pushed. A patch with no
1731 Guards control whether a patch can be pushed. A patch with no
1708 guards is always pushed. A patch with a positive guard ("+foo") is
1732 guards is always pushed. A patch with a positive guard ("+foo") is
1709 pushed only if the qselect command has activated it. A patch with
1733 pushed only if the qselect command has activated it. A patch with
1710 a negative guard ("-foo") is never pushed if the qselect command
1734 a negative guard ("-foo") is never pushed if the qselect command
1711 has activated it.
1735 has activated it.
1712
1736
1713 With no arguments, print the currently active guards.
1737 With no arguments, print the currently active guards.
1714 With arguments, set guards for the named patch.
1738 With arguments, set guards for the named patch.
1715
1739
1716 To set a negative guard "-foo" on topmost patch ("--" is needed so
1740 To set a negative guard "-foo" on topmost patch ("--" is needed so
1717 hg will not interpret "-foo" as an option):
1741 hg will not interpret "-foo" as an option):
1718 hg qguard -- -foo
1742 hg qguard -- -foo
1719
1743
1720 To set guards on another patch:
1744 To set guards on another patch:
1721 hg qguard other.patch +2.6.17 -stable
1745 hg qguard other.patch +2.6.17 -stable
1722 '''
1746 '''
1723 def status(idx):
1747 def status(idx):
1724 guards = q.series_guards[idx] or ['unguarded']
1748 guards = q.series_guards[idx] or ['unguarded']
1725 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1749 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1726 q = repo.mq
1750 q = repo.mq
1727 patch = None
1751 patch = None
1728 args = list(args)
1752 args = list(args)
1729 if opts['list']:
1753 if opts['list']:
1730 if args or opts['none']:
1754 if args or opts['none']:
1731 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1755 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1732 for i in xrange(len(q.series)):
1756 for i in xrange(len(q.series)):
1733 status(i)
1757 status(i)
1734 return
1758 return
1735 if not args or args[0][0:1] in '-+':
1759 if not args or args[0][0:1] in '-+':
1736 if not q.applied:
1760 if not q.applied:
1737 raise util.Abort(_('no patches applied'))
1761 raise util.Abort(_('no patches applied'))
1738 patch = q.applied[-1].name
1762 patch = q.applied[-1].name
1739 if patch is None and args[0][0:1] not in '-+':
1763 if patch is None and args[0][0:1] not in '-+':
1740 patch = args.pop(0)
1764 patch = args.pop(0)
1741 if patch is None:
1765 if patch is None:
1742 raise util.Abort(_('no patch to work with'))
1766 raise util.Abort(_('no patch to work with'))
1743 if args or opts['none']:
1767 if args or opts['none']:
1744 idx = q.find_series(patch)
1768 idx = q.find_series(patch)
1745 if idx is None:
1769 if idx is None:
1746 raise util.Abort(_('no patch named %s') % patch)
1770 raise util.Abort(_('no patch named %s') % patch)
1747 q.set_guards(idx, args)
1771 q.set_guards(idx, args)
1748 q.save_dirty()
1772 q.save_dirty()
1749 else:
1773 else:
1750 status(q.series.index(q.lookup(patch)))
1774 status(q.series.index(q.lookup(patch)))
1751
1775
1752 def header(ui, repo, patch=None):
1776 def header(ui, repo, patch=None):
1753 """Print the header of the topmost or specified patch"""
1777 """Print the header of the topmost or specified patch"""
1754 q = repo.mq
1778 q = repo.mq
1755
1779
1756 if patch:
1780 if patch:
1757 patch = q.lookup(patch)
1781 patch = q.lookup(patch)
1758 else:
1782 else:
1759 if not q.applied:
1783 if not q.applied:
1760 ui.write('No patches applied\n')
1784 ui.write('No patches applied\n')
1761 return 1
1785 return 1
1762 patch = q.lookup('qtip')
1786 patch = q.lookup('qtip')
1763 message = repo.mq.readheaders(patch)[0]
1787 message = repo.mq.readheaders(patch)[0]
1764
1788
1765 ui.write('\n'.join(message) + '\n')
1789 ui.write('\n'.join(message) + '\n')
1766
1790
1767 def lastsavename(path):
1791 def lastsavename(path):
1768 (directory, base) = os.path.split(path)
1792 (directory, base) = os.path.split(path)
1769 names = os.listdir(directory)
1793 names = os.listdir(directory)
1770 namere = re.compile("%s.([0-9]+)" % base)
1794 namere = re.compile("%s.([0-9]+)" % base)
1771 maxindex = None
1795 maxindex = None
1772 maxname = None
1796 maxname = None
1773 for f in names:
1797 for f in names:
1774 m = namere.match(f)
1798 m = namere.match(f)
1775 if m:
1799 if m:
1776 index = int(m.group(1))
1800 index = int(m.group(1))
1777 if maxindex == None or index > maxindex:
1801 if maxindex == None or index > maxindex:
1778 maxindex = index
1802 maxindex = index
1779 maxname = f
1803 maxname = f
1780 if maxname:
1804 if maxname:
1781 return (os.path.join(directory, maxname), maxindex)
1805 return (os.path.join(directory, maxname), maxindex)
1782 return (None, None)
1806 return (None, None)
1783
1807
1784 def savename(path):
1808 def savename(path):
1785 (last, index) = lastsavename(path)
1809 (last, index) = lastsavename(path)
1786 if last is None:
1810 if last is None:
1787 index = 0
1811 index = 0
1788 newpath = path + ".%d" % (index + 1)
1812 newpath = path + ".%d" % (index + 1)
1789 return newpath
1813 return newpath
1790
1814
1791 def push(ui, repo, patch=None, **opts):
1815 def push(ui, repo, patch=None, **opts):
1792 """push the next patch onto the stack"""
1816 """push the next patch onto the stack"""
1793 q = repo.mq
1817 q = repo.mq
1794 mergeq = None
1818 mergeq = None
1795
1819
1796 if opts['all']:
1820 if opts['all']:
1797 if not q.series:
1821 if not q.series:
1798 ui.warn(_('no patches in series\n'))
1822 ui.warn(_('no patches in series\n'))
1799 return 0
1823 return 0
1800 patch = q.series[-1]
1824 patch = q.series[-1]
1801 if opts['merge']:
1825 if opts['merge']:
1802 if opts['name']:
1826 if opts['name']:
1803 newpath = opts['name']
1827 newpath = opts['name']
1804 else:
1828 else:
1805 newpath, i = lastsavename(q.path)
1829 newpath, i = lastsavename(q.path)
1806 if not newpath:
1830 if not newpath:
1807 ui.warn("no saved queues found, please use -n\n")
1831 ui.warn("no saved queues found, please use -n\n")
1808 return 1
1832 return 1
1809 mergeq = queue(ui, repo.join(""), newpath)
1833 mergeq = queue(ui, repo.join(""), newpath)
1810 ui.warn("merging with queue at: %s\n" % mergeq.path)
1834 ui.warn("merging with queue at: %s\n" % mergeq.path)
1811 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1835 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1812 mergeq=mergeq)
1836 mergeq=mergeq)
1813 return ret
1837 return ret
1814
1838
1815 def pop(ui, repo, patch=None, **opts):
1839 def pop(ui, repo, patch=None, **opts):
1816 """pop the current patch off the stack"""
1840 """pop the current patch off the stack"""
1817 localupdate = True
1841 localupdate = True
1818 if opts['name']:
1842 if opts['name']:
1819 q = queue(ui, repo.join(""), repo.join(opts['name']))
1843 q = queue(ui, repo.join(""), repo.join(opts['name']))
1820 ui.warn('using patch queue: %s\n' % q.path)
1844 ui.warn('using patch queue: %s\n' % q.path)
1821 localupdate = False
1845 localupdate = False
1822 else:
1846 else:
1823 q = repo.mq
1847 q = repo.mq
1824 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1848 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1825 all=opts['all'])
1849 all=opts['all'])
1826 q.save_dirty()
1850 q.save_dirty()
1827 return ret
1851 return ret
1828
1852
1829 def rename(ui, repo, patch, name=None, **opts):
1853 def rename(ui, repo, patch, name=None, **opts):
1830 """rename a patch
1854 """rename a patch
1831
1855
1832 With one argument, renames the current patch to PATCH1.
1856 With one argument, renames the current patch to PATCH1.
1833 With two arguments, renames PATCH1 to PATCH2."""
1857 With two arguments, renames PATCH1 to PATCH2."""
1834
1858
1835 q = repo.mq
1859 q = repo.mq
1836
1860
1837 if not name:
1861 if not name:
1838 name = patch
1862 name = patch
1839 patch = None
1863 patch = None
1840
1864
1841 if patch:
1865 if patch:
1842 patch = q.lookup(patch)
1866 patch = q.lookup(patch)
1843 else:
1867 else:
1844 if not q.applied:
1868 if not q.applied:
1845 ui.write(_('No patches applied\n'))
1869 ui.write(_('No patches applied\n'))
1846 return
1870 return
1847 patch = q.lookup('qtip')
1871 patch = q.lookup('qtip')
1848 absdest = q.join(name)
1872 absdest = q.join(name)
1849 if os.path.isdir(absdest):
1873 if os.path.isdir(absdest):
1850 name = normname(os.path.join(name, os.path.basename(patch)))
1874 name = normname(os.path.join(name, os.path.basename(patch)))
1851 absdest = q.join(name)
1875 absdest = q.join(name)
1852 if os.path.exists(absdest):
1876 if os.path.exists(absdest):
1853 raise util.Abort(_('%s already exists') % absdest)
1877 raise util.Abort(_('%s already exists') % absdest)
1854
1878
1855 if name in q.series:
1879 if name in q.series:
1856 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1880 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1857
1881
1858 if ui.verbose:
1882 if ui.verbose:
1859 ui.write('Renaming %s to %s\n' % (patch, name))
1883 ui.write('Renaming %s to %s\n' % (patch, name))
1860 i = q.find_series(patch)
1884 i = q.find_series(patch)
1861 guards = q.guard_re.findall(q.full_series[i])
1885 guards = q.guard_re.findall(q.full_series[i])
1862 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1886 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1863 q.parse_series()
1887 q.parse_series()
1864 q.series_dirty = 1
1888 q.series_dirty = 1
1865
1889
1866 info = q.isapplied(patch)
1890 info = q.isapplied(patch)
1867 if info:
1891 if info:
1868 q.applied[info[0]] = statusentry(info[1], name)
1892 q.applied[info[0]] = statusentry(info[1], name)
1869 q.applied_dirty = 1
1893 q.applied_dirty = 1
1870
1894
1871 util.rename(q.join(patch), absdest)
1895 util.rename(q.join(patch), absdest)
1872 r = q.qrepo()
1896 r = q.qrepo()
1873 if r:
1897 if r:
1874 wlock = r.wlock()
1898 wlock = r.wlock()
1875 if r.dirstate[name] == 'r':
1899 try:
1876 r.undelete([name], wlock)
1900 if r.dirstate[name] == 'r':
1877 r.copy(patch, name, wlock)
1901 r.undelete([name], wlock)
1878 r.remove([patch], False, wlock)
1902 r.copy(patch, name, wlock)
1903 r.remove([patch], False, wlock)
1904 finally:
1905 del wlock
1879
1906
1880 q.save_dirty()
1907 q.save_dirty()
1881
1908
1882 def restore(ui, repo, rev, **opts):
1909 def restore(ui, repo, rev, **opts):
1883 """restore the queue state saved by a rev"""
1910 """restore the queue state saved by a rev"""
1884 rev = repo.lookup(rev)
1911 rev = repo.lookup(rev)
1885 q = repo.mq
1912 q = repo.mq
1886 q.restore(repo, rev, delete=opts['delete'],
1913 q.restore(repo, rev, delete=opts['delete'],
1887 qupdate=opts['update'])
1914 qupdate=opts['update'])
1888 q.save_dirty()
1915 q.save_dirty()
1889 return 0
1916 return 0
1890
1917
1891 def save(ui, repo, **opts):
1918 def save(ui, repo, **opts):
1892 """save current queue state"""
1919 """save current queue state"""
1893 q = repo.mq
1920 q = repo.mq
1894 message = cmdutil.logmessage(opts)
1921 message = cmdutil.logmessage(opts)
1895 ret = q.save(repo, msg=message)
1922 ret = q.save(repo, msg=message)
1896 if ret:
1923 if ret:
1897 return ret
1924 return ret
1898 q.save_dirty()
1925 q.save_dirty()
1899 if opts['copy']:
1926 if opts['copy']:
1900 path = q.path
1927 path = q.path
1901 if opts['name']:
1928 if opts['name']:
1902 newpath = os.path.join(q.basepath, opts['name'])
1929 newpath = os.path.join(q.basepath, opts['name'])
1903 if os.path.exists(newpath):
1930 if os.path.exists(newpath):
1904 if not os.path.isdir(newpath):
1931 if not os.path.isdir(newpath):
1905 raise util.Abort(_('destination %s exists and is not '
1932 raise util.Abort(_('destination %s exists and is not '
1906 'a directory') % newpath)
1933 'a directory') % newpath)
1907 if not opts['force']:
1934 if not opts['force']:
1908 raise util.Abort(_('destination %s exists, '
1935 raise util.Abort(_('destination %s exists, '
1909 'use -f to force') % newpath)
1936 'use -f to force') % newpath)
1910 else:
1937 else:
1911 newpath = savename(path)
1938 newpath = savename(path)
1912 ui.warn("copy %s to %s\n" % (path, newpath))
1939 ui.warn("copy %s to %s\n" % (path, newpath))
1913 util.copyfiles(path, newpath)
1940 util.copyfiles(path, newpath)
1914 if opts['empty']:
1941 if opts['empty']:
1915 try:
1942 try:
1916 os.unlink(q.join(q.status_path))
1943 os.unlink(q.join(q.status_path))
1917 except:
1944 except:
1918 pass
1945 pass
1919 return 0
1946 return 0
1920
1947
1921 def strip(ui, repo, rev, **opts):
1948 def strip(ui, repo, rev, **opts):
1922 """strip a revision and all later revs on the same branch"""
1949 """strip a revision and all later revs on the same branch"""
1923 rev = repo.lookup(rev)
1950 rev = repo.lookup(rev)
1924 backup = 'all'
1951 backup = 'all'
1925 if opts['backup']:
1952 if opts['backup']:
1926 backup = 'strip'
1953 backup = 'strip'
1927 elif opts['nobackup']:
1954 elif opts['nobackup']:
1928 backup = 'none'
1955 backup = 'none'
1929 update = repo.dirstate.parents()[0] != revlog.nullid
1956 update = repo.dirstate.parents()[0] != revlog.nullid
1930 repo.mq.strip(repo, rev, backup=backup, update=update)
1957 repo.mq.strip(repo, rev, backup=backup, update=update)
1931 return 0
1958 return 0
1932
1959
1933 def select(ui, repo, *args, **opts):
1960 def select(ui, repo, *args, **opts):
1934 '''set or print guarded patches to push
1961 '''set or print guarded patches to push
1935
1962
1936 Use the qguard command to set or print guards on patch, then use
1963 Use the qguard command to set or print guards on patch, then use
1937 qselect to tell mq which guards to use. A patch will be pushed if it
1964 qselect to tell mq which guards to use. A patch will be pushed if it
1938 has no guards or any positive guards match the currently selected guard,
1965 has no guards or any positive guards match the currently selected guard,
1939 but will not be pushed if any negative guards match the current guard.
1966 but will not be pushed if any negative guards match the current guard.
1940 For example:
1967 For example:
1941
1968
1942 qguard foo.patch -stable (negative guard)
1969 qguard foo.patch -stable (negative guard)
1943 qguard bar.patch +stable (positive guard)
1970 qguard bar.patch +stable (positive guard)
1944 qselect stable
1971 qselect stable
1945
1972
1946 This activates the "stable" guard. mq will skip foo.patch (because
1973 This activates the "stable" guard. mq will skip foo.patch (because
1947 it has a negative match) but push bar.patch (because it
1974 it has a negative match) but push bar.patch (because it
1948 has a positive match).
1975 has a positive match).
1949
1976
1950 With no arguments, prints the currently active guards.
1977 With no arguments, prints the currently active guards.
1951 With one argument, sets the active guard.
1978 With one argument, sets the active guard.
1952
1979
1953 Use -n/--none to deactivate guards (no other arguments needed).
1980 Use -n/--none to deactivate guards (no other arguments needed).
1954 When no guards are active, patches with positive guards are skipped
1981 When no guards are active, patches with positive guards are skipped
1955 and patches with negative guards are pushed.
1982 and patches with negative guards are pushed.
1956
1983
1957 qselect can change the guards on applied patches. It does not pop
1984 qselect can change the guards on applied patches. It does not pop
1958 guarded patches by default. Use --pop to pop back to the last applied
1985 guarded patches by default. Use --pop to pop back to the last applied
1959 patch that is not guarded. Use --reapply (which implies --pop) to push
1986 patch that is not guarded. Use --reapply (which implies --pop) to push
1960 back to the current patch afterwards, but skip guarded patches.
1987 back to the current patch afterwards, but skip guarded patches.
1961
1988
1962 Use -s/--series to print a list of all guards in the series file (no
1989 Use -s/--series to print a list of all guards in the series file (no
1963 other arguments needed). Use -v for more information.'''
1990 other arguments needed). Use -v for more information.'''
1964
1991
1965 q = repo.mq
1992 q = repo.mq
1966 guards = q.active()
1993 guards = q.active()
1967 if args or opts['none']:
1994 if args or opts['none']:
1968 old_unapplied = q.unapplied(repo)
1995 old_unapplied = q.unapplied(repo)
1969 old_guarded = [i for i in xrange(len(q.applied)) if
1996 old_guarded = [i for i in xrange(len(q.applied)) if
1970 not q.pushable(i)[0]]
1997 not q.pushable(i)[0]]
1971 q.set_active(args)
1998 q.set_active(args)
1972 q.save_dirty()
1999 q.save_dirty()
1973 if not args:
2000 if not args:
1974 ui.status(_('guards deactivated\n'))
2001 ui.status(_('guards deactivated\n'))
1975 if not opts['pop'] and not opts['reapply']:
2002 if not opts['pop'] and not opts['reapply']:
1976 unapplied = q.unapplied(repo)
2003 unapplied = q.unapplied(repo)
1977 guarded = [i for i in xrange(len(q.applied))
2004 guarded = [i for i in xrange(len(q.applied))
1978 if not q.pushable(i)[0]]
2005 if not q.pushable(i)[0]]
1979 if len(unapplied) != len(old_unapplied):
2006 if len(unapplied) != len(old_unapplied):
1980 ui.status(_('number of unguarded, unapplied patches has '
2007 ui.status(_('number of unguarded, unapplied patches has '
1981 'changed from %d to %d\n') %
2008 'changed from %d to %d\n') %
1982 (len(old_unapplied), len(unapplied)))
2009 (len(old_unapplied), len(unapplied)))
1983 if len(guarded) != len(old_guarded):
2010 if len(guarded) != len(old_guarded):
1984 ui.status(_('number of guarded, applied patches has changed '
2011 ui.status(_('number of guarded, applied patches has changed '
1985 'from %d to %d\n') %
2012 'from %d to %d\n') %
1986 (len(old_guarded), len(guarded)))
2013 (len(old_guarded), len(guarded)))
1987 elif opts['series']:
2014 elif opts['series']:
1988 guards = {}
2015 guards = {}
1989 noguards = 0
2016 noguards = 0
1990 for gs in q.series_guards:
2017 for gs in q.series_guards:
1991 if not gs:
2018 if not gs:
1992 noguards += 1
2019 noguards += 1
1993 for g in gs:
2020 for g in gs:
1994 guards.setdefault(g, 0)
2021 guards.setdefault(g, 0)
1995 guards[g] += 1
2022 guards[g] += 1
1996 if ui.verbose:
2023 if ui.verbose:
1997 guards['NONE'] = noguards
2024 guards['NONE'] = noguards
1998 guards = guards.items()
2025 guards = guards.items()
1999 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2026 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2000 if guards:
2027 if guards:
2001 ui.note(_('guards in series file:\n'))
2028 ui.note(_('guards in series file:\n'))
2002 for guard, count in guards:
2029 for guard, count in guards:
2003 ui.note('%2d ' % count)
2030 ui.note('%2d ' % count)
2004 ui.write(guard, '\n')
2031 ui.write(guard, '\n')
2005 else:
2032 else:
2006 ui.note(_('no guards in series file\n'))
2033 ui.note(_('no guards in series file\n'))
2007 else:
2034 else:
2008 if guards:
2035 if guards:
2009 ui.note(_('active guards:\n'))
2036 ui.note(_('active guards:\n'))
2010 for g in guards:
2037 for g in guards:
2011 ui.write(g, '\n')
2038 ui.write(g, '\n')
2012 else:
2039 else:
2013 ui.write(_('no active guards\n'))
2040 ui.write(_('no active guards\n'))
2014 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2041 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2015 popped = False
2042 popped = False
2016 if opts['pop'] or opts['reapply']:
2043 if opts['pop'] or opts['reapply']:
2017 for i in xrange(len(q.applied)):
2044 for i in xrange(len(q.applied)):
2018 pushable, reason = q.pushable(i)
2045 pushable, reason = q.pushable(i)
2019 if not pushable:
2046 if not pushable:
2020 ui.status(_('popping guarded patches\n'))
2047 ui.status(_('popping guarded patches\n'))
2021 popped = True
2048 popped = True
2022 if i == 0:
2049 if i == 0:
2023 q.pop(repo, all=True)
2050 q.pop(repo, all=True)
2024 else:
2051 else:
2025 q.pop(repo, i-1)
2052 q.pop(repo, i-1)
2026 break
2053 break
2027 if popped:
2054 if popped:
2028 try:
2055 try:
2029 if reapply:
2056 if reapply:
2030 ui.status(_('reapplying unguarded patches\n'))
2057 ui.status(_('reapplying unguarded patches\n'))
2031 q.push(repo, reapply)
2058 q.push(repo, reapply)
2032 finally:
2059 finally:
2033 q.save_dirty()
2060 q.save_dirty()
2034
2061
2035 def reposetup(ui, repo):
2062 def reposetup(ui, repo):
2036 class mqrepo(repo.__class__):
2063 class mqrepo(repo.__class__):
2037 def abort_if_wdir_patched(self, errmsg, force=False):
2064 def abort_if_wdir_patched(self, errmsg, force=False):
2038 if self.mq.applied and not force:
2065 if self.mq.applied and not force:
2039 parent = revlog.hex(self.dirstate.parents()[0])
2066 parent = revlog.hex(self.dirstate.parents()[0])
2040 if parent in [s.rev for s in self.mq.applied]:
2067 if parent in [s.rev for s in self.mq.applied]:
2041 raise util.Abort(errmsg)
2068 raise util.Abort(errmsg)
2042
2069
2043 def commit(self, *args, **opts):
2070 def commit(self, *args, **opts):
2044 if len(args) >= 6:
2071 if len(args) >= 6:
2045 force = args[5]
2072 force = args[5]
2046 else:
2073 else:
2047 force = opts.get('force')
2074 force = opts.get('force')
2048 self.abort_if_wdir_patched(
2075 self.abort_if_wdir_patched(
2049 _('cannot commit over an applied mq patch'),
2076 _('cannot commit over an applied mq patch'),
2050 force)
2077 force)
2051
2078
2052 return super(mqrepo, self).commit(*args, **opts)
2079 return super(mqrepo, self).commit(*args, **opts)
2053
2080
2054 def push(self, remote, force=False, revs=None):
2081 def push(self, remote, force=False, revs=None):
2055 if self.mq.applied and not force and not revs:
2082 if self.mq.applied and not force and not revs:
2056 raise util.Abort(_('source has mq patches applied'))
2083 raise util.Abort(_('source has mq patches applied'))
2057 return super(mqrepo, self).push(remote, force, revs)
2084 return super(mqrepo, self).push(remote, force, revs)
2058
2085
2059 def tags(self):
2086 def tags(self):
2060 if self.tagscache:
2087 if self.tagscache:
2061 return self.tagscache
2088 return self.tagscache
2062
2089
2063 tagscache = super(mqrepo, self).tags()
2090 tagscache = super(mqrepo, self).tags()
2064
2091
2065 q = self.mq
2092 q = self.mq
2066 if not q.applied:
2093 if not q.applied:
2067 return tagscache
2094 return tagscache
2068
2095
2069 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2096 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2070 mqtags.append((mqtags[-1][0], 'qtip'))
2097 mqtags.append((mqtags[-1][0], 'qtip'))
2071 mqtags.append((mqtags[0][0], 'qbase'))
2098 mqtags.append((mqtags[0][0], 'qbase'))
2072 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2099 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2073 for patch in mqtags:
2100 for patch in mqtags:
2074 if patch[1] in tagscache:
2101 if patch[1] in tagscache:
2075 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2102 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2076 else:
2103 else:
2077 tagscache[patch[1]] = patch[0]
2104 tagscache[patch[1]] = patch[0]
2078
2105
2079 return tagscache
2106 return tagscache
2080
2107
2081 def _branchtags(self):
2108 def _branchtags(self):
2082 q = self.mq
2109 q = self.mq
2083 if not q.applied:
2110 if not q.applied:
2084 return super(mqrepo, self)._branchtags()
2111 return super(mqrepo, self)._branchtags()
2085
2112
2086 self.branchcache = {} # avoid recursion in changectx
2113 self.branchcache = {} # avoid recursion in changectx
2087 cl = self.changelog
2114 cl = self.changelog
2088 partial, last, lrev = self._readbranchcache()
2115 partial, last, lrev = self._readbranchcache()
2089
2116
2090 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2117 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2091 start = lrev + 1
2118 start = lrev + 1
2092 if start < qbase:
2119 if start < qbase:
2093 # update the cache (excluding the patches) and save it
2120 # update the cache (excluding the patches) and save it
2094 self._updatebranchcache(partial, lrev+1, qbase)
2121 self._updatebranchcache(partial, lrev+1, qbase)
2095 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2122 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2096 start = qbase
2123 start = qbase
2097 # if start = qbase, the cache is as updated as it should be.
2124 # if start = qbase, the cache is as updated as it should be.
2098 # if start > qbase, the cache includes (part of) the patches.
2125 # if start > qbase, the cache includes (part of) the patches.
2099 # we might as well use it, but we won't save it.
2126 # we might as well use it, but we won't save it.
2100
2127
2101 # update the cache up to the tip
2128 # update the cache up to the tip
2102 self._updatebranchcache(partial, start, cl.count())
2129 self._updatebranchcache(partial, start, cl.count())
2103
2130
2104 return partial
2131 return partial
2105
2132
2106 if repo.local():
2133 if repo.local():
2107 repo.__class__ = mqrepo
2134 repo.__class__ = mqrepo
2108 repo.mq = queue(ui, repo.join(""))
2135 repo.mq = queue(ui, repo.join(""))
2109
2136
2110 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2137 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2111
2138
2112 cmdtable = {
2139 cmdtable = {
2113 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2140 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2114 "qclone":
2141 "qclone":
2115 (clone,
2142 (clone,
2116 [('', 'pull', None, _('use pull protocol to copy metadata')),
2143 [('', 'pull', None, _('use pull protocol to copy metadata')),
2117 ('U', 'noupdate', None, _('do not update the new working directories')),
2144 ('U', 'noupdate', None, _('do not update the new working directories')),
2118 ('', 'uncompressed', None,
2145 ('', 'uncompressed', None,
2119 _('use uncompressed transfer (fast over LAN)')),
2146 _('use uncompressed transfer (fast over LAN)')),
2120 ('e', 'ssh', '', _('specify ssh command to use')),
2147 ('e', 'ssh', '', _('specify ssh command to use')),
2121 ('p', 'patches', '', _('location of source patch repo')),
2148 ('p', 'patches', '', _('location of source patch repo')),
2122 ('', 'remotecmd', '',
2149 ('', 'remotecmd', '',
2123 _('specify hg command to run on the remote side'))],
2150 _('specify hg command to run on the remote side'))],
2124 _('hg qclone [OPTION]... SOURCE [DEST]')),
2151 _('hg qclone [OPTION]... SOURCE [DEST]')),
2125 "qcommit|qci":
2152 "qcommit|qci":
2126 (commit,
2153 (commit,
2127 commands.table["^commit|ci"][1],
2154 commands.table["^commit|ci"][1],
2128 _('hg qcommit [OPTION]... [FILE]...')),
2155 _('hg qcommit [OPTION]... [FILE]...')),
2129 "^qdiff":
2156 "^qdiff":
2130 (diff,
2157 (diff,
2131 [('g', 'git', None, _('use git extended diff format')),
2158 [('g', 'git', None, _('use git extended diff format')),
2132 ('I', 'include', [], _('include names matching the given patterns')),
2159 ('I', 'include', [], _('include names matching the given patterns')),
2133 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2160 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2134 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2161 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2135 "qdelete|qremove|qrm":
2162 "qdelete|qremove|qrm":
2136 (delete,
2163 (delete,
2137 [('k', 'keep', None, _('keep patch file')),
2164 [('k', 'keep', None, _('keep patch file')),
2138 ('r', 'rev', [], _('stop managing a revision'))],
2165 ('r', 'rev', [], _('stop managing a revision'))],
2139 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2166 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2140 'qfold':
2167 'qfold':
2141 (fold,
2168 (fold,
2142 [('e', 'edit', None, _('edit patch header')),
2169 [('e', 'edit', None, _('edit patch header')),
2143 ('k', 'keep', None, _('keep folded patch files')),
2170 ('k', 'keep', None, _('keep folded patch files')),
2144 ] + commands.commitopts,
2171 ] + commands.commitopts,
2145 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2172 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2146 'qgoto':
2173 'qgoto':
2147 (goto,
2174 (goto,
2148 [('f', 'force', None, _('overwrite any local changes'))],
2175 [('f', 'force', None, _('overwrite any local changes'))],
2149 _('hg qgoto [OPTION]... PATCH')),
2176 _('hg qgoto [OPTION]... PATCH')),
2150 'qguard':
2177 'qguard':
2151 (guard,
2178 (guard,
2152 [('l', 'list', None, _('list all patches and guards')),
2179 [('l', 'list', None, _('list all patches and guards')),
2153 ('n', 'none', None, _('drop all guards'))],
2180 ('n', 'none', None, _('drop all guards'))],
2154 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2181 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2155 'qheader': (header, [], _('hg qheader [PATCH]')),
2182 'qheader': (header, [], _('hg qheader [PATCH]')),
2156 "^qimport":
2183 "^qimport":
2157 (qimport,
2184 (qimport,
2158 [('e', 'existing', None, 'import file in patch dir'),
2185 [('e', 'existing', None, 'import file in patch dir'),
2159 ('n', 'name', '', 'patch file name'),
2186 ('n', 'name', '', 'patch file name'),
2160 ('f', 'force', None, 'overwrite existing files'),
2187 ('f', 'force', None, 'overwrite existing files'),
2161 ('r', 'rev', [], 'place existing revisions under mq control'),
2188 ('r', 'rev', [], 'place existing revisions under mq control'),
2162 ('g', 'git', None, _('use git extended diff format'))],
2189 ('g', 'git', None, _('use git extended diff format'))],
2163 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2190 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2164 "^qinit":
2191 "^qinit":
2165 (init,
2192 (init,
2166 [('c', 'create-repo', None, 'create queue repository')],
2193 [('c', 'create-repo', None, 'create queue repository')],
2167 _('hg qinit [-c]')),
2194 _('hg qinit [-c]')),
2168 "qnew":
2195 "qnew":
2169 (new,
2196 (new,
2170 [('e', 'edit', None, _('edit commit message')),
2197 [('e', 'edit', None, _('edit commit message')),
2171 ('f', 'force', None, _('import uncommitted changes into patch')),
2198 ('f', 'force', None, _('import uncommitted changes into patch')),
2172 ('I', 'include', [], _('include names matching the given patterns')),
2199 ('I', 'include', [], _('include names matching the given patterns')),
2173 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2200 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2174 ] + commands.commitopts,
2201 ] + commands.commitopts,
2175 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2202 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2176 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2203 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2177 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2204 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2178 "^qpop":
2205 "^qpop":
2179 (pop,
2206 (pop,
2180 [('a', 'all', None, _('pop all patches')),
2207 [('a', 'all', None, _('pop all patches')),
2181 ('n', 'name', '', _('queue name to pop')),
2208 ('n', 'name', '', _('queue name to pop')),
2182 ('f', 'force', None, _('forget any local changes'))],
2209 ('f', 'force', None, _('forget any local changes'))],
2183 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2210 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2184 "^qpush":
2211 "^qpush":
2185 (push,
2212 (push,
2186 [('f', 'force', None, _('apply if the patch has rejects')),
2213 [('f', 'force', None, _('apply if the patch has rejects')),
2187 ('l', 'list', None, _('list patch name in commit text')),
2214 ('l', 'list', None, _('list patch name in commit text')),
2188 ('a', 'all', None, _('apply all patches')),
2215 ('a', 'all', None, _('apply all patches')),
2189 ('m', 'merge', None, _('merge from another queue')),
2216 ('m', 'merge', None, _('merge from another queue')),
2190 ('n', 'name', '', _('merge queue name'))],
2217 ('n', 'name', '', _('merge queue name'))],
2191 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2218 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2192 "^qrefresh":
2219 "^qrefresh":
2193 (refresh,
2220 (refresh,
2194 [('e', 'edit', None, _('edit commit message')),
2221 [('e', 'edit', None, _('edit commit message')),
2195 ('g', 'git', None, _('use git extended diff format')),
2222 ('g', 'git', None, _('use git extended diff format')),
2196 ('s', 'short', None, _('refresh only files already in the patch')),
2223 ('s', 'short', None, _('refresh only files already in the patch')),
2197 ('I', 'include', [], _('include names matching the given patterns')),
2224 ('I', 'include', [], _('include names matching the given patterns')),
2198 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2225 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2199 ] + commands.commitopts,
2226 ] + commands.commitopts,
2200 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2227 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2201 'qrename|qmv':
2228 'qrename|qmv':
2202 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2229 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2203 "qrestore":
2230 "qrestore":
2204 (restore,
2231 (restore,
2205 [('d', 'delete', None, _('delete save entry')),
2232 [('d', 'delete', None, _('delete save entry')),
2206 ('u', 'update', None, _('update queue working dir'))],
2233 ('u', 'update', None, _('update queue working dir'))],
2207 _('hg qrestore [-d] [-u] REV')),
2234 _('hg qrestore [-d] [-u] REV')),
2208 "qsave":
2235 "qsave":
2209 (save,
2236 (save,
2210 [('c', 'copy', None, _('copy patch directory')),
2237 [('c', 'copy', None, _('copy patch directory')),
2211 ('n', 'name', '', _('copy directory name')),
2238 ('n', 'name', '', _('copy directory name')),
2212 ('e', 'empty', None, _('clear queue status file')),
2239 ('e', 'empty', None, _('clear queue status file')),
2213 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2240 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2214 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2241 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2215 "qselect":
2242 "qselect":
2216 (select,
2243 (select,
2217 [('n', 'none', None, _('disable all guards')),
2244 [('n', 'none', None, _('disable all guards')),
2218 ('s', 'series', None, _('list all guards in series file')),
2245 ('s', 'series', None, _('list all guards in series file')),
2219 ('', 'pop', None, _('pop to before first guarded applied patch')),
2246 ('', 'pop', None, _('pop to before first guarded applied patch')),
2220 ('', 'reapply', None, _('pop, then reapply patches'))],
2247 ('', 'reapply', None, _('pop, then reapply patches'))],
2221 _('hg qselect [OPTION]... [GUARD]...')),
2248 _('hg qselect [OPTION]... [GUARD]...')),
2222 "qseries":
2249 "qseries":
2223 (series,
2250 (series,
2224 [('m', 'missing', None, _('print patches not in series')),
2251 [('m', 'missing', None, _('print patches not in series')),
2225 ] + seriesopts,
2252 ] + seriesopts,
2226 _('hg qseries [-ms]')),
2253 _('hg qseries [-ms]')),
2227 "^strip":
2254 "^strip":
2228 (strip,
2255 (strip,
2229 [('f', 'force', None, _('force multi-head removal')),
2256 [('f', 'force', None, _('force multi-head removal')),
2230 ('b', 'backup', None, _('bundle unrelated changesets')),
2257 ('b', 'backup', None, _('bundle unrelated changesets')),
2231 ('n', 'nobackup', None, _('no backups'))],
2258 ('n', 'nobackup', None, _('no backups'))],
2232 _('hg strip [-f] [-b] [-n] REV')),
2259 _('hg strip [-f] [-b] [-n] REV')),
2233 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2260 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2234 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2261 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2235 }
2262 }
@@ -1,593 +1,600 b''
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 import os, tempfile
9 import os, tempfile
10 from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
10 from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
11 from mercurial import patch, revlog, util
11 from mercurial import patch, revlog, util
12
12
13 '''patch transplanting tool
13 '''patch transplanting tool
14
14
15 This extension allows you to transplant patches from another branch.
15 This extension allows you to transplant patches from another branch.
16
16
17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
18 from a changeset hash to its hash in the source repository.
18 from a changeset hash to its hash in the source repository.
19 '''
19 '''
20
20
21 class transplantentry:
21 class transplantentry:
22 def __init__(self, lnode, rnode):
22 def __init__(self, lnode, rnode):
23 self.lnode = lnode
23 self.lnode = lnode
24 self.rnode = rnode
24 self.rnode = rnode
25
25
26 class transplants:
26 class transplants:
27 def __init__(self, path=None, transplantfile=None, opener=None):
27 def __init__(self, path=None, transplantfile=None, opener=None):
28 self.path = path
28 self.path = path
29 self.transplantfile = transplantfile
29 self.transplantfile = transplantfile
30 self.opener = opener
30 self.opener = opener
31
31
32 if not opener:
32 if not opener:
33 self.opener = util.opener(self.path)
33 self.opener = util.opener(self.path)
34 self.transplants = []
34 self.transplants = []
35 self.dirty = False
35 self.dirty = False
36 self.read()
36 self.read()
37
37
38 def read(self):
38 def read(self):
39 abspath = os.path.join(self.path, self.transplantfile)
39 abspath = os.path.join(self.path, self.transplantfile)
40 if self.transplantfile and os.path.exists(abspath):
40 if self.transplantfile and os.path.exists(abspath):
41 for line in self.opener(self.transplantfile).read().splitlines():
41 for line in self.opener(self.transplantfile).read().splitlines():
42 lnode, rnode = map(revlog.bin, line.split(':'))
42 lnode, rnode = map(revlog.bin, line.split(':'))
43 self.transplants.append(transplantentry(lnode, rnode))
43 self.transplants.append(transplantentry(lnode, rnode))
44
44
45 def write(self):
45 def write(self):
46 if self.dirty and self.transplantfile:
46 if self.dirty and self.transplantfile:
47 if not os.path.isdir(self.path):
47 if not os.path.isdir(self.path):
48 os.mkdir(self.path)
48 os.mkdir(self.path)
49 fp = self.opener(self.transplantfile, 'w')
49 fp = self.opener(self.transplantfile, 'w')
50 for c in self.transplants:
50 for c in self.transplants:
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 fp.write(l + ':' + r + '\n')
52 fp.write(l + ':' + r + '\n')
53 fp.close()
53 fp.close()
54 self.dirty = False
54 self.dirty = False
55
55
56 def get(self, rnode):
56 def get(self, rnode):
57 return [t for t in self.transplants if t.rnode == rnode]
57 return [t for t in self.transplants if t.rnode == rnode]
58
58
59 def set(self, lnode, rnode):
59 def set(self, lnode, rnode):
60 self.transplants.append(transplantentry(lnode, rnode))
60 self.transplants.append(transplantentry(lnode, rnode))
61 self.dirty = True
61 self.dirty = True
62
62
63 def remove(self, transplant):
63 def remove(self, transplant):
64 del self.transplants[self.transplants.index(transplant)]
64 del self.transplants[self.transplants.index(transplant)]
65 self.dirty = True
65 self.dirty = True
66
66
67 class transplanter:
67 class transplanter:
68 def __init__(self, ui, repo):
68 def __init__(self, ui, repo):
69 self.ui = ui
69 self.ui = ui
70 self.path = repo.join('transplant')
70 self.path = repo.join('transplant')
71 self.opener = util.opener(self.path)
71 self.opener = util.opener(self.path)
72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
73
73
74 def applied(self, repo, node, parent):
74 def applied(self, repo, node, parent):
75 '''returns True if a node is already an ancestor of parent
75 '''returns True if a node is already an ancestor of parent
76 or has already been transplanted'''
76 or has already been transplanted'''
77 if hasnode(repo, node):
77 if hasnode(repo, node):
78 if node in repo.changelog.reachable(parent, stop=node):
78 if node in repo.changelog.reachable(parent, stop=node):
79 return True
79 return True
80 for t in self.transplants.get(node):
80 for t in self.transplants.get(node):
81 # it might have been stripped
81 # it might have been stripped
82 if not hasnode(repo, t.lnode):
82 if not hasnode(repo, t.lnode):
83 self.transplants.remove(t)
83 self.transplants.remove(t)
84 return False
84 return False
85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 return True
86 return True
87 return False
87 return False
88
88
89 def apply(self, repo, source, revmap, merges, opts={}):
89 def apply(self, repo, source, revmap, merges, opts={}):
90 '''apply the revisions in revmap one by one in revision order'''
90 '''apply the revisions in revmap one by one in revision order'''
91 revs = revmap.keys()
91 revs = revmap.keys()
92 revs.sort()
92 revs.sort()
93
93
94 p1, p2 = repo.dirstate.parents()
94 p1, p2 = repo.dirstate.parents()
95 pulls = []
95 pulls = []
96 diffopts = patch.diffopts(self.ui, opts)
96 diffopts = patch.diffopts(self.ui, opts)
97 diffopts.git = True
97 diffopts.git = True
98
98
99 wlock = repo.wlock()
99 lock = wlock = None
100 lock = repo.lock()
101 try:
100 try:
101 wlock = repo.wlock()
102 lock = repo.lock()
102 for rev in revs:
103 for rev in revs:
103 node = revmap[rev]
104 node = revmap[rev]
104 revstr = '%s:%s' % (rev, revlog.short(node))
105 revstr = '%s:%s' % (rev, revlog.short(node))
105
106
106 if self.applied(repo, node, p1):
107 if self.applied(repo, node, p1):
107 self.ui.warn(_('skipping already applied revision %s\n') %
108 self.ui.warn(_('skipping already applied revision %s\n') %
108 revstr)
109 revstr)
109 continue
110 continue
110
111
111 parents = source.changelog.parents(node)
112 parents = source.changelog.parents(node)
112 if not opts.get('filter'):
113 if not opts.get('filter'):
113 # If the changeset parent is the same as the wdir's parent,
114 # If the changeset parent is the same as the wdir's parent,
114 # just pull it.
115 # just pull it.
115 if parents[0] == p1:
116 if parents[0] == p1:
116 pulls.append(node)
117 pulls.append(node)
117 p1 = node
118 p1 = node
118 continue
119 continue
119 if pulls:
120 if pulls:
120 if source != repo:
121 if source != repo:
121 repo.pull(source, heads=pulls, lock=lock)
122 repo.pull(source, heads=pulls, lock=lock)
122 merge.update(repo, pulls[-1], False, False, None,
123 merge.update(repo, pulls[-1], False, False, None,
123 wlock=wlock)
124 wlock=wlock)
124 p1, p2 = repo.dirstate.parents()
125 p1, p2 = repo.dirstate.parents()
125 pulls = []
126 pulls = []
126
127
127 domerge = False
128 domerge = False
128 if node in merges:
129 if node in merges:
129 # pulling all the merge revs at once would mean we couldn't
130 # pulling all the merge revs at once would mean we couldn't
130 # transplant after the latest even if transplants before them
131 # transplant after the latest even if transplants before them
131 # fail.
132 # fail.
132 domerge = True
133 domerge = True
133 if not hasnode(repo, node):
134 if not hasnode(repo, node):
134 repo.pull(source, heads=[node], lock=lock)
135 repo.pull(source, heads=[node], lock=lock)
135
136
136 if parents[1] != revlog.nullid:
137 if parents[1] != revlog.nullid:
137 self.ui.note(_('skipping merge changeset %s:%s\n')
138 self.ui.note(_('skipping merge changeset %s:%s\n')
138 % (rev, revlog.short(node)))
139 % (rev, revlog.short(node)))
139 patchfile = None
140 patchfile = None
140 else:
141 else:
141 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
142 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
142 fp = os.fdopen(fd, 'w')
143 fp = os.fdopen(fd, 'w')
143 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
144 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
144 fp.close()
145 fp.close()
145
146
146 del revmap[rev]
147 del revmap[rev]
147 if patchfile or domerge:
148 if patchfile or domerge:
148 try:
149 try:
149 n = self.applyone(repo, node, source.changelog.read(node),
150 n = self.applyone(repo, node, source.changelog.read(node),
150 patchfile, merge=domerge,
151 patchfile, merge=domerge,
151 log=opts.get('log'),
152 log=opts.get('log'),
152 filter=opts.get('filter'),
153 filter=opts.get('filter'),
153 lock=lock, wlock=wlock)
154 lock=lock, wlock=wlock)
154 if n and domerge:
155 if n and domerge:
155 self.ui.status(_('%s merged at %s\n') % (revstr,
156 self.ui.status(_('%s merged at %s\n') % (revstr,
156 revlog.short(n)))
157 revlog.short(n)))
157 elif n:
158 elif n:
158 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
159 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
159 revlog.short(n)))
160 revlog.short(n)))
160 finally:
161 finally:
161 if patchfile:
162 if patchfile:
162 os.unlink(patchfile)
163 os.unlink(patchfile)
163 if pulls:
164 if pulls:
164 repo.pull(source, heads=pulls, lock=lock)
165 repo.pull(source, heads=pulls, lock=lock)
165 merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
166 merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
166 finally:
167 finally:
167 self.saveseries(revmap, merges)
168 self.saveseries(revmap, merges)
168 self.transplants.write()
169 self.transplants.write()
170 del lock, wlock
169
171
170 def filter(self, filter, changelog, patchfile):
172 def filter(self, filter, changelog, patchfile):
171 '''arbitrarily rewrite changeset before applying it'''
173 '''arbitrarily rewrite changeset before applying it'''
172
174
173 self.ui.status('filtering %s\n' % patchfile)
175 self.ui.status('filtering %s\n' % patchfile)
174 user, date, msg = (changelog[1], changelog[2], changelog[4])
176 user, date, msg = (changelog[1], changelog[2], changelog[4])
175
177
176 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
178 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
177 fp = os.fdopen(fd, 'w')
179 fp = os.fdopen(fd, 'w')
178 fp.write("# HG changeset patch\n")
180 fp.write("# HG changeset patch\n")
179 fp.write("# User %s\n" % user)
181 fp.write("# User %s\n" % user)
180 fp.write("# Date %d %d\n" % date)
182 fp.write("# Date %d %d\n" % date)
181 fp.write(changelog[4])
183 fp.write(changelog[4])
182 fp.close()
184 fp.close()
183
185
184 try:
186 try:
185 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
187 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
186 util.shellquote(patchfile)),
188 util.shellquote(patchfile)),
187 environ={'HGUSER': changelog[1]},
189 environ={'HGUSER': changelog[1]},
188 onerr=util.Abort, errprefix=_('filter failed'))
190 onerr=util.Abort, errprefix=_('filter failed'))
189 user, date, msg = self.parselog(file(headerfile))[1:4]
191 user, date, msg = self.parselog(file(headerfile))[1:4]
190 finally:
192 finally:
191 os.unlink(headerfile)
193 os.unlink(headerfile)
192
194
193 return (user, date, msg)
195 return (user, date, msg)
194
196
195 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
197 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
196 filter=None, lock=None, wlock=None):
198 filter=None, lock=None, wlock=None):
197 '''apply the patch in patchfile to the repository as a transplant'''
199 '''apply the patch in patchfile to the repository as a transplant'''
198 (manifest, user, (time, timezone), files, message) = cl[:5]
200 (manifest, user, (time, timezone), files, message) = cl[:5]
199 date = "%d %d" % (time, timezone)
201 date = "%d %d" % (time, timezone)
200 extra = {'transplant_source': node}
202 extra = {'transplant_source': node}
201 if filter:
203 if filter:
202 (user, date, message) = self.filter(filter, cl, patchfile)
204 (user, date, message) = self.filter(filter, cl, patchfile)
203
205
204 if log:
206 if log:
205 message += '\n(transplanted from %s)' % revlog.hex(node)
207 message += '\n(transplanted from %s)' % revlog.hex(node)
206
208
207 self.ui.status(_('applying %s\n') % revlog.short(node))
209 self.ui.status(_('applying %s\n') % revlog.short(node))
208 self.ui.note('%s %s\n%s\n' % (user, date, message))
210 self.ui.note('%s %s\n%s\n' % (user, date, message))
209
211
210 if not patchfile and not merge:
212 if not patchfile and not merge:
211 raise util.Abort(_('can only omit patchfile if merging'))
213 raise util.Abort(_('can only omit patchfile if merging'))
212 if patchfile:
214 if patchfile:
213 try:
215 try:
214 files = {}
216 files = {}
215 try:
217 try:
216 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
218 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
217 files=files)
219 files=files)
218 if not files:
220 if not files:
219 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
221 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
220 return None
222 return None
221 finally:
223 finally:
222 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
224 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
223 except Exception, inst:
225 except Exception, inst:
224 if filter:
226 if filter:
225 os.unlink(patchfile)
227 os.unlink(patchfile)
226 seriespath = os.path.join(self.path, 'series')
228 seriespath = os.path.join(self.path, 'series')
227 if os.path.exists(seriespath):
229 if os.path.exists(seriespath):
228 os.unlink(seriespath)
230 os.unlink(seriespath)
229 p1 = repo.dirstate.parents()[0]
231 p1 = repo.dirstate.parents()[0]
230 p2 = node
232 p2 = node
231 self.log(user, date, message, p1, p2, merge=merge)
233 self.log(user, date, message, p1, p2, merge=merge)
232 self.ui.write(str(inst) + '\n')
234 self.ui.write(str(inst) + '\n')
233 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
235 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
234 else:
236 else:
235 files = None
237 files = None
236 if merge:
238 if merge:
237 p1, p2 = repo.dirstate.parents()
239 p1, p2 = repo.dirstate.parents()
238 repo.dirstate.setparents(p1, node)
240 repo.dirstate.setparents(p1, node)
239
241
240 n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
242 n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
241 extra=extra)
243 extra=extra)
242 if not merge:
244 if not merge:
243 self.transplants.set(n, node)
245 self.transplants.set(n, node)
244
246
245 return n
247 return n
246
248
247 def resume(self, repo, source, opts=None):
249 def resume(self, repo, source, opts=None):
248 '''recover last transaction and apply remaining changesets'''
250 '''recover last transaction and apply remaining changesets'''
249 if os.path.exists(os.path.join(self.path, 'journal')):
251 if os.path.exists(os.path.join(self.path, 'journal')):
250 n, node = self.recover(repo)
252 n, node = self.recover(repo)
251 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
253 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
252 revlog.short(n)))
254 revlog.short(n)))
253 seriespath = os.path.join(self.path, 'series')
255 seriespath = os.path.join(self.path, 'series')
254 if not os.path.exists(seriespath):
256 if not os.path.exists(seriespath):
255 self.transplants.write()
257 self.transplants.write()
256 return
258 return
257 nodes, merges = self.readseries()
259 nodes, merges = self.readseries()
258 revmap = {}
260 revmap = {}
259 for n in nodes:
261 for n in nodes:
260 revmap[source.changelog.rev(n)] = n
262 revmap[source.changelog.rev(n)] = n
261 os.unlink(seriespath)
263 os.unlink(seriespath)
262
264
263 self.apply(repo, source, revmap, merges, opts)
265 self.apply(repo, source, revmap, merges, opts)
264
266
265 def recover(self, repo):
267 def recover(self, repo):
266 '''commit working directory using journal metadata'''
268 '''commit working directory using journal metadata'''
267 node, user, date, message, parents = self.readlog()
269 node, user, date, message, parents = self.readlog()
268 merge = len(parents) == 2
270 merge = len(parents) == 2
269
271
270 if not user or not date or not message or not parents[0]:
272 if not user or not date or not message or not parents[0]:
271 raise util.Abort(_('transplant log file is corrupt'))
273 raise util.Abort(_('transplant log file is corrupt'))
272
274
273 extra = {'transplant_source': node}
275 extra = {'transplant_source': node}
274 wlock = repo.wlock()
276 wlock = repo.wlock()
275 p1, p2 = repo.dirstate.parents()
277 try:
276 if p1 != parents[0]:
278 p1, p2 = repo.dirstate.parents()
277 raise util.Abort(_('working dir not at transplant parent %s') %
279 if p1 != parents[0]:
278 revlog.hex(parents[0]))
280 raise util.Abort(
279 if merge:
281 _('working dir not at transplant parent %s') %
280 repo.dirstate.setparents(p1, parents[1])
282 revlog.hex(parents[0]))
281 n = repo.commit(None, message, user, date, wlock=wlock, extra=extra)
283 if merge:
282 if not n:
284 repo.dirstate.setparents(p1, parents[1])
283 raise util.Abort(_('commit failed'))
285 n = repo.commit(None, message, user, date, wlock=wlock,
284 if not merge:
286 extra=extra)
285 self.transplants.set(n, node)
287 if not n:
286 self.unlog()
288 raise util.Abort(_('commit failed'))
289 if not merge:
290 self.transplants.set(n, node)
291 self.unlog()
287
292
288 return n, node
293 return n, node
294 finally:
295 del wlock
289
296
290 def readseries(self):
297 def readseries(self):
291 nodes = []
298 nodes = []
292 merges = []
299 merges = []
293 cur = nodes
300 cur = nodes
294 for line in self.opener('series').read().splitlines():
301 for line in self.opener('series').read().splitlines():
295 if line.startswith('# Merges'):
302 if line.startswith('# Merges'):
296 cur = merges
303 cur = merges
297 continue
304 continue
298 cur.append(revlog.bin(line))
305 cur.append(revlog.bin(line))
299
306
300 return (nodes, merges)
307 return (nodes, merges)
301
308
302 def saveseries(self, revmap, merges):
309 def saveseries(self, revmap, merges):
303 if not revmap:
310 if not revmap:
304 return
311 return
305
312
306 if not os.path.isdir(self.path):
313 if not os.path.isdir(self.path):
307 os.mkdir(self.path)
314 os.mkdir(self.path)
308 series = self.opener('series', 'w')
315 series = self.opener('series', 'w')
309 revs = revmap.keys()
316 revs = revmap.keys()
310 revs.sort()
317 revs.sort()
311 for rev in revs:
318 for rev in revs:
312 series.write(revlog.hex(revmap[rev]) + '\n')
319 series.write(revlog.hex(revmap[rev]) + '\n')
313 if merges:
320 if merges:
314 series.write('# Merges\n')
321 series.write('# Merges\n')
315 for m in merges:
322 for m in merges:
316 series.write(revlog.hex(m) + '\n')
323 series.write(revlog.hex(m) + '\n')
317 series.close()
324 series.close()
318
325
319 def parselog(self, fp):
326 def parselog(self, fp):
320 parents = []
327 parents = []
321 message = []
328 message = []
322 node = revlog.nullid
329 node = revlog.nullid
323 inmsg = False
330 inmsg = False
324 for line in fp.read().splitlines():
331 for line in fp.read().splitlines():
325 if inmsg:
332 if inmsg:
326 message.append(line)
333 message.append(line)
327 elif line.startswith('# User '):
334 elif line.startswith('# User '):
328 user = line[7:]
335 user = line[7:]
329 elif line.startswith('# Date '):
336 elif line.startswith('# Date '):
330 date = line[7:]
337 date = line[7:]
331 elif line.startswith('# Node ID '):
338 elif line.startswith('# Node ID '):
332 node = revlog.bin(line[10:])
339 node = revlog.bin(line[10:])
333 elif line.startswith('# Parent '):
340 elif line.startswith('# Parent '):
334 parents.append(revlog.bin(line[9:]))
341 parents.append(revlog.bin(line[9:]))
335 elif not line.startswith('#'):
342 elif not line.startswith('#'):
336 inmsg = True
343 inmsg = True
337 message.append(line)
344 message.append(line)
338 return (node, user, date, '\n'.join(message), parents)
345 return (node, user, date, '\n'.join(message), parents)
339
346
340 def log(self, user, date, message, p1, p2, merge=False):
347 def log(self, user, date, message, p1, p2, merge=False):
341 '''journal changelog metadata for later recover'''
348 '''journal changelog metadata for later recover'''
342
349
343 if not os.path.isdir(self.path):
350 if not os.path.isdir(self.path):
344 os.mkdir(self.path)
351 os.mkdir(self.path)
345 fp = self.opener('journal', 'w')
352 fp = self.opener('journal', 'w')
346 fp.write('# User %s\n' % user)
353 fp.write('# User %s\n' % user)
347 fp.write('# Date %s\n' % date)
354 fp.write('# Date %s\n' % date)
348 fp.write('# Node ID %s\n' % revlog.hex(p2))
355 fp.write('# Node ID %s\n' % revlog.hex(p2))
349 fp.write('# Parent ' + revlog.hex(p1) + '\n')
356 fp.write('# Parent ' + revlog.hex(p1) + '\n')
350 if merge:
357 if merge:
351 fp.write('# Parent ' + revlog.hex(p2) + '\n')
358 fp.write('# Parent ' + revlog.hex(p2) + '\n')
352 fp.write(message.rstrip() + '\n')
359 fp.write(message.rstrip() + '\n')
353 fp.close()
360 fp.close()
354
361
355 def readlog(self):
362 def readlog(self):
356 return self.parselog(self.opener('journal'))
363 return self.parselog(self.opener('journal'))
357
364
358 def unlog(self):
365 def unlog(self):
359 '''remove changelog journal'''
366 '''remove changelog journal'''
360 absdst = os.path.join(self.path, 'journal')
367 absdst = os.path.join(self.path, 'journal')
361 if os.path.exists(absdst):
368 if os.path.exists(absdst):
362 os.unlink(absdst)
369 os.unlink(absdst)
363
370
364 def transplantfilter(self, repo, source, root):
371 def transplantfilter(self, repo, source, root):
365 def matchfn(node):
372 def matchfn(node):
366 if self.applied(repo, node, root):
373 if self.applied(repo, node, root):
367 return False
374 return False
368 if source.changelog.parents(node)[1] != revlog.nullid:
375 if source.changelog.parents(node)[1] != revlog.nullid:
369 return False
376 return False
370 extra = source.changelog.read(node)[5]
377 extra = source.changelog.read(node)[5]
371 cnode = extra.get('transplant_source')
378 cnode = extra.get('transplant_source')
372 if cnode and self.applied(repo, cnode, root):
379 if cnode and self.applied(repo, cnode, root):
373 return False
380 return False
374 return True
381 return True
375
382
376 return matchfn
383 return matchfn
377
384
378 def hasnode(repo, node):
385 def hasnode(repo, node):
379 try:
386 try:
380 return repo.changelog.rev(node) != None
387 return repo.changelog.rev(node) != None
381 except revlog.RevlogError:
388 except revlog.RevlogError:
382 return False
389 return False
383
390
384 def browserevs(ui, repo, nodes, opts):
391 def browserevs(ui, repo, nodes, opts):
385 '''interactively transplant changesets'''
392 '''interactively transplant changesets'''
386 def browsehelp(ui):
393 def browsehelp(ui):
387 ui.write('y: transplant this changeset\n'
394 ui.write('y: transplant this changeset\n'
388 'n: skip this changeset\n'
395 'n: skip this changeset\n'
389 'm: merge at this changeset\n'
396 'm: merge at this changeset\n'
390 'p: show patch\n'
397 'p: show patch\n'
391 'c: commit selected changesets\n'
398 'c: commit selected changesets\n'
392 'q: cancel transplant\n'
399 'q: cancel transplant\n'
393 '?: show this help\n')
400 '?: show this help\n')
394
401
395 displayer = cmdutil.show_changeset(ui, repo, opts)
402 displayer = cmdutil.show_changeset(ui, repo, opts)
396 transplants = []
403 transplants = []
397 merges = []
404 merges = []
398 for node in nodes:
405 for node in nodes:
399 displayer.show(changenode=node)
406 displayer.show(changenode=node)
400 action = None
407 action = None
401 while not action:
408 while not action:
402 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
409 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
403 if action == '?':
410 if action == '?':
404 browsehelp(ui)
411 browsehelp(ui)
405 action = None
412 action = None
406 elif action == 'p':
413 elif action == 'p':
407 parent = repo.changelog.parents(node)[0]
414 parent = repo.changelog.parents(node)[0]
408 patch.diff(repo, parent, node)
415 patch.diff(repo, parent, node)
409 action = None
416 action = None
410 elif action not in ('y', 'n', 'm', 'c', 'q'):
417 elif action not in ('y', 'n', 'm', 'c', 'q'):
411 ui.write('no such option\n')
418 ui.write('no such option\n')
412 action = None
419 action = None
413 if action == 'y':
420 if action == 'y':
414 transplants.append(node)
421 transplants.append(node)
415 elif action == 'm':
422 elif action == 'm':
416 merges.append(node)
423 merges.append(node)
417 elif action == 'c':
424 elif action == 'c':
418 break
425 break
419 elif action == 'q':
426 elif action == 'q':
420 transplants = ()
427 transplants = ()
421 merges = ()
428 merges = ()
422 break
429 break
423 return (transplants, merges)
430 return (transplants, merges)
424
431
425 def transplant(ui, repo, *revs, **opts):
432 def transplant(ui, repo, *revs, **opts):
426 '''transplant changesets from another branch
433 '''transplant changesets from another branch
427
434
428 Selected changesets will be applied on top of the current working
435 Selected changesets will be applied on top of the current working
429 directory with the log of the original changeset. If --log is
436 directory with the log of the original changeset. If --log is
430 specified, log messages will have a comment appended of the form:
437 specified, log messages will have a comment appended of the form:
431
438
432 (transplanted from CHANGESETHASH)
439 (transplanted from CHANGESETHASH)
433
440
434 You can rewrite the changelog message with the --filter option.
441 You can rewrite the changelog message with the --filter option.
435 Its argument will be invoked with the current changelog message
442 Its argument will be invoked with the current changelog message
436 as $1 and the patch as $2.
443 as $1 and the patch as $2.
437
444
438 If --source is specified, selects changesets from the named
445 If --source is specified, selects changesets from the named
439 repository. If --branch is specified, selects changesets from the
446 repository. If --branch is specified, selects changesets from the
440 branch holding the named revision, up to that revision. If --all
447 branch holding the named revision, up to that revision. If --all
441 is specified, all changesets on the branch will be transplanted,
448 is specified, all changesets on the branch will be transplanted,
442 otherwise you will be prompted to select the changesets you want.
449 otherwise you will be prompted to select the changesets you want.
443
450
444 hg transplant --branch REVISION --all will rebase the selected branch
451 hg transplant --branch REVISION --all will rebase the selected branch
445 (up to the named revision) onto your current working directory.
452 (up to the named revision) onto your current working directory.
446
453
447 You can optionally mark selected transplanted changesets as
454 You can optionally mark selected transplanted changesets as
448 merge changesets. You will not be prompted to transplant any
455 merge changesets. You will not be prompted to transplant any
449 ancestors of a merged transplant, and you can merge descendants
456 ancestors of a merged transplant, and you can merge descendants
450 of them normally instead of transplanting them.
457 of them normally instead of transplanting them.
451
458
452 If no merges or revisions are provided, hg transplant will start
459 If no merges or revisions are provided, hg transplant will start
453 an interactive changeset browser.
460 an interactive changeset browser.
454
461
455 If a changeset application fails, you can fix the merge by hand and
462 If a changeset application fails, you can fix the merge by hand and
456 then resume where you left off by calling hg transplant --continue.
463 then resume where you left off by calling hg transplant --continue.
457 '''
464 '''
458 def getoneitem(opts, item, errmsg):
465 def getoneitem(opts, item, errmsg):
459 val = opts.get(item)
466 val = opts.get(item)
460 if val:
467 if val:
461 if len(val) > 1:
468 if len(val) > 1:
462 raise util.Abort(errmsg)
469 raise util.Abort(errmsg)
463 else:
470 else:
464 return val[0]
471 return val[0]
465
472
466 def getremotechanges(repo, url):
473 def getremotechanges(repo, url):
467 sourcerepo = ui.expandpath(url)
474 sourcerepo = ui.expandpath(url)
468 source = hg.repository(ui, sourcerepo)
475 source = hg.repository(ui, sourcerepo)
469 incoming = repo.findincoming(source, force=True)
476 incoming = repo.findincoming(source, force=True)
470 if not incoming:
477 if not incoming:
471 return (source, None, None)
478 return (source, None, None)
472
479
473 bundle = None
480 bundle = None
474 if not source.local():
481 if not source.local():
475 cg = source.changegroup(incoming, 'incoming')
482 cg = source.changegroup(incoming, 'incoming')
476 bundle = changegroup.writebundle(cg, None, 'HG10UN')
483 bundle = changegroup.writebundle(cg, None, 'HG10UN')
477 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
484 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
478
485
479 return (source, incoming, bundle)
486 return (source, incoming, bundle)
480
487
481 def incwalk(repo, incoming, branches, match=util.always):
488 def incwalk(repo, incoming, branches, match=util.always):
482 if not branches:
489 if not branches:
483 branches=None
490 branches=None
484 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
491 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
485 if match(node):
492 if match(node):
486 yield node
493 yield node
487
494
488 def transplantwalk(repo, root, branches, match=util.always):
495 def transplantwalk(repo, root, branches, match=util.always):
489 if not branches:
496 if not branches:
490 branches = repo.heads()
497 branches = repo.heads()
491 ancestors = []
498 ancestors = []
492 for branch in branches:
499 for branch in branches:
493 ancestors.append(repo.changelog.ancestor(root, branch))
500 ancestors.append(repo.changelog.ancestor(root, branch))
494 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
501 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
495 if match(node):
502 if match(node):
496 yield node
503 yield node
497
504
498 def checkopts(opts, revs):
505 def checkopts(opts, revs):
499 if opts.get('continue'):
506 if opts.get('continue'):
500 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
507 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
501 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
508 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
502 return
509 return
503 if not (opts.get('source') or revs or
510 if not (opts.get('source') or revs or
504 opts.get('merge') or opts.get('branch')):
511 opts.get('merge') or opts.get('branch')):
505 raise util.Abort(_('no source URL, branch tag or revision list provided'))
512 raise util.Abort(_('no source URL, branch tag or revision list provided'))
506 if opts.get('all'):
513 if opts.get('all'):
507 if not opts.get('branch'):
514 if not opts.get('branch'):
508 raise util.Abort(_('--all requires a branch revision'))
515 raise util.Abort(_('--all requires a branch revision'))
509 if revs:
516 if revs:
510 raise util.Abort(_('--all is incompatible with a revision list'))
517 raise util.Abort(_('--all is incompatible with a revision list'))
511
518
512 checkopts(opts, revs)
519 checkopts(opts, revs)
513
520
514 if not opts.get('log'):
521 if not opts.get('log'):
515 opts['log'] = ui.config('transplant', 'log')
522 opts['log'] = ui.config('transplant', 'log')
516 if not opts.get('filter'):
523 if not opts.get('filter'):
517 opts['filter'] = ui.config('transplant', 'filter')
524 opts['filter'] = ui.config('transplant', 'filter')
518
525
519 tp = transplanter(ui, repo)
526 tp = transplanter(ui, repo)
520
527
521 p1, p2 = repo.dirstate.parents()
528 p1, p2 = repo.dirstate.parents()
522 if p1 == revlog.nullid:
529 if p1 == revlog.nullid:
523 raise util.Abort(_('no revision checked out'))
530 raise util.Abort(_('no revision checked out'))
524 if not opts.get('continue'):
531 if not opts.get('continue'):
525 if p2 != revlog.nullid:
532 if p2 != revlog.nullid:
526 raise util.Abort(_('outstanding uncommitted merges'))
533 raise util.Abort(_('outstanding uncommitted merges'))
527 m, a, r, d = repo.status()[:4]
534 m, a, r, d = repo.status()[:4]
528 if m or a or r or d:
535 if m or a or r or d:
529 raise util.Abort(_('outstanding local changes'))
536 raise util.Abort(_('outstanding local changes'))
530
537
531 bundle = None
538 bundle = None
532 source = opts.get('source')
539 source = opts.get('source')
533 if source:
540 if source:
534 (source, incoming, bundle) = getremotechanges(repo, source)
541 (source, incoming, bundle) = getremotechanges(repo, source)
535 else:
542 else:
536 source = repo
543 source = repo
537
544
538 try:
545 try:
539 if opts.get('continue'):
546 if opts.get('continue'):
540 tp.resume(repo, source, opts)
547 tp.resume(repo, source, opts)
541 return
548 return
542
549
543 tf=tp.transplantfilter(repo, source, p1)
550 tf=tp.transplantfilter(repo, source, p1)
544 if opts.get('prune'):
551 if opts.get('prune'):
545 prune = [source.lookup(r)
552 prune = [source.lookup(r)
546 for r in cmdutil.revrange(source, opts.get('prune'))]
553 for r in cmdutil.revrange(source, opts.get('prune'))]
547 matchfn = lambda x: tf(x) and x not in prune
554 matchfn = lambda x: tf(x) and x not in prune
548 else:
555 else:
549 matchfn = tf
556 matchfn = tf
550 branches = map(source.lookup, opts.get('branch', ()))
557 branches = map(source.lookup, opts.get('branch', ()))
551 merges = map(source.lookup, opts.get('merge', ()))
558 merges = map(source.lookup, opts.get('merge', ()))
552 revmap = {}
559 revmap = {}
553 if revs:
560 if revs:
554 for r in cmdutil.revrange(source, revs):
561 for r in cmdutil.revrange(source, revs):
555 revmap[int(r)] = source.lookup(r)
562 revmap[int(r)] = source.lookup(r)
556 elif opts.get('all') or not merges:
563 elif opts.get('all') or not merges:
557 if source != repo:
564 if source != repo:
558 alltransplants = incwalk(source, incoming, branches, match=matchfn)
565 alltransplants = incwalk(source, incoming, branches, match=matchfn)
559 else:
566 else:
560 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
567 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
561 if opts.get('all'):
568 if opts.get('all'):
562 revs = alltransplants
569 revs = alltransplants
563 else:
570 else:
564 revs, newmerges = browserevs(ui, source, alltransplants, opts)
571 revs, newmerges = browserevs(ui, source, alltransplants, opts)
565 merges.extend(newmerges)
572 merges.extend(newmerges)
566 for r in revs:
573 for r in revs:
567 revmap[source.changelog.rev(r)] = r
574 revmap[source.changelog.rev(r)] = r
568 for r in merges:
575 for r in merges:
569 revmap[source.changelog.rev(r)] = r
576 revmap[source.changelog.rev(r)] = r
570
577
571 revs = revmap.keys()
578 revs = revmap.keys()
572 revs.sort()
579 revs.sort()
573 pulls = []
580 pulls = []
574
581
575 tp.apply(repo, source, revmap, merges, opts)
582 tp.apply(repo, source, revmap, merges, opts)
576 finally:
583 finally:
577 if bundle:
584 if bundle:
578 source.close()
585 source.close()
579 os.unlink(bundle)
586 os.unlink(bundle)
580
587
581 cmdtable = {
588 cmdtable = {
582 "transplant":
589 "transplant":
583 (transplant,
590 (transplant,
584 [('s', 'source', '', _('pull patches from REPOSITORY')),
591 [('s', 'source', '', _('pull patches from REPOSITORY')),
585 ('b', 'branch', [], _('pull patches from branch BRANCH')),
592 ('b', 'branch', [], _('pull patches from branch BRANCH')),
586 ('a', 'all', None, _('pull all changesets up to BRANCH')),
593 ('a', 'all', None, _('pull all changesets up to BRANCH')),
587 ('p', 'prune', [], _('skip over REV')),
594 ('p', 'prune', [], _('skip over REV')),
588 ('m', 'merge', [], _('merge at REV')),
595 ('m', 'merge', [], _('merge at REV')),
589 ('', 'log', None, _('append transplant info to log message')),
596 ('', 'log', None, _('append transplant info to log message')),
590 ('c', 'continue', None, _('continue last transplant session after repair')),
597 ('c', 'continue', None, _('continue last transplant session after repair')),
591 ('', 'filter', '', _('filter changesets through FILTER'))],
598 ('', 'filter', '', _('filter changesets through FILTER'))],
592 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
599 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
593 }
600 }
@@ -1,3164 +1,3180 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, urllib, shlex, stat
11 import bisect, os, re, sys, urllib, shlex, stat
12 import ui, hg, util, revlog, bundlerepo, extensions
12 import ui, hg, util, revlog, bundlerepo, extensions
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import errno, version, socket
14 import errno, version, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
17 # Commands start here, listed alphabetically
17 # Commands start here, listed alphabetically
18
18
19 def add(ui, repo, *pats, **opts):
19 def add(ui, repo, *pats, **opts):
20 """add the specified files on the next commit
20 """add the specified files on the next commit
21
21
22 Schedule files to be version controlled and added to the repository.
22 Schedule files to be version controlled and added to the repository.
23
23
24 The files will be added to the repository at the next commit. To
24 The files will be added to the repository at the next commit. To
25 undo an add before that, see hg revert.
25 undo an add before that, see hg revert.
26
26
27 If no names are given, add all files in the repository.
27 If no names are given, add all files in the repository.
28 """
28 """
29
29
30 names = []
30 names = []
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
32 if exact:
32 if exact:
33 if ui.verbose:
33 if ui.verbose:
34 ui.status(_('adding %s\n') % rel)
34 ui.status(_('adding %s\n') % rel)
35 names.append(abs)
35 names.append(abs)
36 elif abs not in repo.dirstate:
36 elif abs not in repo.dirstate:
37 ui.status(_('adding %s\n') % rel)
37 ui.status(_('adding %s\n') % rel)
38 names.append(abs)
38 names.append(abs)
39 if not opts.get('dry_run'):
39 if not opts.get('dry_run'):
40 repo.add(names)
40 repo.add(names)
41
41
42 def addremove(ui, repo, *pats, **opts):
42 def addremove(ui, repo, *pats, **opts):
43 """add all new files, delete all missing files
43 """add all new files, delete all missing files
44
44
45 Add all new files and remove all missing files from the repository.
45 Add all new files and remove all missing files from the repository.
46
46
47 New files are ignored if they match any of the patterns in .hgignore. As
47 New files are ignored if they match any of the patterns in .hgignore. As
48 with add, these changes take effect at the next commit.
48 with add, these changes take effect at the next commit.
49
49
50 Use the -s option to detect renamed files. With a parameter > 0,
50 Use the -s option to detect renamed files. With a parameter > 0,
51 this compares every removed file with every added file and records
51 this compares every removed file with every added file and records
52 those similar enough as renames. This option takes a percentage
52 those similar enough as renames. This option takes a percentage
53 between 0 (disabled) and 100 (files must be identical) as its
53 between 0 (disabled) and 100 (files must be identical) as its
54 parameter. Detecting renamed files this way can be expensive.
54 parameter. Detecting renamed files this way can be expensive.
55 """
55 """
56 sim = float(opts.get('similarity') or 0)
56 sim = float(opts.get('similarity') or 0)
57 if sim < 0 or sim > 100:
57 if sim < 0 or sim > 100:
58 raise util.Abort(_('similarity must be between 0 and 100'))
58 raise util.Abort(_('similarity must be between 0 and 100'))
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
60
60
61 def annotate(ui, repo, *pats, **opts):
61 def annotate(ui, repo, *pats, **opts):
62 """show changeset information per file line
62 """show changeset information per file line
63
63
64 List changes in files, showing the revision id responsible for each line
64 List changes in files, showing the revision id responsible for each line
65
65
66 This command is useful to discover who did a change or when a change took
66 This command is useful to discover who did a change or when a change took
67 place.
67 place.
68
68
69 Without the -a option, annotate will avoid processing files it
69 Without the -a option, annotate will avoid processing files it
70 detects as binary. With -a, annotate will generate an annotation
70 detects as binary. With -a, annotate will generate an annotation
71 anyway, probably with undesirable results.
71 anyway, probably with undesirable results.
72 """
72 """
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
74
74
75 if not pats:
75 if not pats:
76 raise util.Abort(_('at least one file name or pattern required'))
76 raise util.Abort(_('at least one file name or pattern required'))
77
77
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
79 ('number', lambda x: str(x[0].rev())),
79 ('number', lambda x: str(x[0].rev())),
80 ('changeset', lambda x: short(x[0].node())),
80 ('changeset', lambda x: short(x[0].node())),
81 ('date', getdate),
81 ('date', getdate),
82 ('follow', lambda x: x[0].path()),
82 ('follow', lambda x: x[0].path()),
83 ]
83 ]
84
84
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
86 and not opts['follow']):
86 and not opts['follow']):
87 opts['number'] = 1
87 opts['number'] = 1
88
88
89 linenumber = opts.get('line_number') is not None
89 linenumber = opts.get('line_number') is not None
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
92
92
93 funcmap = [func for op, func in opmap if opts.get(op)]
93 funcmap = [func for op, func in opmap if opts.get(op)]
94 if linenumber:
94 if linenumber:
95 lastfunc = funcmap[-1]
95 lastfunc = funcmap[-1]
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
97
97
98 ctx = repo.changectx(opts['rev'])
98 ctx = repo.changectx(opts['rev'])
99
99
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
101 node=ctx.node()):
101 node=ctx.node()):
102 fctx = ctx.filectx(abs)
102 fctx = ctx.filectx(abs)
103 if not opts['text'] and util.binary(fctx.data()):
103 if not opts['text'] and util.binary(fctx.data()):
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
105 continue
105 continue
106
106
107 lines = fctx.annotate(follow=opts.get('follow'),
107 lines = fctx.annotate(follow=opts.get('follow'),
108 linenumber=linenumber)
108 linenumber=linenumber)
109 pieces = []
109 pieces = []
110
110
111 for f in funcmap:
111 for f in funcmap:
112 l = [f(n) for n, dummy in lines]
112 l = [f(n) for n, dummy in lines]
113 if l:
113 if l:
114 m = max(map(len, l))
114 m = max(map(len, l))
115 pieces.append(["%*s" % (m, x) for x in l])
115 pieces.append(["%*s" % (m, x) for x in l])
116
116
117 if pieces:
117 if pieces:
118 for p, l in zip(zip(*pieces), lines):
118 for p, l in zip(zip(*pieces), lines):
119 ui.write("%s: %s" % (" ".join(p), l[1]))
119 ui.write("%s: %s" % (" ".join(p), l[1]))
120
120
121 def archive(ui, repo, dest, **opts):
121 def archive(ui, repo, dest, **opts):
122 '''create unversioned archive of a repository revision
122 '''create unversioned archive of a repository revision
123
123
124 By default, the revision used is the parent of the working
124 By default, the revision used is the parent of the working
125 directory; use "-r" to specify a different revision.
125 directory; use "-r" to specify a different revision.
126
126
127 To specify the type of archive to create, use "-t". Valid
127 To specify the type of archive to create, use "-t". Valid
128 types are:
128 types are:
129
129
130 "files" (default): a directory full of files
130 "files" (default): a directory full of files
131 "tar": tar archive, uncompressed
131 "tar": tar archive, uncompressed
132 "tbz2": tar archive, compressed using bzip2
132 "tbz2": tar archive, compressed using bzip2
133 "tgz": tar archive, compressed using gzip
133 "tgz": tar archive, compressed using gzip
134 "uzip": zip archive, uncompressed
134 "uzip": zip archive, uncompressed
135 "zip": zip archive, compressed using deflate
135 "zip": zip archive, compressed using deflate
136
136
137 The exact name of the destination archive or directory is given
137 The exact name of the destination archive or directory is given
138 using a format string; see "hg help export" for details.
138 using a format string; see "hg help export" for details.
139
139
140 Each member added to an archive file has a directory prefix
140 Each member added to an archive file has a directory prefix
141 prepended. Use "-p" to specify a format string for the prefix.
141 prepended. Use "-p" to specify a format string for the prefix.
142 The default is the basename of the archive, with suffixes removed.
142 The default is the basename of the archive, with suffixes removed.
143 '''
143 '''
144
144
145 ctx = repo.changectx(opts['rev'])
145 ctx = repo.changectx(opts['rev'])
146 if not ctx:
146 if not ctx:
147 raise util.Abort(_('repository has no revisions'))
147 raise util.Abort(_('repository has no revisions'))
148 node = ctx.node()
148 node = ctx.node()
149 dest = cmdutil.make_filename(repo, dest, node)
149 dest = cmdutil.make_filename(repo, dest, node)
150 if os.path.realpath(dest) == repo.root:
150 if os.path.realpath(dest) == repo.root:
151 raise util.Abort(_('repository root cannot be destination'))
151 raise util.Abort(_('repository root cannot be destination'))
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
153 kind = opts.get('type') or 'files'
153 kind = opts.get('type') or 'files'
154 prefix = opts['prefix']
154 prefix = opts['prefix']
155 if dest == '-':
155 if dest == '-':
156 if kind == 'files':
156 if kind == 'files':
157 raise util.Abort(_('cannot archive plain files to stdout'))
157 raise util.Abort(_('cannot archive plain files to stdout'))
158 dest = sys.stdout
158 dest = sys.stdout
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
160 prefix = cmdutil.make_filename(repo, prefix, node)
160 prefix = cmdutil.make_filename(repo, prefix, node)
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
162 matchfn, prefix)
162 matchfn, prefix)
163
163
164 def backout(ui, repo, node=None, rev=None, **opts):
164 def backout(ui, repo, node=None, rev=None, **opts):
165 '''reverse effect of earlier changeset
165 '''reverse effect of earlier changeset
166
166
167 Commit the backed out changes as a new changeset. The new
167 Commit the backed out changes as a new changeset. The new
168 changeset is a child of the backed out changeset.
168 changeset is a child of the backed out changeset.
169
169
170 If you back out a changeset other than the tip, a new head is
170 If you back out a changeset other than the tip, a new head is
171 created. This head is the parent of the working directory. If
171 created. This head is the parent of the working directory. If
172 you back out an old changeset, your working directory will appear
172 you back out an old changeset, your working directory will appear
173 old after the backout. You should merge the backout changeset
173 old after the backout. You should merge the backout changeset
174 with another head.
174 with another head.
175
175
176 The --merge option remembers the parent of the working directory
176 The --merge option remembers the parent of the working directory
177 before starting the backout, then merges the new head with that
177 before starting the backout, then merges the new head with that
178 changeset afterwards. This saves you from doing the merge by
178 changeset afterwards. This saves you from doing the merge by
179 hand. The result of this merge is not committed, as for a normal
179 hand. The result of this merge is not committed, as for a normal
180 merge.'''
180 merge.'''
181 if rev and node:
181 if rev and node:
182 raise util.Abort(_("please specify just one revision"))
182 raise util.Abort(_("please specify just one revision"))
183
183
184 if not rev:
184 if not rev:
185 rev = node
185 rev = node
186
186
187 if not rev:
187 if not rev:
188 raise util.Abort(_("please specify a revision to backout"))
188 raise util.Abort(_("please specify a revision to backout"))
189
189
190 cmdutil.bail_if_changed(repo)
190 cmdutil.bail_if_changed(repo)
191 op1, op2 = repo.dirstate.parents()
191 op1, op2 = repo.dirstate.parents()
192 if op2 != nullid:
192 if op2 != nullid:
193 raise util.Abort(_('outstanding uncommitted merge'))
193 raise util.Abort(_('outstanding uncommitted merge'))
194 node = repo.lookup(rev)
194 node = repo.lookup(rev)
195 p1, p2 = repo.changelog.parents(node)
195 p1, p2 = repo.changelog.parents(node)
196 if p1 == nullid:
196 if p1 == nullid:
197 raise util.Abort(_('cannot back out a change with no parents'))
197 raise util.Abort(_('cannot back out a change with no parents'))
198 if p2 != nullid:
198 if p2 != nullid:
199 if not opts['parent']:
199 if not opts['parent']:
200 raise util.Abort(_('cannot back out a merge changeset without '
200 raise util.Abort(_('cannot back out a merge changeset without '
201 '--parent'))
201 '--parent'))
202 p = repo.lookup(opts['parent'])
202 p = repo.lookup(opts['parent'])
203 if p not in (p1, p2):
203 if p not in (p1, p2):
204 raise util.Abort(_('%s is not a parent of %s') %
204 raise util.Abort(_('%s is not a parent of %s') %
205 (short(p), short(node)))
205 (short(p), short(node)))
206 parent = p
206 parent = p
207 else:
207 else:
208 if opts['parent']:
208 if opts['parent']:
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
210 parent = p1
210 parent = p1
211 hg.clean(repo, node, show_stats=False)
211 hg.clean(repo, node, show_stats=False)
212 revert_opts = opts.copy()
212 revert_opts = opts.copy()
213 revert_opts['date'] = None
213 revert_opts['date'] = None
214 revert_opts['all'] = True
214 revert_opts['all'] = True
215 revert_opts['rev'] = hex(parent)
215 revert_opts['rev'] = hex(parent)
216 revert(ui, repo, **revert_opts)
216 revert(ui, repo, **revert_opts)
217 commit_opts = opts.copy()
217 commit_opts = opts.copy()
218 commit_opts['addremove'] = False
218 commit_opts['addremove'] = False
219 if not commit_opts['message'] and not commit_opts['logfile']:
219 if not commit_opts['message'] and not commit_opts['logfile']:
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
221 commit_opts['force_editor'] = True
221 commit_opts['force_editor'] = True
222 commit(ui, repo, **commit_opts)
222 commit(ui, repo, **commit_opts)
223 def nice(node):
223 def nice(node):
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
225 ui.status(_('changeset %s backs out changeset %s\n') %
225 ui.status(_('changeset %s backs out changeset %s\n') %
226 (nice(repo.changelog.tip()), nice(node)))
226 (nice(repo.changelog.tip()), nice(node)))
227 if op1 != node:
227 if op1 != node:
228 if opts['merge']:
228 if opts['merge']:
229 ui.status(_('merging with changeset %s\n') % nice(op1))
229 ui.status(_('merging with changeset %s\n') % nice(op1))
230 hg.merge(repo, hex(op1))
230 hg.merge(repo, hex(op1))
231 else:
231 else:
232 ui.status(_('the backout changeset is a new head - '
232 ui.status(_('the backout changeset is a new head - '
233 'do not forget to merge\n'))
233 'do not forget to merge\n'))
234 ui.status(_('(use "backout --merge" '
234 ui.status(_('(use "backout --merge" '
235 'if you want to auto-merge)\n'))
235 'if you want to auto-merge)\n'))
236
236
237 def branch(ui, repo, label=None, **opts):
237 def branch(ui, repo, label=None, **opts):
238 """set or show the current branch name
238 """set or show the current branch name
239
239
240 With no argument, show the current branch name. With one argument,
240 With no argument, show the current branch name. With one argument,
241 set the working directory branch name (the branch does not exist in
241 set the working directory branch name (the branch does not exist in
242 the repository until the next commit).
242 the repository until the next commit).
243
243
244 Unless --force is specified, branch will not let you set a
244 Unless --force is specified, branch will not let you set a
245 branch name that shadows an existing branch.
245 branch name that shadows an existing branch.
246 """
246 """
247
247
248 if label:
248 if label:
249 if not opts.get('force') and label in repo.branchtags():
249 if not opts.get('force') and label in repo.branchtags():
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
251 raise util.Abort(_('a branch of the same name already exists'
251 raise util.Abort(_('a branch of the same name already exists'
252 ' (use --force to override)'))
252 ' (use --force to override)'))
253 repo.dirstate.setbranch(util.fromlocal(label))
253 repo.dirstate.setbranch(util.fromlocal(label))
254 ui.status(_('marked working directory as branch %s\n') % label)
254 ui.status(_('marked working directory as branch %s\n') % label)
255 else:
255 else:
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
257
257
258 def branches(ui, repo, active=False):
258 def branches(ui, repo, active=False):
259 """list repository named branches
259 """list repository named branches
260
260
261 List the repository's named branches, indicating which ones are
261 List the repository's named branches, indicating which ones are
262 inactive. If active is specified, only show active branches.
262 inactive. If active is specified, only show active branches.
263
263
264 A branch is considered active if it contains unmerged heads.
264 A branch is considered active if it contains unmerged heads.
265 """
265 """
266 b = repo.branchtags()
266 b = repo.branchtags()
267 heads = dict.fromkeys(repo.heads(), 1)
267 heads = dict.fromkeys(repo.heads(), 1)
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
269 l.sort()
269 l.sort()
270 l.reverse()
270 l.reverse()
271 for ishead, r, n, t in l:
271 for ishead, r, n, t in l:
272 if active and not ishead:
272 if active and not ishead:
273 # If we're only displaying active branches, abort the loop on
273 # If we're only displaying active branches, abort the loop on
274 # encountering the first inactive head
274 # encountering the first inactive head
275 break
275 break
276 else:
276 else:
277 hexfunc = ui.debugflag and hex or short
277 hexfunc = ui.debugflag and hex or short
278 if ui.quiet:
278 if ui.quiet:
279 ui.write("%s\n" % t)
279 ui.write("%s\n" % t)
280 else:
280 else:
281 spaces = " " * (30 - util.locallen(t))
281 spaces = " " * (30 - util.locallen(t))
282 # The code only gets here if inactive branches are being
282 # The code only gets here if inactive branches are being
283 # displayed or the branch is active.
283 # displayed or the branch is active.
284 isinactive = ((not ishead) and " (inactive)") or ''
284 isinactive = ((not ishead) and " (inactive)") or ''
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
286
286
287 def bundle(ui, repo, fname, dest=None, **opts):
287 def bundle(ui, repo, fname, dest=None, **opts):
288 """create a changegroup file
288 """create a changegroup file
289
289
290 Generate a compressed changegroup file collecting changesets not
290 Generate a compressed changegroup file collecting changesets not
291 found in the other repository.
291 found in the other repository.
292
292
293 If no destination repository is specified the destination is assumed
293 If no destination repository is specified the destination is assumed
294 to have all the nodes specified by one or more --base parameters.
294 to have all the nodes specified by one or more --base parameters.
295
295
296 The bundle file can then be transferred using conventional means and
296 The bundle file can then be transferred using conventional means and
297 applied to another repository with the unbundle or pull command.
297 applied to another repository with the unbundle or pull command.
298 This is useful when direct push and pull are not available or when
298 This is useful when direct push and pull are not available or when
299 exporting an entire repository is undesirable.
299 exporting an entire repository is undesirable.
300
300
301 Applying bundles preserves all changeset contents including
301 Applying bundles preserves all changeset contents including
302 permissions, copy/rename information, and revision history.
302 permissions, copy/rename information, and revision history.
303 """
303 """
304 revs = opts.get('rev') or None
304 revs = opts.get('rev') or None
305 if revs:
305 if revs:
306 revs = [repo.lookup(rev) for rev in revs]
306 revs = [repo.lookup(rev) for rev in revs]
307 base = opts.get('base')
307 base = opts.get('base')
308 if base:
308 if base:
309 if dest:
309 if dest:
310 raise util.Abort(_("--base is incompatible with specifiying "
310 raise util.Abort(_("--base is incompatible with specifiying "
311 "a destination"))
311 "a destination"))
312 base = [repo.lookup(rev) for rev in base]
312 base = [repo.lookup(rev) for rev in base]
313 # create the right base
313 # create the right base
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
315 o = []
315 o = []
316 has = {nullid: None}
316 has = {nullid: None}
317 for n in base:
317 for n in base:
318 has.update(repo.changelog.reachable(n))
318 has.update(repo.changelog.reachable(n))
319 if revs:
319 if revs:
320 visit = list(revs)
320 visit = list(revs)
321 else:
321 else:
322 visit = repo.changelog.heads()
322 visit = repo.changelog.heads()
323 seen = {}
323 seen = {}
324 while visit:
324 while visit:
325 n = visit.pop(0)
325 n = visit.pop(0)
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
327 if len(parents) == 0:
327 if len(parents) == 0:
328 o.insert(0, n)
328 o.insert(0, n)
329 else:
329 else:
330 for p in parents:
330 for p in parents:
331 if p not in seen:
331 if p not in seen:
332 seen[p] = 1
332 seen[p] = 1
333 visit.append(p)
333 visit.append(p)
334 else:
334 else:
335 cmdutil.setremoteconfig(ui, opts)
335 cmdutil.setremoteconfig(ui, opts)
336 dest, revs = cmdutil.parseurl(
336 dest, revs = cmdutil.parseurl(
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
338 other = hg.repository(ui, dest)
338 other = hg.repository(ui, dest)
339 o = repo.findoutgoing(other, force=opts['force'])
339 o = repo.findoutgoing(other, force=opts['force'])
340
340
341 if revs:
341 if revs:
342 cg = repo.changegroupsubset(o, revs, 'bundle')
342 cg = repo.changegroupsubset(o, revs, 'bundle')
343 else:
343 else:
344 cg = repo.changegroup(o, 'bundle')
344 cg = repo.changegroup(o, 'bundle')
345 changegroup.writebundle(cg, fname, "HG10BZ")
345 changegroup.writebundle(cg, fname, "HG10BZ")
346
346
347 def cat(ui, repo, file1, *pats, **opts):
347 def cat(ui, repo, file1, *pats, **opts):
348 """output the current or given revision of files
348 """output the current or given revision of files
349
349
350 Print the specified files as they were at the given revision.
350 Print the specified files as they were at the given revision.
351 If no revision is given, the parent of the working directory is used,
351 If no revision is given, the parent of the working directory is used,
352 or tip if no revision is checked out.
352 or tip if no revision is checked out.
353
353
354 Output may be to a file, in which case the name of the file is
354 Output may be to a file, in which case the name of the file is
355 given using a format string. The formatting rules are the same as
355 given using a format string. The formatting rules are the same as
356 for the export command, with the following additions:
356 for the export command, with the following additions:
357
357
358 %s basename of file being printed
358 %s basename of file being printed
359 %d dirname of file being printed, or '.' if in repo root
359 %d dirname of file being printed, or '.' if in repo root
360 %p root-relative path name of file being printed
360 %p root-relative path name of file being printed
361 """
361 """
362 ctx = repo.changectx(opts['rev'])
362 ctx = repo.changectx(opts['rev'])
363 err = 1
363 err = 1
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
365 ctx.node()):
365 ctx.node()):
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
367 fp.write(ctx.filectx(abs).data())
367 fp.write(ctx.filectx(abs).data())
368 err = 0
368 err = 0
369 return err
369 return err
370
370
371 def clone(ui, source, dest=None, **opts):
371 def clone(ui, source, dest=None, **opts):
372 """make a copy of an existing repository
372 """make a copy of an existing repository
373
373
374 Create a copy of an existing repository in a new directory.
374 Create a copy of an existing repository in a new directory.
375
375
376 If no destination directory name is specified, it defaults to the
376 If no destination directory name is specified, it defaults to the
377 basename of the source.
377 basename of the source.
378
378
379 The location of the source is added to the new repository's
379 The location of the source is added to the new repository's
380 .hg/hgrc file, as the default to be used for future pulls.
380 .hg/hgrc file, as the default to be used for future pulls.
381
381
382 For efficiency, hardlinks are used for cloning whenever the source
382 For efficiency, hardlinks are used for cloning whenever the source
383 and destination are on the same filesystem (note this applies only
383 and destination are on the same filesystem (note this applies only
384 to the repository data, not to the checked out files). Some
384 to the repository data, not to the checked out files). Some
385 filesystems, such as AFS, implement hardlinking incorrectly, but
385 filesystems, such as AFS, implement hardlinking incorrectly, but
386 do not report errors. In these cases, use the --pull option to
386 do not report errors. In these cases, use the --pull option to
387 avoid hardlinking.
387 avoid hardlinking.
388
388
389 You can safely clone repositories and checked out files using full
389 You can safely clone repositories and checked out files using full
390 hardlinks with
390 hardlinks with
391
391
392 $ cp -al REPO REPOCLONE
392 $ cp -al REPO REPOCLONE
393
393
394 which is the fastest way to clone. However, the operation is not
394 which is the fastest way to clone. However, the operation is not
395 atomic (making sure REPO is not modified during the operation is
395 atomic (making sure REPO is not modified during the operation is
396 up to you) and you have to make sure your editor breaks hardlinks
396 up to you) and you have to make sure your editor breaks hardlinks
397 (Emacs and most Linux Kernel tools do so).
397 (Emacs and most Linux Kernel tools do so).
398
398
399 If you use the -r option to clone up to a specific revision, no
399 If you use the -r option to clone up to a specific revision, no
400 subsequent revisions will be present in the cloned repository.
400 subsequent revisions will be present in the cloned repository.
401 This option implies --pull, even on local repositories.
401 This option implies --pull, even on local repositories.
402
402
403 See pull for valid source format details.
403 See pull for valid source format details.
404
404
405 It is possible to specify an ssh:// URL as the destination, but no
405 It is possible to specify an ssh:// URL as the destination, but no
406 .hg/hgrc and working directory will be created on the remote side.
406 .hg/hgrc and working directory will be created on the remote side.
407 Look at the help text for the pull command for important details
407 Look at the help text for the pull command for important details
408 about ssh:// URLs.
408 about ssh:// URLs.
409 """
409 """
410 cmdutil.setremoteconfig(ui, opts)
410 cmdutil.setremoteconfig(ui, opts)
411 hg.clone(ui, source, dest,
411 hg.clone(ui, source, dest,
412 pull=opts['pull'],
412 pull=opts['pull'],
413 stream=opts['uncompressed'],
413 stream=opts['uncompressed'],
414 rev=opts['rev'],
414 rev=opts['rev'],
415 update=not opts['noupdate'])
415 update=not opts['noupdate'])
416
416
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    # build the commit message from the command options (may be empty,
    # in which case repo.commit is expected to prompt via the editor)
    message = cmdutil.logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        # explicit file list: commit only the named files, but first
        # sanity-check every name against the working directory
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None  # sorted copy of 'files', built lazily for bisect
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rf)
                if stat.S_ISDIR(mode):
                    # a directory is acceptable only if some changed
                    # file sorts directly under it
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rf)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rf)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rf)
    else:
        # no patterns: an empty file list tells repo.commit to use all
        # outstanding changes
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        # NOTE(review): repo.commit can raise ValueError (presumably on
        # a malformed --date or similar input) -- surface it as Abort
        raise util.Abort(str(inst))
468
468
def docopy(ui, repo, pats, opts, wlock):
    """Copy/rename worker shared by the copy and rename commands.

    Walks the source patterns, copies each matching file to the computed
    target path and records the copy in the dirstate.  Returns a tuple
    (errors, copied) where 'errors' is the number of failed copies and
    'copied' is a list of (abssrc, relsrc, exact) tuples.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    errors = 0
    copied = []
    targets = {}  # abstarget -> abssrc, used to detect collisions

    # abs: hgsep
    # rel: ossep
    # return: hgsep
    def okaytocopy(abs, rel, exact):
        # returns the copy source to record for 'abs' (following an
        # existing copy record for added files), or None if the file
        # must not be copied
        reasons = {'?': _('is not managed'),
                   'r': _('has been marked for remove')}
        state = repo.dirstate[abs]
        reason = reasons.get(state)
        if reason:
            if exact:
                ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
        else:
            if state == 'a':
                origsrc = repo.dirstate.copied(abs)
                if origsrc is not None:
                    return origsrc
            return abs

    # origsrc: hgsep
    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copy(origsrc, abssrc, relsrc, otarget, exact):
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        prevsrc = targets.get(abstarget)
        src = repo.wjoin(abssrc)
        target = repo.wjoin(abstarget)
        if prevsrc is not None:
            # two sources mapped onto the same target in this run
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return
        if (not opts['after'] and os.path.exists(target) or
            opts['after'] and repo.dirstate[abstarget] in 'mn'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return
            if not opts['after'] and not opts.get('dry_run'):
                os.unlink(target)
        if opts['after']:
            # --after: only record copies whose target already exists
            if not os.path.exists(target):
                return
        else:
            targetdir = os.path.dirname(target) or '.'
            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                os.makedirs(targetdir)
            try:
                # if the target was scheduled for removal, resurrect it
                # first; roll that back if the actual copy then fails
                restore = repo.dirstate[abstarget] == 'r'
                if restore and not opts.get('dry_run'):
                    repo.undelete([abstarget], wlock)
                try:
                    if not opts.get('dry_run'):
                        util.copyfile(src, target)
                    restore = False
                finally:
                    if restore:
                        repo.remove([abstarget], wlock=wlock)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    # NOTE(review): 'errors += 1' rebinds 'errors' as a
                    # local of this nested function (Python 2 has no
                    # nonlocal) -- verify this path against upstream
                    errors += 1
                return
        if ui.verbose or not exact:
            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
        targets[abstarget] = abssrc
        if abstarget != origsrc:
            if repo.dirstate[origsrc] == 'a':
                # copying an uncommitted (added) file: warn and just add
                # the target, since there is no revision to copy from
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if abstarget not in repo.dirstate and not opts.get('dry_run'):
                    repo.add([abstarget], wlock)
            elif not opts.get('dry_run'):
                repo.copy(origsrc, abstarget, wlock)
        copied.append((abssrc, relsrc, exact))

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        # compute target paths for a normal (non --after) copy
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        # compute target paths for --after: the copy already happened,
        # so guess how much of the source prefix to strip by checking
        # which candidate targets actually exist
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest
                    # when stripped at 'striplen'
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    # the last pattern is the destination; the rest are sources
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
        raise util.Abort(_('with multiple sources, destination must be an '
                           'existing directory'))
    if opts['after']:
        tfn = targetpathafterfn
    else:
        tfn = targetpathfn
    copylist = []
    for pat in pats:
        srcs = []
        for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
                                                       globbed=True):
            origsrc = okaytocopy(abssrc, relsrc, exact)
            if origsrc:
                srcs.append((origsrc, abssrc, relsrc, exact))
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    for targetpath, srcs in copylist:
        for origsrc, abssrc, relsrc, exact in srcs:
            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)

    if errors:
        ui.warn(_('(consider using --after)\n'))
    return errors, copied
663
663
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    wlock = repo.wlock(False)
    try:
        errs, copied = docopy(ui, repo, pats, opts, wlock)
    finally:
        # dropping the last reference releases the working-dir lock
        del wlock
    return errs
681
684
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # open the revlog directly from the given index file, bypassing
    # the repository path auditing
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
    anc = rlog.ancestor(rlog.lookup(rev1), rlog.lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(anc), hex(anc)))
687
690
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # complete option names (global ones, plus the command's own
        # options when a command is given) instead of command names
        opt_names = []
        tables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(ui, cmd)
            tables.append(entry[1])
        for table in tables:
            for opt in table:
                if opt[0]:
                    opt_names.append('-%s' % opt[0])
                opt_names.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(opt_names))
        return

    # otherwise, list every command name matching the given prefix
    names = cmdutil.findpossible(ui, cmd).keys()
    names.sort()
    ui.write("%s\n" % "\n".join(names))
708
711
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    # default to the tip when no revision is given
    if rev == "":
        rev = repo.changelog.tip()
    ctx = repo.changectx(rev)
    files = ctx.manifest()
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(rev, files)
    finally:
        # dropping the last reference releases the working-dir lock
        del wlock
717
723
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo.changectx(parent1).manifest()
    m2 = repo.changectx(parent2).manifest()
    errors = 0
    # pass 1: every dirstate entry must be consistent with the parent
    # manifests for its state (n=normal, r=removed, a=added, m=merged)
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # pass 2: every file in the first parent's manifest must be tracked
    # in an appropriate state
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
744
750
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified section.name item may be requested
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # a bare section name matches all its items
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    # an exact section.name match prints the value only
                    ui.write(value, '\n')
        else:
            ui.write('%s=%s\n' % (sectname, value))
770
776
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # a missing second parent means the null revision
    if not rev2:
        rev2 = hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        # dropping the last reference releases the working-dir lock
        del wlock
786
792
def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    # dirstate entries are (state, mode, size, mtime) tuples keyed by
    # filename; peek at the private map to dump them sorted
    dc = repo.dirstate._map
    k = dc.keys()
    k.sort()
    for file_ in k:
        if dc[file_][3] == -1:
            # Pad or slice to locale representation
            locale_len = len(time.strftime("%x %X", time.localtime(0)))
            timestr = 'unset'
            timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
        else:
            timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    timestr, file_))
    # finally, dump the recorded copy sources
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
805
811
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # map the ".d" data file name onto its ".i" index file
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
813
819
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the additional, more permissive date formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
825
831
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    # column header, then one row per revision in the revlog
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))
837
843
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    # emit one edge per parent link; the null second parent is omitted
    for idx in xrange(rlog.count()):
        cur = rlog.node(idx)
        parents = rlog.parents(cur)
        ui.write("\t%d -> %d\n" % (rlog.rev(parents[0]), idx))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(parents[1]), idx))
    ui.write("}\n")
849
855
850 def debuginstall(ui):
856 def debuginstall(ui):
851 '''test Mercurial installation'''
857 '''test Mercurial installation'''
852
858
853 def writetemp(contents):
859 def writetemp(contents):
854 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
860 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
855 f = os.fdopen(fd, "wb")
861 f = os.fdopen(fd, "wb")
856 f.write(contents)
862 f.write(contents)
857 f.close()
863 f.close()
858 return name
864 return name
859
865
860 problems = 0
866 problems = 0
861
867
862 # encoding
868 # encoding
863 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
869 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
864 try:
870 try:
865 util.fromlocal("test")
871 util.fromlocal("test")
866 except util.Abort, inst:
872 except util.Abort, inst:
867 ui.write(" %s\n" % inst)
873 ui.write(" %s\n" % inst)
868 ui.write(_(" (check that your locale is properly set)\n"))
874 ui.write(_(" (check that your locale is properly set)\n"))
869 problems += 1
875 problems += 1
870
876
871 # compiled modules
877 # compiled modules
872 ui.status(_("Checking extensions...\n"))
878 ui.status(_("Checking extensions...\n"))
873 try:
879 try:
874 import bdiff, mpatch, base85
880 import bdiff, mpatch, base85
875 except Exception, inst:
881 except Exception, inst:
876 ui.write(" %s\n" % inst)
882 ui.write(" %s\n" % inst)
877 ui.write(_(" One or more extensions could not be found"))
883 ui.write(_(" One or more extensions could not be found"))
878 ui.write(_(" (check that you compiled the extensions)\n"))
884 ui.write(_(" (check that you compiled the extensions)\n"))
879 problems += 1
885 problems += 1
880
886
881 # templates
887 # templates
882 ui.status(_("Checking templates...\n"))
888 ui.status(_("Checking templates...\n"))
883 try:
889 try:
884 import templater
890 import templater
885 t = templater.templater(templater.templatepath("map-cmdline.default"))
891 t = templater.templater(templater.templatepath("map-cmdline.default"))
886 except Exception, inst:
892 except Exception, inst:
887 ui.write(" %s\n" % inst)
893 ui.write(" %s\n" % inst)
888 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
894 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
889 problems += 1
895 problems += 1
890
896
891 # patch
897 # patch
892 ui.status(_("Checking patch...\n"))
898 ui.status(_("Checking patch...\n"))
893 patcher = ui.config('ui', 'patch')
899 patcher = ui.config('ui', 'patch')
894 patcher = ((patcher and util.find_exe(patcher)) or
900 patcher = ((patcher and util.find_exe(patcher)) or
895 util.find_exe('gpatch') or
901 util.find_exe('gpatch') or
896 util.find_exe('patch'))
902 util.find_exe('patch'))
897 if not patcher:
903 if not patcher:
898 ui.write(_(" Can't find patch or gpatch in PATH\n"))
904 ui.write(_(" Can't find patch or gpatch in PATH\n"))
899 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
905 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
900 problems += 1
906 problems += 1
901 else:
907 else:
902 # actually attempt a patch here
908 # actually attempt a patch here
903 a = "1\n2\n3\n4\n"
909 a = "1\n2\n3\n4\n"
904 b = "1\n2\n3\ninsert\n4\n"
910 b = "1\n2\n3\ninsert\n4\n"
905 fa = writetemp(a)
911 fa = writetemp(a)
906 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
912 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
907 fd = writetemp(d)
913 fd = writetemp(d)
908
914
909 files = {}
915 files = {}
910 try:
916 try:
911 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
917 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
912 except util.Abort, e:
918 except util.Abort, e:
913 ui.write(_(" patch call failed:\n"))
919 ui.write(_(" patch call failed:\n"))
914 ui.write(" " + str(e) + "\n")
920 ui.write(" " + str(e) + "\n")
915 problems += 1
921 problems += 1
916 else:
922 else:
917 if list(files) != [os.path.basename(fa)]:
923 if list(files) != [os.path.basename(fa)]:
918 ui.write(_(" unexpected patch output!"))
924 ui.write(_(" unexpected patch output!"))
919 ui.write(_(" (you may have an incompatible version of patch)\n"))
925 ui.write(_(" (you may have an incompatible version of patch)\n"))
920 problems += 1
926 problems += 1
921 a = file(fa).read()
927 a = file(fa).read()
922 if a != b:
928 if a != b:
923 ui.write(_(" patch test failed!"))
929 ui.write(_(" patch test failed!"))
924 ui.write(_(" (you may have an incompatible version of patch)\n"))
930 ui.write(_(" (you may have an incompatible version of patch)\n"))
925 problems += 1
931 problems += 1
926
932
927 os.unlink(fa)
933 os.unlink(fa)
928 os.unlink(fd)
934 os.unlink(fd)
929
935
930 # merge helper
936 # merge helper
931 ui.status(_("Checking merge helper...\n"))
937 ui.status(_("Checking merge helper...\n"))
932 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
938 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
933 or "hgmerge")
939 or "hgmerge")
934 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
940 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
935 if not cmdpath:
941 if not cmdpath:
936 if cmd == 'hgmerge':
942 if cmd == 'hgmerge':
937 ui.write(_(" No merge helper set and can't find default"
943 ui.write(_(" No merge helper set and can't find default"
938 " hgmerge script in PATH\n"))
944 " hgmerge script in PATH\n"))
939 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
945 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
940 else:
946 else:
941 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
947 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
942 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
948 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
943 problems += 1
949 problems += 1
944 else:
950 else:
945 # actually attempt a patch here
951 # actually attempt a patch here
946 fa = writetemp("1\n2\n3\n4\n")
952 fa = writetemp("1\n2\n3\n4\n")
947 fl = writetemp("1\n2\n3\ninsert\n4\n")
953 fl = writetemp("1\n2\n3\ninsert\n4\n")
948 fr = writetemp("begin\n1\n2\n3\n4\n")
954 fr = writetemp("begin\n1\n2\n3\n4\n")
949 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
955 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
950 if r:
956 if r:
951 ui.write(_(" Got unexpected merge error %d!\n") % r)
957 ui.write(_(" Got unexpected merge error %d!\n") % r)
952 problems += 1
958 problems += 1
953 m = file(fl).read()
959 m = file(fl).read()
954 if m != "begin\n1\n2\n3\ninsert\n4\n":
960 if m != "begin\n1\n2\n3\ninsert\n4\n":
955 ui.write(_(" Got unexpected merge results!\n"))
961 ui.write(_(" Got unexpected merge results!\n"))
956 ui.write(_(" (your merge helper may have the"
962 ui.write(_(" (your merge helper may have the"
957 " wrong argument order)\n"))
963 " wrong argument order)\n"))
958 ui.write(_(" Result: %r\n") % m)
964 ui.write(_(" Result: %r\n") % m)
959 problems += 1
965 problems += 1
960 os.unlink(fa)
966 os.unlink(fa)
961 os.unlink(fl)
967 os.unlink(fl)
962 os.unlink(fr)
968 os.unlink(fr)
963
969
964 # editor
970 # editor
965 ui.status(_("Checking commit editor...\n"))
971 ui.status(_("Checking commit editor...\n"))
966 editor = (os.environ.get("HGEDITOR") or
972 editor = (os.environ.get("HGEDITOR") or
967 ui.config("ui", "editor") or
973 ui.config("ui", "editor") or
968 os.environ.get("EDITOR", "vi"))
974 os.environ.get("EDITOR", "vi"))
969 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
975 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
970 if not cmdpath:
976 if not cmdpath:
971 if editor == 'vi':
977 if editor == 'vi':
972 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
978 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
973 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
979 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
974 else:
980 else:
975 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
981 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
976 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
982 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
977 problems += 1
983 problems += 1
978
984
979 # check username
985 # check username
980 ui.status(_("Checking username...\n"))
986 ui.status(_("Checking username...\n"))
981 user = os.environ.get("HGUSER")
987 user = os.environ.get("HGUSER")
982 if user is None:
988 if user is None:
983 user = ui.config("ui", "username")
989 user = ui.config("ui", "username")
984 if user is None:
990 if user is None:
985 user = os.environ.get("EMAIL")
991 user = os.environ.get("EMAIL")
986 if not user:
992 if not user:
987 ui.warn(" ")
993 ui.warn(" ")
988 ui.username()
994 ui.username()
989 ui.write(_(" (specify a username in your .hgrc file)\n"))
995 ui.write(_(" (specify a username in your .hgrc file)\n"))
990
996
991 if not problems:
997 if not problems:
992 ui.status(_("No problems detected\n"))
998 ui.status(_("No problems detected\n"))
993 else:
999 else:
994 ui.write(_("%s problems detected,"
1000 ui.write(_("%s problems detected,"
995 " please check your install!\n") % problems)
1001 " please check your install!\n") % problems)
996
1002
997 return problems
1003 return problems
998
1004
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    # Look up the changeset context (defaults to tip) and walk every
    # file matched by the given patterns at that revision.
    ctx = repo.changectx(opts.get('rev', 'tip'))
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        # renamed() returns (old path, old filenode) or None/False.
        m = ctx.filectx(abs).renamed()
        if m:
            ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # Column widths are sized to the longest absolute and relative paths
    # so the output lines up in a table.
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # Resolve -r arguments into a (node1, node2) pair; either side may
    # be None, meaning the working directory.
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    # Delegate the actual diff generation to the patch module, writing
    # directly to the ui.
    patch.diff(repo, node1, node2, fns, match=matchfn,
               opts=patch.diffopts(ui, opts))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    # Expand revision ranges/specs into a concrete list of revisions.
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1092 def grep(ui, repo, pattern, *pats, **opts):
1098 def grep(ui, repo, pattern, *pats, **opts):
1093 """search for a pattern in specified files and revisions
1099 """search for a pattern in specified files and revisions
1094
1100
1095 Search revisions of files for a regular expression.
1101 Search revisions of files for a regular expression.
1096
1102
1097 This command behaves differently than Unix grep. It only accepts
1103 This command behaves differently than Unix grep. It only accepts
1098 Python/Perl regexps. It searches repository history, not the
1104 Python/Perl regexps. It searches repository history, not the
1099 working directory. It always prints the revision number in which
1105 working directory. It always prints the revision number in which
1100 a match appears.
1106 a match appears.
1101
1107
1102 By default, grep only prints output for the first revision of a
1108 By default, grep only prints output for the first revision of a
1103 file in which it finds a match. To get it to print every revision
1109 file in which it finds a match. To get it to print every revision
1104 that contains a change in match status ("-" for a match that
1110 that contains a change in match status ("-" for a match that
1105 becomes a non-match, or "+" for a non-match that becomes a match),
1111 becomes a non-match, or "+" for a non-match that becomes a match),
1106 use the --all flag.
1112 use the --all flag.
1107 """
1113 """
1108 reflags = 0
1114 reflags = 0
1109 if opts['ignore_case']:
1115 if opts['ignore_case']:
1110 reflags |= re.I
1116 reflags |= re.I
1111 try:
1117 try:
1112 regexp = re.compile(pattern, reflags)
1118 regexp = re.compile(pattern, reflags)
1113 except Exception, inst:
1119 except Exception, inst:
1114 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1120 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1115 return None
1121 return None
1116 sep, eol = ':', '\n'
1122 sep, eol = ':', '\n'
1117 if opts['print0']:
1123 if opts['print0']:
1118 sep = eol = '\0'
1124 sep = eol = '\0'
1119
1125
1120 fcache = {}
1126 fcache = {}
1121 def getfile(fn):
1127 def getfile(fn):
1122 if fn not in fcache:
1128 if fn not in fcache:
1123 fcache[fn] = repo.file(fn)
1129 fcache[fn] = repo.file(fn)
1124 return fcache[fn]
1130 return fcache[fn]
1125
1131
1126 def matchlines(body):
1132 def matchlines(body):
1127 begin = 0
1133 begin = 0
1128 linenum = 0
1134 linenum = 0
1129 while True:
1135 while True:
1130 match = regexp.search(body, begin)
1136 match = regexp.search(body, begin)
1131 if not match:
1137 if not match:
1132 break
1138 break
1133 mstart, mend = match.span()
1139 mstart, mend = match.span()
1134 linenum += body.count('\n', begin, mstart) + 1
1140 linenum += body.count('\n', begin, mstart) + 1
1135 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1141 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1136 lend = body.find('\n', mend)
1142 lend = body.find('\n', mend)
1137 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1143 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1138 begin = lend + 1
1144 begin = lend + 1
1139
1145
1140 class linestate(object):
1146 class linestate(object):
1141 def __init__(self, line, linenum, colstart, colend):
1147 def __init__(self, line, linenum, colstart, colend):
1142 self.line = line
1148 self.line = line
1143 self.linenum = linenum
1149 self.linenum = linenum
1144 self.colstart = colstart
1150 self.colstart = colstart
1145 self.colend = colend
1151 self.colend = colend
1146
1152
1147 def __eq__(self, other):
1153 def __eq__(self, other):
1148 return self.line == other.line
1154 return self.line == other.line
1149
1155
1150 matches = {}
1156 matches = {}
1151 copies = {}
1157 copies = {}
1152 def grepbody(fn, rev, body):
1158 def grepbody(fn, rev, body):
1153 matches[rev].setdefault(fn, [])
1159 matches[rev].setdefault(fn, [])
1154 m = matches[rev][fn]
1160 m = matches[rev][fn]
1155 for lnum, cstart, cend, line in matchlines(body):
1161 for lnum, cstart, cend, line in matchlines(body):
1156 s = linestate(line, lnum, cstart, cend)
1162 s = linestate(line, lnum, cstart, cend)
1157 m.append(s)
1163 m.append(s)
1158
1164
1159 def difflinestates(a, b):
1165 def difflinestates(a, b):
1160 sm = difflib.SequenceMatcher(None, a, b)
1166 sm = difflib.SequenceMatcher(None, a, b)
1161 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1167 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1162 if tag == 'insert':
1168 if tag == 'insert':
1163 for i in xrange(blo, bhi):
1169 for i in xrange(blo, bhi):
1164 yield ('+', b[i])
1170 yield ('+', b[i])
1165 elif tag == 'delete':
1171 elif tag == 'delete':
1166 for i in xrange(alo, ahi):
1172 for i in xrange(alo, ahi):
1167 yield ('-', a[i])
1173 yield ('-', a[i])
1168 elif tag == 'replace':
1174 elif tag == 'replace':
1169 for i in xrange(alo, ahi):
1175 for i in xrange(alo, ahi):
1170 yield ('-', a[i])
1176 yield ('-', a[i])
1171 for i in xrange(blo, bhi):
1177 for i in xrange(blo, bhi):
1172 yield ('+', b[i])
1178 yield ('+', b[i])
1173
1179
1174 prev = {}
1180 prev = {}
1175 def display(fn, rev, states, prevstates):
1181 def display(fn, rev, states, prevstates):
1176 found = False
1182 found = False
1177 filerevmatches = {}
1183 filerevmatches = {}
1178 r = prev.get(fn, -1)
1184 r = prev.get(fn, -1)
1179 if opts['all']:
1185 if opts['all']:
1180 iter = difflinestates(states, prevstates)
1186 iter = difflinestates(states, prevstates)
1181 else:
1187 else:
1182 iter = [('', l) for l in prevstates]
1188 iter = [('', l) for l in prevstates]
1183 for change, l in iter:
1189 for change, l in iter:
1184 cols = [fn, str(r)]
1190 cols = [fn, str(r)]
1185 if opts['line_number']:
1191 if opts['line_number']:
1186 cols.append(str(l.linenum))
1192 cols.append(str(l.linenum))
1187 if opts['all']:
1193 if opts['all']:
1188 cols.append(change)
1194 cols.append(change)
1189 if opts['user']:
1195 if opts['user']:
1190 cols.append(ui.shortuser(get(r)[1]))
1196 cols.append(ui.shortuser(get(r)[1]))
1191 if opts['files_with_matches']:
1197 if opts['files_with_matches']:
1192 c = (fn, r)
1198 c = (fn, r)
1193 if c in filerevmatches:
1199 if c in filerevmatches:
1194 continue
1200 continue
1195 filerevmatches[c] = 1
1201 filerevmatches[c] = 1
1196 else:
1202 else:
1197 cols.append(l.line)
1203 cols.append(l.line)
1198 ui.write(sep.join(cols), eol)
1204 ui.write(sep.join(cols), eol)
1199 found = True
1205 found = True
1200 return found
1206 return found
1201
1207
1202 fstate = {}
1208 fstate = {}
1203 skip = {}
1209 skip = {}
1204 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1210 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1205 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1211 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1206 found = False
1212 found = False
1207 follow = opts.get('follow')
1213 follow = opts.get('follow')
1208 for st, rev, fns in changeiter:
1214 for st, rev, fns in changeiter:
1209 if st == 'window':
1215 if st == 'window':
1210 matches.clear()
1216 matches.clear()
1211 elif st == 'add':
1217 elif st == 'add':
1212 mf = repo.changectx(rev).manifest()
1218 mf = repo.changectx(rev).manifest()
1213 matches[rev] = {}
1219 matches[rev] = {}
1214 for fn in fns:
1220 for fn in fns:
1215 if fn in skip:
1221 if fn in skip:
1216 continue
1222 continue
1217 fstate.setdefault(fn, {})
1223 fstate.setdefault(fn, {})
1218 try:
1224 try:
1219 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1225 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1220 if follow:
1226 if follow:
1221 copied = getfile(fn).renamed(mf[fn])
1227 copied = getfile(fn).renamed(mf[fn])
1222 if copied:
1228 if copied:
1223 copies.setdefault(rev, {})[fn] = copied[0]
1229 copies.setdefault(rev, {})[fn] = copied[0]
1224 except KeyError:
1230 except KeyError:
1225 pass
1231 pass
1226 elif st == 'iter':
1232 elif st == 'iter':
1227 states = matches[rev].items()
1233 states = matches[rev].items()
1228 states.sort()
1234 states.sort()
1229 for fn, m in states:
1235 for fn, m in states:
1230 copy = copies.get(rev, {}).get(fn)
1236 copy = copies.get(rev, {}).get(fn)
1231 if fn in skip:
1237 if fn in skip:
1232 if copy:
1238 if copy:
1233 skip[copy] = True
1239 skip[copy] = True
1234 continue
1240 continue
1235 if fn in prev or fstate[fn]:
1241 if fn in prev or fstate[fn]:
1236 r = display(fn, rev, m, fstate[fn])
1242 r = display(fn, rev, m, fstate[fn])
1237 found = found or r
1243 found = found or r
1238 if r and not opts['all']:
1244 if r and not opts['all']:
1239 skip[fn] = True
1245 skip[fn] = True
1240 if copy:
1246 if copy:
1241 skip[copy] = True
1247 skip[copy] = True
1242 fstate[fn] = m
1248 fstate[fn] = m
1243 if copy:
1249 if copy:
1244 fstate[copy] = m
1250 fstate[copy] = m
1245 prev[fn] = rev
1251 prev[fn] = rev
1246
1252
1247 fstate = fstate.items()
1253 fstate = fstate.items()
1248 fstate.sort()
1254 fstate.sort()
1249 for fn, state in fstate:
1255 for fn, state in fstate:
1250 if fn in skip:
1256 if fn in skip:
1251 continue
1257 continue
1252 if fn not in copies.get(prev[fn], {}):
1258 if fn not in copies.get(prev[fn], {}):
1253 found = display(fn, rev, {}, state) or found
1259 found = display(fn, rev, {}, state) or found
1254 return (not found and 1) or 0
1260 return (not found and 1) or 0
1255
1261
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        # De-duplicate: several revisions may name the same branch.
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1304 def help_(ui, name=None, with_version=False):
1310 def help_(ui, name=None, with_version=False):
1305 """show help for a command, extension, or list of commands
1311 """show help for a command, extension, or list of commands
1306
1312
1307 With no arguments, print a list of commands and short help.
1313 With no arguments, print a list of commands and short help.
1308
1314
1309 Given a command name, print help for that command.
1315 Given a command name, print help for that command.
1310
1316
1311 Given an extension name, print help for that extension, and the
1317 Given an extension name, print help for that extension, and the
1312 commands it provides."""
1318 commands it provides."""
1313 option_lists = []
1319 option_lists = []
1314
1320
1315 def addglobalopts(aliases):
1321 def addglobalopts(aliases):
1316 if ui.verbose:
1322 if ui.verbose:
1317 option_lists.append((_("global options:"), globalopts))
1323 option_lists.append((_("global options:"), globalopts))
1318 if name == 'shortlist':
1324 if name == 'shortlist':
1319 option_lists.append((_('use "hg help" for the full list '
1325 option_lists.append((_('use "hg help" for the full list '
1320 'of commands'), ()))
1326 'of commands'), ()))
1321 else:
1327 else:
1322 if name == 'shortlist':
1328 if name == 'shortlist':
1323 msg = _('use "hg help" for the full list of commands '
1329 msg = _('use "hg help" for the full list of commands '
1324 'or "hg -v" for details')
1330 'or "hg -v" for details')
1325 elif aliases:
1331 elif aliases:
1326 msg = _('use "hg -v help%s" to show aliases and '
1332 msg = _('use "hg -v help%s" to show aliases and '
1327 'global options') % (name and " " + name or "")
1333 'global options') % (name and " " + name or "")
1328 else:
1334 else:
1329 msg = _('use "hg -v help %s" to show global options') % name
1335 msg = _('use "hg -v help %s" to show global options') % name
1330 option_lists.append((msg, ()))
1336 option_lists.append((msg, ()))
1331
1337
1332 def helpcmd(name):
1338 def helpcmd(name):
1333 if with_version:
1339 if with_version:
1334 version_(ui)
1340 version_(ui)
1335 ui.write('\n')
1341 ui.write('\n')
1336 aliases, i = cmdutil.findcmd(ui, name)
1342 aliases, i = cmdutil.findcmd(ui, name)
1337 # synopsis
1343 # synopsis
1338 ui.write("%s\n\n" % i[2])
1344 ui.write("%s\n\n" % i[2])
1339
1345
1340 # description
1346 # description
1341 doc = i[0].__doc__
1347 doc = i[0].__doc__
1342 if not doc:
1348 if not doc:
1343 doc = _("(No help text available)")
1349 doc = _("(No help text available)")
1344 if ui.quiet:
1350 if ui.quiet:
1345 doc = doc.splitlines(0)[0]
1351 doc = doc.splitlines(0)[0]
1346 ui.write("%s\n" % doc.rstrip())
1352 ui.write("%s\n" % doc.rstrip())
1347
1353
1348 if not ui.quiet:
1354 if not ui.quiet:
1349 # aliases
1355 # aliases
1350 if len(aliases) > 1:
1356 if len(aliases) > 1:
1351 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1357 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1352
1358
1353 # options
1359 # options
1354 if i[1]:
1360 if i[1]:
1355 option_lists.append((_("options:\n"), i[1]))
1361 option_lists.append((_("options:\n"), i[1]))
1356
1362
1357 addglobalopts(False)
1363 addglobalopts(False)
1358
1364
1359 def helplist(select=None):
1365 def helplist(select=None):
1360 h = {}
1366 h = {}
1361 cmds = {}
1367 cmds = {}
1362 for c, e in table.items():
1368 for c, e in table.items():
1363 f = c.split("|", 1)[0]
1369 f = c.split("|", 1)[0]
1364 if select and not select(f):
1370 if select and not select(f):
1365 continue
1371 continue
1366 if name == "shortlist" and not f.startswith("^"):
1372 if name == "shortlist" and not f.startswith("^"):
1367 continue
1373 continue
1368 f = f.lstrip("^")
1374 f = f.lstrip("^")
1369 if not ui.debugflag and f.startswith("debug"):
1375 if not ui.debugflag and f.startswith("debug"):
1370 continue
1376 continue
1371 doc = e[0].__doc__
1377 doc = e[0].__doc__
1372 if not doc:
1378 if not doc:
1373 doc = _("(No help text available)")
1379 doc = _("(No help text available)")
1374 h[f] = doc.splitlines(0)[0].rstrip()
1380 h[f] = doc.splitlines(0)[0].rstrip()
1375 cmds[f] = c.lstrip("^")
1381 cmds[f] = c.lstrip("^")
1376
1382
1377 fns = h.keys()
1383 fns = h.keys()
1378 fns.sort()
1384 fns.sort()
1379 m = max(map(len, fns))
1385 m = max(map(len, fns))
1380 for f in fns:
1386 for f in fns:
1381 if ui.verbose:
1387 if ui.verbose:
1382 commands = cmds[f].replace("|",", ")
1388 commands = cmds[f].replace("|",", ")
1383 ui.write(" %s:\n %s\n"%(commands, h[f]))
1389 ui.write(" %s:\n %s\n"%(commands, h[f]))
1384 else:
1390 else:
1385 ui.write(' %-*s %s\n' % (m, f, h[f]))
1391 ui.write(' %-*s %s\n' % (m, f, h[f]))
1386
1392
1387 if not ui.quiet:
1393 if not ui.quiet:
1388 addglobalopts(True)
1394 addglobalopts(True)
1389
1395
1390 def helptopic(name):
1396 def helptopic(name):
1391 v = None
1397 v = None
1392 for i in help.helptable:
1398 for i in help.helptable:
1393 l = i.split('|')
1399 l = i.split('|')
1394 if name in l:
1400 if name in l:
1395 v = i
1401 v = i
1396 header = l[-1]
1402 header = l[-1]
1397 if not v:
1403 if not v:
1398 raise cmdutil.UnknownCommand(name)
1404 raise cmdutil.UnknownCommand(name)
1399
1405
1400 # description
1406 # description
1401 doc = help.helptable[v]
1407 doc = help.helptable[v]
1402 if not doc:
1408 if not doc:
1403 doc = _("(No help text available)")
1409 doc = _("(No help text available)")
1404 if callable(doc):
1410 if callable(doc):
1405 doc = doc()
1411 doc = doc()
1406
1412
1407 ui.write("%s\n" % header)
1413 ui.write("%s\n" % header)
1408 ui.write("%s\n" % doc.rstrip())
1414 ui.write("%s\n" % doc.rstrip())
1409
1415
1410 def helpext(name):
1416 def helpext(name):
1411 try:
1417 try:
1412 mod = extensions.find(name)
1418 mod = extensions.find(name)
1413 except KeyError:
1419 except KeyError:
1414 raise cmdutil.UnknownCommand(name)
1420 raise cmdutil.UnknownCommand(name)
1415
1421
1416 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1422 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1417 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1423 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1418 for d in doc[1:]:
1424 for d in doc[1:]:
1419 ui.write(d, '\n')
1425 ui.write(d, '\n')
1420
1426
1421 ui.status('\n')
1427 ui.status('\n')
1422
1428
1423 try:
1429 try:
1424 ct = mod.cmdtable
1430 ct = mod.cmdtable
1425 except AttributeError:
1431 except AttributeError:
1426 ct = None
1432 ct = None
1427 if not ct:
1433 if not ct:
1428 ui.status(_('no commands defined\n'))
1434 ui.status(_('no commands defined\n'))
1429 return
1435 return
1430
1436
1431 ui.status(_('list of commands:\n\n'))
1437 ui.status(_('list of commands:\n\n'))
1432 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1438 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1433 helplist(modcmds.has_key)
1439 helplist(modcmds.has_key)
1434
1440
1435 if name and name != 'shortlist':
1441 if name and name != 'shortlist':
1436 i = None
1442 i = None
1437 for f in (helpcmd, helptopic, helpext):
1443 for f in (helpcmd, helptopic, helpext):
1438 try:
1444 try:
1439 f(name)
1445 f(name)
1440 i = None
1446 i = None
1441 break
1447 break
1442 except cmdutil.UnknownCommand, inst:
1448 except cmdutil.UnknownCommand, inst:
1443 i = inst
1449 i = inst
1444 if i:
1450 if i:
1445 raise i
1451 raise i
1446
1452
1447 else:
1453 else:
1448 # program name
1454 # program name
1449 if ui.verbose or with_version:
1455 if ui.verbose or with_version:
1450 version_(ui)
1456 version_(ui)
1451 else:
1457 else:
1452 ui.status(_("Mercurial Distributed SCM\n"))
1458 ui.status(_("Mercurial Distributed SCM\n"))
1453 ui.status('\n')
1459 ui.status('\n')
1454
1460
1455 # list of commands
1461 # list of commands
1456 if name == "shortlist":
1462 if name == "shortlist":
1457 ui.status(_('basic commands:\n\n'))
1463 ui.status(_('basic commands:\n\n'))
1458 else:
1464 else:
1459 ui.status(_('list of commands:\n\n'))
1465 ui.status(_('list of commands:\n\n'))
1460
1466
1461 helplist()
1467 helplist()
1462
1468
1463 # list all option lists
1469 # list all option lists
1464 opt_output = []
1470 opt_output = []
1465 for title, options in option_lists:
1471 for title, options in option_lists:
1466 opt_output.append(("\n%s" % title, None))
1472 opt_output.append(("\n%s" % title, None))
1467 for shortopt, longopt, default, desc in options:
1473 for shortopt, longopt, default, desc in options:
1468 if "DEPRECATED" in desc and not ui.verbose: continue
1474 if "DEPRECATED" in desc and not ui.verbose: continue
1469 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1475 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1470 longopt and " --%s" % longopt),
1476 longopt and " --%s" % longopt),
1471 "%s%s" % (desc,
1477 "%s%s" % (desc,
1472 default
1478 default
1473 and _(" (default: %s)") % default
1479 and _(" (default: %s)") % default
1474 or "")))
1480 or "")))
1475
1481
1476 if opt_output:
1482 if opt_output:
1477 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1483 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1478 for first, second in opt_output:
1484 for first, second in opt_output:
1479 if second:
1485 if second:
1480 ui.write(" %-*s %s\n" % (opts_len, first, second))
1486 ui.write(" %-*s %s\n" % (opts_len, first, second))
1481 else:
1487 else:
1482 ui.write("%s\n" % first)
1488 ui.write("%s\n" % first)
1483
1489
1484 def identify(ui, repo, source=None,
1490 def identify(ui, repo, source=None,
1485 rev=None, num=None, id=None, branch=None, tags=None):
1491 rev=None, num=None, id=None, branch=None, tags=None):
1486 """identify the working copy or specified revision
1492 """identify the working copy or specified revision
1487
1493
1488 With no revision, print a summary of the current state of the repo.
1494 With no revision, print a summary of the current state of the repo.
1489
1495
1490 With a path, do a lookup in another repository.
1496 With a path, do a lookup in another repository.
1491
1497
1492 This summary identifies the repository state using one or two parent
1498 This summary identifies the repository state using one or two parent
1493 hash identifiers, followed by a "+" if there are uncommitted changes
1499 hash identifiers, followed by a "+" if there are uncommitted changes
1494 in the working directory, a list of tags for this revision and a branch
1500 in the working directory, a list of tags for this revision and a branch
1495 name for non-default branches.
1501 name for non-default branches.
1496 """
1502 """
1497
1503
1498 hexfunc = ui.debugflag and hex or short
1504 hexfunc = ui.debugflag and hex or short
1499 default = not (num or id or branch or tags)
1505 default = not (num or id or branch or tags)
1500 output = []
1506 output = []
1501
1507
1502 if source:
1508 if source:
1503 source, revs = cmdutil.parseurl(ui.expandpath(source), [])
1509 source, revs = cmdutil.parseurl(ui.expandpath(source), [])
1504 srepo = hg.repository(ui, source)
1510 srepo = hg.repository(ui, source)
1505 if not rev and revs:
1511 if not rev and revs:
1506 rev = revs[0]
1512 rev = revs[0]
1507 if not rev:
1513 if not rev:
1508 rev = "tip"
1514 rev = "tip"
1509 if num or branch or tags:
1515 if num or branch or tags:
1510 raise util.Abort(
1516 raise util.Abort(
1511 "can't query remote revision number, branch, or tags")
1517 "can't query remote revision number, branch, or tags")
1512 output = [hexfunc(srepo.lookup(rev))]
1518 output = [hexfunc(srepo.lookup(rev))]
1513 elif not rev:
1519 elif not rev:
1514 ctx = repo.workingctx()
1520 ctx = repo.workingctx()
1515 parents = ctx.parents()
1521 parents = ctx.parents()
1516 changed = False
1522 changed = False
1517 if default or id or num:
1523 if default or id or num:
1518 changed = ctx.files() + ctx.deleted()
1524 changed = ctx.files() + ctx.deleted()
1519 if default or id:
1525 if default or id:
1520 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1526 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1521 (changed) and "+" or "")]
1527 (changed) and "+" or "")]
1522 if num:
1528 if num:
1523 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1529 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1524 (changed) and "+" or ""))
1530 (changed) and "+" or ""))
1525 else:
1531 else:
1526 ctx = repo.changectx(rev)
1532 ctx = repo.changectx(rev)
1527 if default or id:
1533 if default or id:
1528 output = [hexfunc(ctx.node())]
1534 output = [hexfunc(ctx.node())]
1529 if num:
1535 if num:
1530 output.append(str(ctx.rev()))
1536 output.append(str(ctx.rev()))
1531
1537
1532 if not source and default and not ui.quiet:
1538 if not source and default and not ui.quiet:
1533 b = util.tolocal(ctx.branch())
1539 b = util.tolocal(ctx.branch())
1534 if b != 'default':
1540 if b != 'default':
1535 output.append("(%s)" % b)
1541 output.append("(%s)" % b)
1536
1542
1537 # multiple tags for a single parent separated by '/'
1543 # multiple tags for a single parent separated by '/'
1538 t = "/".join(ctx.tags())
1544 t = "/".join(ctx.tags())
1539 if t:
1545 if t:
1540 output.append(t)
1546 output.append(t)
1541
1547
1542 if branch:
1548 if branch:
1543 output.append(util.tolocal(ctx.branch()))
1549 output.append(util.tolocal(ctx.branch()))
1544
1550
1545 if tags:
1551 if tags:
1546 output.extend(ctx.tags())
1552 output.extend(ctx.tags())
1547
1553
1548 ui.write("%s\n" % ' '.join(output))
1554 ui.write("%s\n" % ' '.join(output))
1549
1555
1550 def import_(ui, repo, patch1, *patches, **opts):
1556 def import_(ui, repo, patch1, *patches, **opts):
1551 """import an ordered set of patches
1557 """import an ordered set of patches
1552
1558
1553 Import a list of patches and commit them individually.
1559 Import a list of patches and commit them individually.
1554
1560
1555 If there are outstanding changes in the working directory, import
1561 If there are outstanding changes in the working directory, import
1556 will abort unless given the -f flag.
1562 will abort unless given the -f flag.
1557
1563
1558 You can import a patch straight from a mail message. Even patches
1564 You can import a patch straight from a mail message. Even patches
1559 as attachments work (body part must be type text/plain or
1565 as attachments work (body part must be type text/plain or
1560 text/x-patch to be used). From and Subject headers of email
1566 text/x-patch to be used). From and Subject headers of email
1561 message are used as default committer and commit message. All
1567 message are used as default committer and commit message. All
1562 text/plain body parts before first diff are added to commit
1568 text/plain body parts before first diff are added to commit
1563 message.
1569 message.
1564
1570
1565 If the imported patch was generated by hg export, user and description
1571 If the imported patch was generated by hg export, user and description
1566 from patch override values from message headers and body. Values
1572 from patch override values from message headers and body. Values
1567 given on command line with -m and -u override these.
1573 given on command line with -m and -u override these.
1568
1574
1569 If --exact is specified, import will set the working directory
1575 If --exact is specified, import will set the working directory
1570 to the parent of each patch before applying it, and will abort
1576 to the parent of each patch before applying it, and will abort
1571 if the resulting changeset has a different ID than the one
1577 if the resulting changeset has a different ID than the one
1572 recorded in the patch. This may happen due to character set
1578 recorded in the patch. This may happen due to character set
1573 problems or other deficiencies in the text patch format.
1579 problems or other deficiencies in the text patch format.
1574
1580
1575 To read a patch from standard input, use patch name "-".
1581 To read a patch from standard input, use patch name "-".
1576 """
1582 """
1577 patches = (patch1,) + patches
1583 patches = (patch1,) + patches
1578
1584
1579 if opts.get('exact') or not opts['force']:
1585 if opts.get('exact') or not opts['force']:
1580 cmdutil.bail_if_changed(repo)
1586 cmdutil.bail_if_changed(repo)
1581
1587
1582 d = opts["base"]
1588 d = opts["base"]
1583 strip = opts["strip"]
1589 strip = opts["strip"]
1584
1590 wlock = lock = None
1585 wlock = repo.wlock()
1591 try:
1586 lock = repo.lock()
1592 wlock = repo.wlock()
1587
1593 lock = repo.lock()
1588 for p in patches:
1594 for p in patches:
1589 pf = os.path.join(d, p)
1595 pf = os.path.join(d, p)
1590
1596
1591 if pf == '-':
1597 if pf == '-':
1592 ui.status(_("applying patch from stdin\n"))
1598 ui.status(_("applying patch from stdin\n"))
1593 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
1599 data = patch.extract(ui, sys.stdin)
1594 else:
1595 ui.status(_("applying %s\n") % p)
1596 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf, 'rb'))
1597
1598 if tmpname is None:
1599 raise util.Abort(_('no diffs found'))
1600
1601 try:
1602 cmdline_message = cmdutil.logmessage(opts)
1603 if cmdline_message:
1604 # pickup the cmdline msg
1605 message = cmdline_message
1606 elif message:
1607 # pickup the patch msg
1608 message = message.strip()
1609 else:
1600 else:
1610 # launch the editor
1601 ui.status(_("applying %s\n") % p)
1611 message = None
1602 data = patch.extract(ui, file(pf, 'rb'))
1612 ui.debug(_('message:\n%s\n') % message)
1603
1613
1604 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1614 wp = repo.workingctx().parents()
1605
1615 if opts.get('exact'):
1606 if tmpname is None:
1616 if not nodeid or not p1:
1607 raise util.Abort(_('no diffs found'))
1617 raise util.Abort(_('not a mercurial patch'))
1608
1618 p1 = repo.lookup(p1)
1609 try:
1619 p2 = repo.lookup(p2 or hex(nullid))
1610 cmdline_message = cmdutil.logmessage(opts)
1620
1611 if cmdline_message:
1621 if p1 != wp[0].node():
1612 # pickup the cmdline msg
1622 hg.clean(repo, p1, wlock=wlock)
1613 message = cmdline_message
1623 repo.dirstate.setparents(p1, p2)
1614 elif message:
1624 elif p2:
1615 # pickup the patch msg
1625 try:
1616 message = message.strip()
1617 else:
1618 # launch the editor
1619 message = None
1620 ui.debug(_('message:\n%s\n') % message)
1621
1622 wp = repo.workingctx().parents()
1623 if opts.get('exact'):
1624 if not nodeid or not p1:
1625 raise util.Abort(_('not a mercurial patch'))
1626 p1 = repo.lookup(p1)
1626 p1 = repo.lookup(p1)
1627 p2 = repo.lookup(p2)
1627 p2 = repo.lookup(p2 or hex(nullid))
1628 if p1 == wp[0].node():
1628
1629 repo.dirstate.setparents(p1, p2)
1629 if p1 != wp[0].node():
1630 except hg.RepoError:
1630 hg.clean(repo, p1, wlock=wlock)
1631 pass
1631 repo.dirstate.setparents(p1, p2)
1632 if opts.get('exact') or opts.get('import_branch'):
1632 elif p2:
1633 repo.dirstate.setbranch(branch or 'default')
1633 try:
1634
1634 p1 = repo.lookup(p1)
1635 files = {}
1635 p2 = repo.lookup(p2)
1636 try:
1636 if p1 == wp[0].node():
1637 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1637 repo.dirstate.setparents(p1, p2)
1638 files=files)
1638 except hg.RepoError:
1639 pass
1640 if opts.get('exact') or opts.get('import_branch'):
1641 repo.dirstate.setbranch(branch or 'default')
1642
1643 files = {}
1644 try:
1645 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1646 files=files)
1647 finally:
1648 files = patch.updatedir(ui, repo, files, wlock=wlock)
1649 n = repo.commit(files, message, user, date, wlock=wlock,
1650 lock=lock)
1651 if opts.get('exact'):
1652 if hex(n) != nodeid:
1653 repo.rollback(wlock=wlock, lock=lock)
1654 raise util.Abort(_('patch is damaged' +
1655 ' or loses information'))
1639 finally:
1656 finally:
1640 files = patch.updatedir(ui, repo, files, wlock=wlock)
1657 os.unlink(tmpname)
1641 n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1658 finally:
1642 if opts.get('exact'):
1659 del wlock, lock
1643 if hex(n) != nodeid:
1644 repo.rollback(wlock=wlock, lock=lock)
1645 raise util.Abort(_('patch is damaged or loses information'))
1646 finally:
1647 os.unlink(tmpname)
1648
1660
1649 def incoming(ui, repo, source="default", **opts):
1661 def incoming(ui, repo, source="default", **opts):
1650 """show new changesets found in source
1662 """show new changesets found in source
1651
1663
1652 Show new changesets found in the specified path/URL or the default
1664 Show new changesets found in the specified path/URL or the default
1653 pull location. These are the changesets that would be pulled if a pull
1665 pull location. These are the changesets that would be pulled if a pull
1654 was requested.
1666 was requested.
1655
1667
1656 For remote repository, using --bundle avoids downloading the changesets
1668 For remote repository, using --bundle avoids downloading the changesets
1657 twice if the incoming is followed by a pull.
1669 twice if the incoming is followed by a pull.
1658
1670
1659 See pull for valid source format details.
1671 See pull for valid source format details.
1660 """
1672 """
1661 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1673 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1662 cmdutil.setremoteconfig(ui, opts)
1674 cmdutil.setremoteconfig(ui, opts)
1663
1675
1664 other = hg.repository(ui, source)
1676 other = hg.repository(ui, source)
1665 ui.status(_('comparing with %s\n') % source)
1677 ui.status(_('comparing with %s\n') % source)
1666 if revs:
1678 if revs:
1667 if 'lookup' in other.capabilities:
1679 if 'lookup' in other.capabilities:
1668 revs = [other.lookup(rev) for rev in revs]
1680 revs = [other.lookup(rev) for rev in revs]
1669 else:
1681 else:
1670 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1682 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1671 raise util.Abort(error)
1683 raise util.Abort(error)
1672 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1684 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1673 if not incoming:
1685 if not incoming:
1674 try:
1686 try:
1675 os.unlink(opts["bundle"])
1687 os.unlink(opts["bundle"])
1676 except:
1688 except:
1677 pass
1689 pass
1678 ui.status(_("no changes found\n"))
1690 ui.status(_("no changes found\n"))
1679 return 1
1691 return 1
1680
1692
1681 cleanup = None
1693 cleanup = None
1682 try:
1694 try:
1683 fname = opts["bundle"]
1695 fname = opts["bundle"]
1684 if fname or not other.local():
1696 if fname or not other.local():
1685 # create a bundle (uncompressed if other repo is not local)
1697 # create a bundle (uncompressed if other repo is not local)
1686 if revs is None:
1698 if revs is None:
1687 cg = other.changegroup(incoming, "incoming")
1699 cg = other.changegroup(incoming, "incoming")
1688 else:
1700 else:
1689 if 'changegroupsubset' not in other.capabilities:
1701 if 'changegroupsubset' not in other.capabilities:
1690 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1702 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1691 cg = other.changegroupsubset(incoming, revs, 'incoming')
1703 cg = other.changegroupsubset(incoming, revs, 'incoming')
1692 bundletype = other.local() and "HG10BZ" or "HG10UN"
1704 bundletype = other.local() and "HG10BZ" or "HG10UN"
1693 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1705 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1694 # keep written bundle?
1706 # keep written bundle?
1695 if opts["bundle"]:
1707 if opts["bundle"]:
1696 cleanup = None
1708 cleanup = None
1697 if not other.local():
1709 if not other.local():
1698 # use the created uncompressed bundlerepo
1710 # use the created uncompressed bundlerepo
1699 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1711 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1700
1712
1701 o = other.changelog.nodesbetween(incoming, revs)[0]
1713 o = other.changelog.nodesbetween(incoming, revs)[0]
1702 if opts['newest_first']:
1714 if opts['newest_first']:
1703 o.reverse()
1715 o.reverse()
1704 displayer = cmdutil.show_changeset(ui, other, opts)
1716 displayer = cmdutil.show_changeset(ui, other, opts)
1705 for n in o:
1717 for n in o:
1706 parents = [p for p in other.changelog.parents(n) if p != nullid]
1718 parents = [p for p in other.changelog.parents(n) if p != nullid]
1707 if opts['no_merges'] and len(parents) == 2:
1719 if opts['no_merges'] and len(parents) == 2:
1708 continue
1720 continue
1709 displayer.show(changenode=n)
1721 displayer.show(changenode=n)
1710 finally:
1722 finally:
1711 if hasattr(other, 'close'):
1723 if hasattr(other, 'close'):
1712 other.close()
1724 other.close()
1713 if cleanup:
1725 if cleanup:
1714 os.unlink(cleanup)
1726 os.unlink(cleanup)
1715
1727
1716 def init(ui, dest=".", **opts):
1728 def init(ui, dest=".", **opts):
1717 """create a new repository in the given directory
1729 """create a new repository in the given directory
1718
1730
1719 Initialize a new repository in the given directory. If the given
1731 Initialize a new repository in the given directory. If the given
1720 directory does not exist, it is created.
1732 directory does not exist, it is created.
1721
1733
1722 If no directory is given, the current directory is used.
1734 If no directory is given, the current directory is used.
1723
1735
1724 It is possible to specify an ssh:// URL as the destination.
1736 It is possible to specify an ssh:// URL as the destination.
1725 Look at the help text for the pull command for important details
1737 Look at the help text for the pull command for important details
1726 about ssh:// URLs.
1738 about ssh:// URLs.
1727 """
1739 """
1728 cmdutil.setremoteconfig(ui, opts)
1740 cmdutil.setremoteconfig(ui, opts)
1729 hg.repository(ui, dest, create=1)
1741 hg.repository(ui, dest, create=1)
1730
1742
1731 def locate(ui, repo, *pats, **opts):
1743 def locate(ui, repo, *pats, **opts):
1732 """locate files matching specific patterns
1744 """locate files matching specific patterns
1733
1745
1734 Print all files under Mercurial control whose names match the
1746 Print all files under Mercurial control whose names match the
1735 given patterns.
1747 given patterns.
1736
1748
1737 This command searches the entire repository by default. To search
1749 This command searches the entire repository by default. To search
1738 just the current directory and its subdirectories, use
1750 just the current directory and its subdirectories, use
1739 "--include .".
1751 "--include .".
1740
1752
1741 If no patterns are given to match, this command prints all file
1753 If no patterns are given to match, this command prints all file
1742 names.
1754 names.
1743
1755
1744 If you want to feed the output of this command into the "xargs"
1756 If you want to feed the output of this command into the "xargs"
1745 command, use the "-0" option to both this command and "xargs".
1757 command, use the "-0" option to both this command and "xargs".
1746 This will avoid the problem of "xargs" treating single filenames
1758 This will avoid the problem of "xargs" treating single filenames
1747 that contain white space as multiple filenames.
1759 that contain white space as multiple filenames.
1748 """
1760 """
1749 end = opts['print0'] and '\0' or '\n'
1761 end = opts['print0'] and '\0' or '\n'
1750 rev = opts['rev']
1762 rev = opts['rev']
1751 if rev:
1763 if rev:
1752 node = repo.lookup(rev)
1764 node = repo.lookup(rev)
1753 else:
1765 else:
1754 node = None
1766 node = None
1755
1767
1756 ret = 1
1768 ret = 1
1757 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1769 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1758 badmatch=util.always,
1770 badmatch=util.always,
1759 default='relglob'):
1771 default='relglob'):
1760 if src == 'b':
1772 if src == 'b':
1761 continue
1773 continue
1762 if not node and abs not in repo.dirstate:
1774 if not node and abs not in repo.dirstate:
1763 continue
1775 continue
1764 if opts['fullpath']:
1776 if opts['fullpath']:
1765 ui.write(os.path.join(repo.root, abs), end)
1777 ui.write(os.path.join(repo.root, abs), end)
1766 else:
1778 else:
1767 ui.write(((pats and rel) or abs), end)
1779 ui.write(((pats and rel) or abs), end)
1768 ret = 0
1780 ret = 0
1769
1781
1770 return ret
1782 return ret
1771
1783
1772 def log(ui, repo, *pats, **opts):
1784 def log(ui, repo, *pats, **opts):
1773 """show revision history of entire repository or files
1785 """show revision history of entire repository or files
1774
1786
1775 Print the revision history of the specified files or the entire
1787 Print the revision history of the specified files or the entire
1776 project.
1788 project.
1777
1789
1778 File history is shown without following rename or copy history of
1790 File history is shown without following rename or copy history of
1779 files. Use -f/--follow with a file name to follow history across
1791 files. Use -f/--follow with a file name to follow history across
1780 renames and copies. --follow without a file name will only show
1792 renames and copies. --follow without a file name will only show
1781 ancestors or descendants of the starting revision. --follow-first
1793 ancestors or descendants of the starting revision. --follow-first
1782 only follows the first parent of merge revisions.
1794 only follows the first parent of merge revisions.
1783
1795
1784 If no revision range is specified, the default is tip:0 unless
1796 If no revision range is specified, the default is tip:0 unless
1785 --follow is set, in which case the working directory parent is
1797 --follow is set, in which case the working directory parent is
1786 used as the starting revision.
1798 used as the starting revision.
1787
1799
1788 By default this command outputs: changeset id and hash, tags,
1800 By default this command outputs: changeset id and hash, tags,
1789 non-trivial parents, user, date and time, and a summary for each
1801 non-trivial parents, user, date and time, and a summary for each
1790 commit. When the -v/--verbose switch is used, the list of changed
1802 commit. When the -v/--verbose switch is used, the list of changed
1791 files and full commit message is shown.
1803 files and full commit message is shown.
1792
1804
1793 NOTE: log -p may generate unexpected diff output for merge
1805 NOTE: log -p may generate unexpected diff output for merge
1794 changesets, as it will compare the merge changeset against its
1806 changesets, as it will compare the merge changeset against its
1795 first parent only. Also, the files: list will only reflect files
1807 first parent only. Also, the files: list will only reflect files
1796 that are different from BOTH parents.
1808 that are different from BOTH parents.
1797
1809
1798 """
1810 """
1799
1811
1800 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1812 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1801 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1813 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1802
1814
1803 if opts['limit']:
1815 if opts['limit']:
1804 try:
1816 try:
1805 limit = int(opts['limit'])
1817 limit = int(opts['limit'])
1806 except ValueError:
1818 except ValueError:
1807 raise util.Abort(_('limit must be a positive integer'))
1819 raise util.Abort(_('limit must be a positive integer'))
1808 if limit <= 0: raise util.Abort(_('limit must be positive'))
1820 if limit <= 0: raise util.Abort(_('limit must be positive'))
1809 else:
1821 else:
1810 limit = sys.maxint
1822 limit = sys.maxint
1811 count = 0
1823 count = 0
1812
1824
1813 if opts['copies'] and opts['rev']:
1825 if opts['copies'] and opts['rev']:
1814 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1826 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1815 else:
1827 else:
1816 endrev = repo.changelog.count()
1828 endrev = repo.changelog.count()
1817 rcache = {}
1829 rcache = {}
1818 ncache = {}
1830 ncache = {}
1819 dcache = []
1831 dcache = []
1820 def getrenamed(fn, rev, man):
1832 def getrenamed(fn, rev, man):
1821 '''looks up all renames for a file (up to endrev) the first
1833 '''looks up all renames for a file (up to endrev) the first
1822 time the file is given. It indexes on the changerev and only
1834 time the file is given. It indexes on the changerev and only
1823 parses the manifest if linkrev != changerev.
1835 parses the manifest if linkrev != changerev.
1824 Returns rename info for fn at changerev rev.'''
1836 Returns rename info for fn at changerev rev.'''
1825 if fn not in rcache:
1837 if fn not in rcache:
1826 rcache[fn] = {}
1838 rcache[fn] = {}
1827 ncache[fn] = {}
1839 ncache[fn] = {}
1828 fl = repo.file(fn)
1840 fl = repo.file(fn)
1829 for i in xrange(fl.count()):
1841 for i in xrange(fl.count()):
1830 node = fl.node(i)
1842 node = fl.node(i)
1831 lr = fl.linkrev(node)
1843 lr = fl.linkrev(node)
1832 renamed = fl.renamed(node)
1844 renamed = fl.renamed(node)
1833 rcache[fn][lr] = renamed
1845 rcache[fn][lr] = renamed
1834 if renamed:
1846 if renamed:
1835 ncache[fn][node] = renamed
1847 ncache[fn][node] = renamed
1836 if lr >= endrev:
1848 if lr >= endrev:
1837 break
1849 break
1838 if rev in rcache[fn]:
1850 if rev in rcache[fn]:
1839 return rcache[fn][rev]
1851 return rcache[fn][rev]
1840 mr = repo.manifest.rev(man)
1852 mr = repo.manifest.rev(man)
1841 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1853 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1842 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1854 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1843 if not dcache or dcache[0] != man:
1855 if not dcache or dcache[0] != man:
1844 dcache[:] = [man, repo.manifest.readdelta(man)]
1856 dcache[:] = [man, repo.manifest.readdelta(man)]
1845 if fn in dcache[1]:
1857 if fn in dcache[1]:
1846 return ncache[fn].get(dcache[1][fn])
1858 return ncache[fn].get(dcache[1][fn])
1847 return None
1859 return None
1848
1860
1849 df = False
1861 df = False
1850 if opts["date"]:
1862 if opts["date"]:
1851 df = util.matchdate(opts["date"])
1863 df = util.matchdate(opts["date"])
1852
1864
1853 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1865 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1854 for st, rev, fns in changeiter:
1866 for st, rev, fns in changeiter:
1855 if st == 'add':
1867 if st == 'add':
1856 changenode = repo.changelog.node(rev)
1868 changenode = repo.changelog.node(rev)
1857 parents = [p for p in repo.changelog.parentrevs(rev)
1869 parents = [p for p in repo.changelog.parentrevs(rev)
1858 if p != nullrev]
1870 if p != nullrev]
1859 if opts['no_merges'] and len(parents) == 2:
1871 if opts['no_merges'] and len(parents) == 2:
1860 continue
1872 continue
1861 if opts['only_merges'] and len(parents) != 2:
1873 if opts['only_merges'] and len(parents) != 2:
1862 continue
1874 continue
1863
1875
1864 if df:
1876 if df:
1865 changes = get(rev)
1877 changes = get(rev)
1866 if not df(changes[2][0]):
1878 if not df(changes[2][0]):
1867 continue
1879 continue
1868
1880
1869 if opts['keyword']:
1881 if opts['keyword']:
1870 changes = get(rev)
1882 changes = get(rev)
1871 miss = 0
1883 miss = 0
1872 for k in [kw.lower() for kw in opts['keyword']]:
1884 for k in [kw.lower() for kw in opts['keyword']]:
1873 if not (k in changes[1].lower() or
1885 if not (k in changes[1].lower() or
1874 k in changes[4].lower() or
1886 k in changes[4].lower() or
1875 k in " ".join(changes[3]).lower()):
1887 k in " ".join(changes[3]).lower()):
1876 miss = 1
1888 miss = 1
1877 break
1889 break
1878 if miss:
1890 if miss:
1879 continue
1891 continue
1880
1892
1881 copies = []
1893 copies = []
1882 if opts.get('copies') and rev:
1894 if opts.get('copies') and rev:
1883 mf = get(rev)[0]
1895 mf = get(rev)[0]
1884 for fn in get(rev)[3]:
1896 for fn in get(rev)[3]:
1885 rename = getrenamed(fn, rev, mf)
1897 rename = getrenamed(fn, rev, mf)
1886 if rename:
1898 if rename:
1887 copies.append((fn, rename[0]))
1899 copies.append((fn, rename[0]))
1888 displayer.show(rev, changenode, copies=copies)
1900 displayer.show(rev, changenode, copies=copies)
1889 elif st == 'iter':
1901 elif st == 'iter':
1890 if count == limit: break
1902 if count == limit: break
1891 if displayer.flush(rev):
1903 if displayer.flush(rev):
1892 count += 1
1904 count += 1
1893
1905
1894 def manifest(ui, repo, rev=None):
1906 def manifest(ui, repo, rev=None):
1895 """output the current or given revision of the project manifest
1907 """output the current or given revision of the project manifest
1896
1908
1897 Print a list of version controlled files for the given revision.
1909 Print a list of version controlled files for the given revision.
1898 If no revision is given, the parent of the working directory is used,
1910 If no revision is given, the parent of the working directory is used,
1899 or tip if no revision is checked out.
1911 or tip if no revision is checked out.
1900
1912
1901 The manifest is the list of files being version controlled. If no revision
1913 The manifest is the list of files being version controlled. If no revision
1902 is given then the first parent of the working directory is used.
1914 is given then the first parent of the working directory is used.
1903
1915
1904 With -v flag, print file permissions. With --debug flag, print
1916 With -v flag, print file permissions. With --debug flag, print
1905 file revision hashes.
1917 file revision hashes.
1906 """
1918 """
1907
1919
1908 m = repo.changectx(rev).manifest()
1920 m = repo.changectx(rev).manifest()
1909 files = m.keys()
1921 files = m.keys()
1910 files.sort()
1922 files.sort()
1911
1923
1912 for f in files:
1924 for f in files:
1913 if ui.debugflag:
1925 if ui.debugflag:
1914 ui.write("%40s " % hex(m[f]))
1926 ui.write("%40s " % hex(m[f]))
1915 if ui.verbose:
1927 if ui.verbose:
1916 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1928 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1917 ui.write("%s\n" % f)
1929 ui.write("%s\n" % f)
1918
1930
1919 def merge(ui, repo, node=None, force=None, rev=None):
1931 def merge(ui, repo, node=None, force=None, rev=None):
1920 """merge working directory with another revision
1932 """merge working directory with another revision
1921
1933
1922 Merge the contents of the current working directory and the
1934 Merge the contents of the current working directory and the
1923 requested revision. Files that changed between either parent are
1935 requested revision. Files that changed between either parent are
1924 marked as changed for the next commit and a commit must be
1936 marked as changed for the next commit and a commit must be
1925 performed before any further updates are allowed.
1937 performed before any further updates are allowed.
1926
1938
1927 If no revision is specified, the working directory's parent is a
1939 If no revision is specified, the working directory's parent is a
1928 head revision, and the repository contains exactly one other head,
1940 head revision, and the repository contains exactly one other head,
1929 the other head is merged with by default. Otherwise, an explicit
1941 the other head is merged with by default. Otherwise, an explicit
1930 revision to merge with must be provided.
1942 revision to merge with must be provided.
1931 """
1943 """
1932
1944
1933 if rev and node:
1945 if rev and node:
1934 raise util.Abort(_("please specify just one revision"))
1946 raise util.Abort(_("please specify just one revision"))
1935
1947
1936 if not node:
1948 if not node:
1937 node = rev
1949 node = rev
1938
1950
1939 if not node:
1951 if not node:
1940 heads = repo.heads()
1952 heads = repo.heads()
1941 if len(heads) > 2:
1953 if len(heads) > 2:
1942 raise util.Abort(_('repo has %d heads - '
1954 raise util.Abort(_('repo has %d heads - '
1943 'please merge with an explicit rev') %
1955 'please merge with an explicit rev') %
1944 len(heads))
1956 len(heads))
1945 if len(heads) == 1:
1957 if len(heads) == 1:
1946 raise util.Abort(_('there is nothing to merge - '
1958 raise util.Abort(_('there is nothing to merge - '
1947 'use "hg update" instead'))
1959 'use "hg update" instead'))
1948 parent = repo.dirstate.parents()[0]
1960 parent = repo.dirstate.parents()[0]
1949 if parent not in heads:
1961 if parent not in heads:
1950 raise util.Abort(_('working dir not at a head rev - '
1962 raise util.Abort(_('working dir not at a head rev - '
1951 'use "hg update" or merge with an explicit rev'))
1963 'use "hg update" or merge with an explicit rev'))
1952 node = parent == heads[0] and heads[-1] or heads[0]
1964 node = parent == heads[0] and heads[-1] or heads[0]
1953 return hg.merge(repo, node, force=force)
1965 return hg.merge(repo, node, force=force)
1954
1966
1955 def outgoing(ui, repo, dest=None, **opts):
1967 def outgoing(ui, repo, dest=None, **opts):
1956 """show changesets not found in destination
1968 """show changesets not found in destination
1957
1969
1958 Show changesets not found in the specified destination repository or
1970 Show changesets not found in the specified destination repository or
1959 the default push location. These are the changesets that would be pushed
1971 the default push location. These are the changesets that would be pushed
1960 if a push was requested.
1972 if a push was requested.
1961
1973
1962 See pull for valid destination format details.
1974 See pull for valid destination format details.
1963 """
1975 """
1964 dest, revs = cmdutil.parseurl(
1976 dest, revs = cmdutil.parseurl(
1965 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1977 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1966 cmdutil.setremoteconfig(ui, opts)
1978 cmdutil.setremoteconfig(ui, opts)
1967 if revs:
1979 if revs:
1968 revs = [repo.lookup(rev) for rev in revs]
1980 revs = [repo.lookup(rev) for rev in revs]
1969
1981
1970 other = hg.repository(ui, dest)
1982 other = hg.repository(ui, dest)
1971 ui.status(_('comparing with %s\n') % dest)
1983 ui.status(_('comparing with %s\n') % dest)
1972 o = repo.findoutgoing(other, force=opts['force'])
1984 o = repo.findoutgoing(other, force=opts['force'])
1973 if not o:
1985 if not o:
1974 ui.status(_("no changes found\n"))
1986 ui.status(_("no changes found\n"))
1975 return 1
1987 return 1
1976 o = repo.changelog.nodesbetween(o, revs)[0]
1988 o = repo.changelog.nodesbetween(o, revs)[0]
1977 if opts['newest_first']:
1989 if opts['newest_first']:
1978 o.reverse()
1990 o.reverse()
1979 displayer = cmdutil.show_changeset(ui, repo, opts)
1991 displayer = cmdutil.show_changeset(ui, repo, opts)
1980 for n in o:
1992 for n in o:
1981 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1993 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1982 if opts['no_merges'] and len(parents) == 2:
1994 if opts['no_merges'] and len(parents) == 2:
1983 continue
1995 continue
1984 displayer.show(changenode=n)
1996 displayer.show(changenode=n)
1985
1997
1986 def parents(ui, repo, file_=None, **opts):
1998 def parents(ui, repo, file_=None, **opts):
1987 """show the parents of the working dir or revision
1999 """show the parents of the working dir or revision
1988
2000
1989 Print the working directory's parent revisions. If a
2001 Print the working directory's parent revisions. If a
1990 revision is given via --rev, the parent of that revision
2002 revision is given via --rev, the parent of that revision
1991 will be printed. If a file argument is given, revision in
2003 will be printed. If a file argument is given, revision in
1992 which the file was last changed (before the working directory
2004 which the file was last changed (before the working directory
1993 revision or the argument to --rev if given) is printed.
2005 revision or the argument to --rev if given) is printed.
1994 """
2006 """
1995 rev = opts.get('rev')
2007 rev = opts.get('rev')
1996 if file_:
2008 if file_:
1997 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
2009 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1998 if anypats or len(files) != 1:
2010 if anypats or len(files) != 1:
1999 raise util.Abort(_('can only specify an explicit file name'))
2011 raise util.Abort(_('can only specify an explicit file name'))
2000 ctx = repo.filectx(files[0], changeid=rev)
2012 ctx = repo.filectx(files[0], changeid=rev)
2001 elif rev:
2013 elif rev:
2002 ctx = repo.changectx(rev)
2014 ctx = repo.changectx(rev)
2003 else:
2015 else:
2004 ctx = repo.workingctx()
2016 ctx = repo.workingctx()
2005 p = [cp.node() for cp in ctx.parents()]
2017 p = [cp.node() for cp in ctx.parents()]
2006
2018
2007 displayer = cmdutil.show_changeset(ui, repo, opts)
2019 displayer = cmdutil.show_changeset(ui, repo, opts)
2008 for n in p:
2020 for n in p:
2009 if n != nullid:
2021 if n != nullid:
2010 displayer.show(changenode=n)
2022 displayer.show(changenode=n)
2011
2023
2012 def paths(ui, repo, search=None):
2024 def paths(ui, repo, search=None):
2013 """show definition of symbolic path names
2025 """show definition of symbolic path names
2014
2026
2015 Show definition of symbolic path name NAME. If no name is given, show
2027 Show definition of symbolic path name NAME. If no name is given, show
2016 definition of available names.
2028 definition of available names.
2017
2029
2018 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2030 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2019 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2031 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2020 """
2032 """
2021 if search:
2033 if search:
2022 for name, path in ui.configitems("paths"):
2034 for name, path in ui.configitems("paths"):
2023 if name == search:
2035 if name == search:
2024 ui.write("%s\n" % path)
2036 ui.write("%s\n" % path)
2025 return
2037 return
2026 ui.warn(_("not found!\n"))
2038 ui.warn(_("not found!\n"))
2027 return 1
2039 return 1
2028 else:
2040 else:
2029 for name, path in ui.configitems("paths"):
2041 for name, path in ui.configitems("paths"):
2030 ui.write("%s = %s\n" % (name, path))
2042 ui.write("%s = %s\n" % (name, path))
2031
2043
2032 def postincoming(ui, repo, modheads, optupdate, wasempty):
2044 def postincoming(ui, repo, modheads, optupdate, wasempty):
2033 if modheads == 0:
2045 if modheads == 0:
2034 return
2046 return
2035 if optupdate:
2047 if optupdate:
2036 if wasempty:
2048 if wasempty:
2037 return hg.update(repo, repo.lookup('default'))
2049 return hg.update(repo, repo.lookup('default'))
2038 elif modheads == 1:
2050 elif modheads == 1:
2039 return hg.update(repo, repo.changelog.tip()) # update
2051 return hg.update(repo, repo.changelog.tip()) # update
2040 else:
2052 else:
2041 ui.status(_("not updating, since new heads added\n"))
2053 ui.status(_("not updating, since new heads added\n"))
2042 if modheads > 1:
2054 if modheads > 1:
2043 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2055 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2044 else:
2056 else:
2045 ui.status(_("(run 'hg update' to get a working copy)\n"))
2057 ui.status(_("(run 'hg update' to get a working copy)\n"))
2046
2058
2047 def pull(ui, repo, source="default", **opts):
2059 def pull(ui, repo, source="default", **opts):
2048 """pull changes from the specified source
2060 """pull changes from the specified source
2049
2061
2050 Pull changes from a remote repository to a local one.
2062 Pull changes from a remote repository to a local one.
2051
2063
2052 This finds all changes from the repository at the specified path
2064 This finds all changes from the repository at the specified path
2053 or URL and adds them to the local repository. By default, this
2065 or URL and adds them to the local repository. By default, this
2054 does not update the copy of the project in the working directory.
2066 does not update the copy of the project in the working directory.
2055
2067
2056 Valid URLs are of the form:
2068 Valid URLs are of the form:
2057
2069
2058 local/filesystem/path (or file://local/filesystem/path)
2070 local/filesystem/path (or file://local/filesystem/path)
2059 http://[user@]host[:port]/[path]
2071 http://[user@]host[:port]/[path]
2060 https://[user@]host[:port]/[path]
2072 https://[user@]host[:port]/[path]
2061 ssh://[user@]host[:port]/[path]
2073 ssh://[user@]host[:port]/[path]
2062 static-http://host[:port]/[path]
2074 static-http://host[:port]/[path]
2063
2075
2064 Paths in the local filesystem can either point to Mercurial
2076 Paths in the local filesystem can either point to Mercurial
2065 repositories or to bundle files (as created by 'hg bundle' or
2077 repositories or to bundle files (as created by 'hg bundle' or
2066 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2078 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2067 allows access to a Mercurial repository where you simply use a web
2079 allows access to a Mercurial repository where you simply use a web
2068 server to publish the .hg directory as static content.
2080 server to publish the .hg directory as static content.
2069
2081
2070 An optional identifier after # indicates a particular branch, tag,
2082 An optional identifier after # indicates a particular branch, tag,
2071 or changeset to pull.
2083 or changeset to pull.
2072
2084
2073 Some notes about using SSH with Mercurial:
2085 Some notes about using SSH with Mercurial:
2074 - SSH requires an accessible shell account on the destination machine
2086 - SSH requires an accessible shell account on the destination machine
2075 and a copy of hg in the remote path or specified with as remotecmd.
2087 and a copy of hg in the remote path or specified with as remotecmd.
2076 - path is relative to the remote user's home directory by default.
2088 - path is relative to the remote user's home directory by default.
2077 Use an extra slash at the start of a path to specify an absolute path:
2089 Use an extra slash at the start of a path to specify an absolute path:
2078 ssh://example.com//tmp/repository
2090 ssh://example.com//tmp/repository
2079 - Mercurial doesn't use its own compression via SSH; the right thing
2091 - Mercurial doesn't use its own compression via SSH; the right thing
2080 to do is to configure it in your ~/.ssh/config, e.g.:
2092 to do is to configure it in your ~/.ssh/config, e.g.:
2081 Host *.mylocalnetwork.example.com
2093 Host *.mylocalnetwork.example.com
2082 Compression no
2094 Compression no
2083 Host *
2095 Host *
2084 Compression yes
2096 Compression yes
2085 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2097 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2086 with the --ssh command line option.
2098 with the --ssh command line option.
2087 """
2099 """
2088 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2100 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2089 cmdutil.setremoteconfig(ui, opts)
2101 cmdutil.setremoteconfig(ui, opts)
2090
2102
2091 other = hg.repository(ui, source)
2103 other = hg.repository(ui, source)
2092 ui.status(_('pulling from %s\n') % (source))
2104 ui.status(_('pulling from %s\n') % (source))
2093 if revs:
2105 if revs:
2094 if 'lookup' in other.capabilities:
2106 if 'lookup' in other.capabilities:
2095 revs = [other.lookup(rev) for rev in revs]
2107 revs = [other.lookup(rev) for rev in revs]
2096 else:
2108 else:
2097 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2109 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2098 raise util.Abort(error)
2110 raise util.Abort(error)
2099
2111
2100 wasempty = repo.changelog.count() == 0
2112 wasempty = repo.changelog.count() == 0
2101 modheads = repo.pull(other, heads=revs, force=opts['force'])
2113 modheads = repo.pull(other, heads=revs, force=opts['force'])
2102 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2114 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2103
2115
2104 def push(ui, repo, dest=None, **opts):
2116 def push(ui, repo, dest=None, **opts):
2105 """push changes to the specified destination
2117 """push changes to the specified destination
2106
2118
2107 Push changes from the local repository to the given destination.
2119 Push changes from the local repository to the given destination.
2108
2120
2109 This is the symmetrical operation for pull. It helps to move
2121 This is the symmetrical operation for pull. It helps to move
2110 changes from the current repository to a different one. If the
2122 changes from the current repository to a different one. If the
2111 destination is local this is identical to a pull in that directory
2123 destination is local this is identical to a pull in that directory
2112 from the current one.
2124 from the current one.
2113
2125
2114 By default, push will refuse to run if it detects the result would
2126 By default, push will refuse to run if it detects the result would
2115 increase the number of remote heads. This generally indicates the
2127 increase the number of remote heads. This generally indicates the
2116 the client has forgotten to sync and merge before pushing.
2128 the client has forgotten to sync and merge before pushing.
2117
2129
2118 Valid URLs are of the form:
2130 Valid URLs are of the form:
2119
2131
2120 local/filesystem/path (or file://local/filesystem/path)
2132 local/filesystem/path (or file://local/filesystem/path)
2121 ssh://[user@]host[:port]/[path]
2133 ssh://[user@]host[:port]/[path]
2122 http://[user@]host[:port]/[path]
2134 http://[user@]host[:port]/[path]
2123 https://[user@]host[:port]/[path]
2135 https://[user@]host[:port]/[path]
2124
2136
2125 An optional identifier after # indicates a particular branch, tag,
2137 An optional identifier after # indicates a particular branch, tag,
2126 or changeset to push.
2138 or changeset to push.
2127
2139
2128 Look at the help text for the pull command for important details
2140 Look at the help text for the pull command for important details
2129 about ssh:// URLs.
2141 about ssh:// URLs.
2130
2142
2131 Pushing to http:// and https:// URLs is only possible, if this
2143 Pushing to http:// and https:// URLs is only possible, if this
2132 feature is explicitly enabled on the remote Mercurial server.
2144 feature is explicitly enabled on the remote Mercurial server.
2133 """
2145 """
2134 dest, revs = cmdutil.parseurl(
2146 dest, revs = cmdutil.parseurl(
2135 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2147 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2136 cmdutil.setremoteconfig(ui, opts)
2148 cmdutil.setremoteconfig(ui, opts)
2137
2149
2138 other = hg.repository(ui, dest)
2150 other = hg.repository(ui, dest)
2139 ui.status('pushing to %s\n' % (dest))
2151 ui.status('pushing to %s\n' % (dest))
2140 if revs:
2152 if revs:
2141 revs = [repo.lookup(rev) for rev in revs]
2153 revs = [repo.lookup(rev) for rev in revs]
2142 r = repo.push(other, opts['force'], revs=revs)
2154 r = repo.push(other, opts['force'], revs=revs)
2143 return r == 0
2155 return r == 0
2144
2156
2145 def rawcommit(ui, repo, *pats, **opts):
2157 def rawcommit(ui, repo, *pats, **opts):
2146 """raw commit interface (DEPRECATED)
2158 """raw commit interface (DEPRECATED)
2147
2159
2148 (DEPRECATED)
2160 (DEPRECATED)
2149 Lowlevel commit, for use in helper scripts.
2161 Lowlevel commit, for use in helper scripts.
2150
2162
2151 This command is not intended to be used by normal users, as it is
2163 This command is not intended to be used by normal users, as it is
2152 primarily useful for importing from other SCMs.
2164 primarily useful for importing from other SCMs.
2153
2165
2154 This command is now deprecated and will be removed in a future
2166 This command is now deprecated and will be removed in a future
2155 release, please use debugsetparents and commit instead.
2167 release, please use debugsetparents and commit instead.
2156 """
2168 """
2157
2169
2158 ui.warn(_("(the rawcommit command is deprecated)\n"))
2170 ui.warn(_("(the rawcommit command is deprecated)\n"))
2159
2171
2160 message = cmdutil.logmessage(opts)
2172 message = cmdutil.logmessage(opts)
2161
2173
2162 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2174 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2163 if opts['files']:
2175 if opts['files']:
2164 files += open(opts['files']).read().splitlines()
2176 files += open(opts['files']).read().splitlines()
2165
2177
2166 parents = [repo.lookup(p) for p in opts['parent']]
2178 parents = [repo.lookup(p) for p in opts['parent']]
2167
2179
2168 try:
2180 try:
2169 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2181 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2170 except ValueError, inst:
2182 except ValueError, inst:
2171 raise util.Abort(str(inst))
2183 raise util.Abort(str(inst))
2172
2184
2173 def recover(ui, repo):
2185 def recover(ui, repo):
2174 """roll back an interrupted transaction
2186 """roll back an interrupted transaction
2175
2187
2176 Recover from an interrupted commit or pull.
2188 Recover from an interrupted commit or pull.
2177
2189
2178 This command tries to fix the repository status after an interrupted
2190 This command tries to fix the repository status after an interrupted
2179 operation. It should only be necessary when Mercurial suggests it.
2191 operation. It should only be necessary when Mercurial suggests it.
2180 """
2192 """
2181 if repo.recover():
2193 if repo.recover():
2182 return hg.verify(repo)
2194 return hg.verify(repo)
2183 return 1
2195 return 1
2184
2196
2185 def remove(ui, repo, *pats, **opts):
2197 def remove(ui, repo, *pats, **opts):
2186 """remove the specified files on the next commit
2198 """remove the specified files on the next commit
2187
2199
2188 Schedule the indicated files for removal from the repository.
2200 Schedule the indicated files for removal from the repository.
2189
2201
2190 This only removes files from the current branch, not from the
2202 This only removes files from the current branch, not from the
2191 entire project history. If the files still exist in the working
2203 entire project history. If the files still exist in the working
2192 directory, they will be deleted from it. If invoked with --after,
2204 directory, they will be deleted from it. If invoked with --after,
2193 files are marked as removed, but not actually unlinked unless --force
2205 files are marked as removed, but not actually unlinked unless --force
2194 is also given. Without exact file names, --after will only mark
2206 is also given. Without exact file names, --after will only mark
2195 files as removed if they are no longer in the working directory.
2207 files as removed if they are no longer in the working directory.
2196
2208
2197 This command schedules the files to be removed at the next commit.
2209 This command schedules the files to be removed at the next commit.
2198 To undo a remove before that, see hg revert.
2210 To undo a remove before that, see hg revert.
2199
2211
2200 Modified files and added files are not removed by default. To
2212 Modified files and added files are not removed by default. To
2201 remove them, use the -f/--force option.
2213 remove them, use the -f/--force option.
2202 """
2214 """
2203 names = []
2215 names = []
2204 if not opts['after'] and not pats:
2216 if not opts['after'] and not pats:
2205 raise util.Abort(_('no files specified'))
2217 raise util.Abort(_('no files specified'))
2206 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2218 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2207 exact = dict.fromkeys(files)
2219 exact = dict.fromkeys(files)
2208 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2220 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2209 modified, added, removed, deleted, unknown = mardu
2221 modified, added, removed, deleted, unknown = mardu
2210 remove, forget = [], []
2222 remove, forget = [], []
2211 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2223 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2212 reason = None
2224 reason = None
2213 if abs in modified and not opts['force']:
2225 if abs in modified and not opts['force']:
2214 reason = _('is modified (use -f to force removal)')
2226 reason = _('is modified (use -f to force removal)')
2215 elif abs in added:
2227 elif abs in added:
2216 if opts['force']:
2228 if opts['force']:
2217 forget.append(abs)
2229 forget.append(abs)
2218 continue
2230 continue
2219 reason = _('has been marked for add (use -f to force removal)')
2231 reason = _('has been marked for add (use -f to force removal)')
2220 elif abs not in repo.dirstate:
2232 elif abs not in repo.dirstate:
2221 reason = _('is not managed')
2233 reason = _('is not managed')
2222 elif opts['after'] and not exact and abs not in deleted:
2234 elif opts['after'] and not exact and abs not in deleted:
2223 continue
2235 continue
2224 elif abs in removed:
2236 elif abs in removed:
2225 continue
2237 continue
2226 if reason:
2238 if reason:
2227 if exact:
2239 if exact:
2228 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2240 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2229 else:
2241 else:
2230 if ui.verbose or not exact:
2242 if ui.verbose or not exact:
2231 ui.status(_('removing %s\n') % rel)
2243 ui.status(_('removing %s\n') % rel)
2232 remove.append(abs)
2244 remove.append(abs)
2233 repo.forget(forget)
2245 repo.forget(forget)
2234 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2246 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2235
2247
2236 def rename(ui, repo, *pats, **opts):
2248 def rename(ui, repo, *pats, **opts):
2237 """rename files; equivalent of copy + remove
2249 """rename files; equivalent of copy + remove
2238
2250
2239 Mark dest as copies of sources; mark sources for deletion. If
2251 Mark dest as copies of sources; mark sources for deletion. If
2240 dest is a directory, copies are put in that directory. If dest is
2252 dest is a directory, copies are put in that directory. If dest is
2241 a file, there can only be one source.
2253 a file, there can only be one source.
2242
2254
2243 By default, this command copies the contents of files as they
2255 By default, this command copies the contents of files as they
2244 stand in the working directory. If invoked with --after, the
2256 stand in the working directory. If invoked with --after, the
2245 operation is recorded, but no copying is performed.
2257 operation is recorded, but no copying is performed.
2246
2258
2247 This command takes effect in the next commit. To undo a rename
2259 This command takes effect in the next commit. To undo a rename
2248 before that, see hg revert.
2260 before that, see hg revert.
2249 """
2261 """
2250 wlock = repo.wlock(False)
2262 wlock = repo.wlock(False)
2251 errs, copied = docopy(ui, repo, pats, opts, wlock)
2263 try:
2252 names = []
2264 errs, copied = docopy(ui, repo, pats, opts, wlock)
2253 for abs, rel, exact in copied:
2265 names = []
2254 if ui.verbose or not exact:
2266 for abs, rel, exact in copied:
2255 ui.status(_('removing %s\n') % rel)
2267 if ui.verbose or not exact:
2256 names.append(abs)
2268 ui.status(_('removing %s\n') % rel)
2257 if not opts.get('dry_run'):
2269 names.append(abs)
2258 repo.remove(names, True, wlock=wlock)
2270 if not opts.get('dry_run'):
2259 return errs
2271 repo.remove(names, True, wlock=wlock)
2272 return errs
2273 finally:
2274 del wlock
2260
2275
2261 def revert(ui, repo, *pats, **opts):
2276 def revert(ui, repo, *pats, **opts):
2262 """revert files or dirs to their states as of some revision
2277 """revert files or dirs to their states as of some revision
2263
2278
2264 With no revision specified, revert the named files or directories
2279 With no revision specified, revert the named files or directories
2265 to the contents they had in the parent of the working directory.
2280 to the contents they had in the parent of the working directory.
2266 This restores the contents of the affected files to an unmodified
2281 This restores the contents of the affected files to an unmodified
2267 state and unschedules adds, removes, copies, and renames. If the
2282 state and unschedules adds, removes, copies, and renames. If the
2268 working directory has two parents, you must explicitly specify the
2283 working directory has two parents, you must explicitly specify the
2269 revision to revert to.
2284 revision to revert to.
2270
2285
2271 Modified files are saved with a .orig suffix before reverting.
2286 Modified files are saved with a .orig suffix before reverting.
2272 To disable these backups, use --no-backup.
2287 To disable these backups, use --no-backup.
2273
2288
2274 Using the -r option, revert the given files or directories to their
2289 Using the -r option, revert the given files or directories to their
2275 contents as of a specific revision. This can be helpful to "roll
2290 contents as of a specific revision. This can be helpful to "roll
2276 back" some or all of a change that should not have been committed.
2291 back" some or all of a change that should not have been committed.
2277
2292
2278 Revert modifies the working directory. It does not commit any
2293 Revert modifies the working directory. It does not commit any
2279 changes, or change the parent of the working directory. If you
2294 changes, or change the parent of the working directory. If you
2280 revert to a revision other than the parent of the working
2295 revert to a revision other than the parent of the working
2281 directory, the reverted files will thus appear modified
2296 directory, the reverted files will thus appear modified
2282 afterwards.
2297 afterwards.
2283
2298
2284 If a file has been deleted, it is restored. If the executable
2299 If a file has been deleted, it is restored. If the executable
2285 mode of a file was changed, it is reset.
2300 mode of a file was changed, it is reset.
2286
2301
2287 If names are given, all files matching the names are reverted.
2302 If names are given, all files matching the names are reverted.
2288
2303
2289 If no arguments are given, no files are reverted.
2304 If no arguments are given, no files are reverted.
2290 """
2305 """
2291
2306
2292 if opts["date"]:
2307 if opts["date"]:
2293 if opts["rev"]:
2308 if opts["rev"]:
2294 raise util.Abort(_("you can't specify a revision and a date"))
2309 raise util.Abort(_("you can't specify a revision and a date"))
2295 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2310 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2296
2311
2297 if not pats and not opts['all']:
2312 if not pats and not opts['all']:
2298 raise util.Abort(_('no files or directories specified; '
2313 raise util.Abort(_('no files or directories specified; '
2299 'use --all to revert the whole repo'))
2314 'use --all to revert the whole repo'))
2300
2315
2301 parent, p2 = repo.dirstate.parents()
2316 parent, p2 = repo.dirstate.parents()
2302 if not opts['rev'] and p2 != nullid:
2317 if not opts['rev'] and p2 != nullid:
2303 raise util.Abort(_('uncommitted merge - please provide a '
2318 raise util.Abort(_('uncommitted merge - please provide a '
2304 'specific revision'))
2319 'specific revision'))
2305 ctx = repo.changectx(opts['rev'])
2320 ctx = repo.changectx(opts['rev'])
2306 node = ctx.node()
2321 node = ctx.node()
2307 mf = ctx.manifest()
2322 mf = ctx.manifest()
2308 if node == parent:
2323 if node == parent:
2309 pmf = mf
2324 pmf = mf
2310 else:
2325 else:
2311 pmf = None
2326 pmf = None
2312
2327
2313 wlock = repo.wlock()
2314
2315 # need all matching names in dirstate and manifest of target rev,
2328 # need all matching names in dirstate and manifest of target rev,
2316 # so have to walk both. do not print errors if files exist in one
2329 # so have to walk both. do not print errors if files exist in one
2317 # but not other.
2330 # but not other.
2318
2331
2319 names = {}
2332 names = {}
2320 target_only = {}
2333 target_only = {}
2321
2334
2322 # walk dirstate.
2335 wlock = repo.wlock()
2323
2336 try:
2324 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2337 # walk dirstate.
2325 badmatch=mf.has_key):
2338 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2326 names[abs] = (rel, exact)
2339 badmatch=mf.has_key):
2327 if src == 'b':
2340 names[abs] = (rel, exact)
2328 target_only[abs] = True
2341 if src == 'b':
2329
2342 target_only[abs] = True
2330 # walk target manifest.
2343
2331
2344 # walk target manifest.
2332 def badmatch(path):
2345
2333 if path in names:
2346 def badmatch(path):
2334 return True
2347 if path in names:
2335 path_ = path + '/'
2336 for f in names:
2337 if f.startswith(path_):
2338 return True
2348 return True
2339 return False
2349 path_ = path + '/'
2340
2350 for f in names:
2341 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2351 if f.startswith(path_):
2342 badmatch=badmatch):
2352 return True
2343 if abs in names or src == 'b':
2353 return False
2344 continue
2354
2345 names[abs] = (rel, exact)
2355 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2346 target_only[abs] = True
2356 badmatch=badmatch):
2347
2357 if abs in names or src == 'b':
2348 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2358 continue
2349 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2359 names[abs] = (rel, exact)
2350
2360 target_only[abs] = True
2351 revert = ([], _('reverting %s\n'))
2361
2352 add = ([], _('adding %s\n'))
2362 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2353 remove = ([], _('removing %s\n'))
2363 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2354 forget = ([], _('forgetting %s\n'))
2364
2355 undelete = ([], _('undeleting %s\n'))
2365 revert = ([], _('reverting %s\n'))
2356 update = {}
2366 add = ([], _('adding %s\n'))
2357
2367 remove = ([], _('removing %s\n'))
2358 disptable = (
2368 forget = ([], _('forgetting %s\n'))
2359 # dispatch table:
2369 undelete = ([], _('undeleting %s\n'))
2360 # file state
2370 update = {}
2361 # action if in target manifest
2371
2362 # action if not in target manifest
2372 disptable = (
2363 # make backup if in target manifest
2373 # dispatch table:
2364 # make backup if not in target manifest
2374 # file state
2365 (modified, revert, remove, True, True),
2375 # action if in target manifest
2366 (added, revert, forget, True, False),
2376 # action if not in target manifest
2367 (removed, undelete, None, False, False),
2377 # make backup if in target manifest
2368 (deleted, revert, remove, False, False),
2378 # make backup if not in target manifest
2369 (unknown, add, None, True, False),
2379 (modified, revert, remove, True, True),
2370 (target_only, add, None, False, False),
2380 (added, revert, forget, True, False),
2371 )
2381 (removed, undelete, None, False, False),
2372
2382 (deleted, revert, remove, False, False),
2373 entries = names.items()
2383 (unknown, add, None, True, False),
2374 entries.sort()
2384 (target_only, add, None, False, False),
2375
2385 )
2376 for abs, (rel, exact) in entries:
2386
2377 mfentry = mf.get(abs)
2387 entries = names.items()
2378 target = repo.wjoin(abs)
2388 entries.sort()
2379 def handle(xlist, dobackup):
2389
2380 xlist[0].append(abs)
2390 for abs, (rel, exact) in entries:
2381 update[abs] = 1
2391 mfentry = mf.get(abs)
2382 if dobackup and not opts['no_backup'] and util.lexists(target):
2392 target = repo.wjoin(abs)
2383 bakname = "%s.orig" % rel
2393 def handle(xlist, dobackup):
2384 ui.note(_('saving current version of %s as %s\n') %
2394 xlist[0].append(abs)
2385 (rel, bakname))
2395 update[abs] = 1
2386 if not opts.get('dry_run'):
2396 if dobackup and not opts['no_backup'] and util.lexists(target):
2387 util.copyfile(target, bakname)
2397 bakname = "%s.orig" % rel
2388 if ui.verbose or not exact:
2398 ui.note(_('saving current version of %s as %s\n') %
2389 ui.status(xlist[1] % rel)
2399 (rel, bakname))
2390 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2400 if not opts.get('dry_run'):
2391 if abs not in table: continue
2401 util.copyfile(target, bakname)
2392 # file has changed in dirstate
2402 if ui.verbose or not exact:
2393 if mfentry:
2403 ui.status(xlist[1] % rel)
2394 handle(hitlist, backuphit)
2404 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2395 elif misslist is not None:
2405 if abs not in table: continue
2396 handle(misslist, backupmiss)
2406 # file has changed in dirstate
2407 if mfentry:
2408 handle(hitlist, backuphit)
2409 elif misslist is not None:
2410 handle(misslist, backupmiss)
2411 else:
2412 if exact: ui.warn(_('file not managed: %s\n') % rel)
2413 break
2397 else:
2414 else:
2398 if exact: ui.warn(_('file not managed: %s\n') % rel)
2415 # file has not changed in dirstate
2399 break
2416 if node == parent:
2400 else:
2417 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2401 # file has not changed in dirstate
2418 continue
2402 if node == parent:
2419 if pmf is None:
2403 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2420 # only need parent manifest in this unlikely case,
2404 continue
2421 # so do not read by default
2405 if pmf is None:
2422 pmf = repo.changectx(parent).manifest()
2406 # only need parent manifest in this unlikely case,
2423 if abs in pmf:
2407 # so do not read by default
2424 if mfentry:
2408 pmf = repo.changectx(parent).manifest()
2425 # if version of file is same in parent and target
2409 if abs in pmf:
2426 # manifests, do nothing
2410 if mfentry:
2427 if pmf[abs] != mfentry:
2411 # if version of file is same in parent and target
2428 handle(revert, False)
2412 # manifests, do nothing
2429 else:
2413 if pmf[abs] != mfentry:
2430 handle(remove, False)
2414 handle(revert, False)
2431
2415 else:
2432 if not opts.get('dry_run'):
2416 handle(remove, False)
2433 for f in forget[0]:
2417
2434 repo.dirstate.forget(f)
2418 if not opts.get('dry_run'):
2435 r = hg.revert(repo, node, update.has_key, wlock)
2419 for f in forget[0]:
2436 for f in add[0]:
2420 repo.dirstate.forget(f)
2437 repo.dirstate.add(f)
2421 r = hg.revert(repo, node, update.has_key, wlock)
2438 for f in undelete[0]:
2422 for f in add[0]:
2439 repo.dirstate.normal(f)
2423 repo.dirstate.add(f)
2440 for f in remove[0]:
2424 for f in undelete[0]:
2441 repo.dirstate.remove(f)
2425 repo.dirstate.normal(f)
2442 return r
2426 for f in remove[0]:
2443 finally:
2427 repo.dirstate.remove(f)
2444 del wlock
2428 return r
2429
2445
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, which
    may lose subsequent dirstate changes.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All the actual work (journal replay, dirstate restore) is done by
    # the repository object; this command is a thin CLI entry point.
    repo.rollback()
2460
2476
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # Emit the repository root path followed by a newline.
    ui.write("%s\n" % repo.root)
2467
2483
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: speak the ssh wire protocol over stdin/stdout instead of
    # serving HTTP (used by remote 'hg ... ssh://' clients).  This mode
    # requires an actual local repository.
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        # NOTE(review): there is no explicit return after serve_forever();
        # presumably it only returns when the client disconnects and the
        # process is about to exit -- confirm it cannot fall through into
        # the HTTP setup below.
        s.serve_forever()

    # Copy web-related command-line options into the [web] config section
    # so the hgweb machinery picks them up; mirror them into the repo-level
    # ui when it is a distinct object.
    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if repo.ui != parentui:
                repo.ui.setconfig("web", o, str(opts[o]))

    # With no repository we can still serve a collection of repositories
    # described by a webdir_conf file; otherwise it is an error.
    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    # Small adapter exposing the init/run interface that cmdutil.service
    # expects; cmdutil.service handles daemonization and pid files.
    class service:
        def init(self):
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                # e.g. port already in use / permission denied
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            # Only mention the port when it is not the HTTP default (80).
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/\n') %
                          (self.httpd.addr, self.httpd.port))
            else:
                ui.status(_('listening at http://%s/\n') % self.httpd.addr)

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2519
2535
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
    = the previous added file was copied from here
    """

    show_all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    # Ignored and clean files are only computed when requested (-i/-c/-A).
    state = repo.status(node1=node1, node2=node2, files=files,
                        match=matchfn,
                        list_ignored=show_all or opts['ignored'],
                        list_clean=show_all or opts['clean'])
    modified, added, removed, deleted, unknown, ignored, clean = state

    # (option name, status letter, file list) for the default categories.
    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    # 'clean' is only ever shown on explicit request.
    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # -0/--print0 terminates entries with NUL for xargs -0 style consumers.
    eol = opts['print0'] and '\0' or '\n'

    # Show only the categories the user asked for; with no category flags
    # at all, fall back to the default set (everything except 'clean').
    requested = [ct for ct in explicit_changetypes
                 if show_all or opts[ct[0]]]
    for opt, char, paths in (requested or changetypes):
        if opts['no_status']:
            fmt = "%%s%s" % eol
        else:
            fmt = "%s %%s%s" % (char, eol)

        for f in paths:
            ui.write(fmt % repo.pathto(f, cwd))
            # With -C/--copies (or -A), follow an added file's line with
            # the path it was copied from, indented by two spaces.
            if (show_all or opts.get('copies')) and not opts.get('no_status'):
                src = repo.dirstate.copied(f)
                if src:
                    ui.write(' %s%s' % (repo.pathto(src, cwd), eol))
2582
2598
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # These names always resolve to special revisions and may never be tags.
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    # Positional REV is the deprecated spelling; --rev is the current one.
    # Using both at once is ambiguous and rejected.
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        if not name in repo.tags():
            raise util.Abort(_('tag %s does not exist') % name)
        # Removal is implemented as re-tagging the name to the null revision.
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    # Without an explicit revision, refuse to guess during an uncommitted
    # merge (two working-directory parents).
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    # Resolve the revision spec (or None -> working dir parent) to a node.
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    # repo.tag records the tag (and commits .hgtags unless --local).
    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2631
2647
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    # tagslist() is ordered oldest-first; show newest-first like 'hg log'.
    l = repo.tagslist()
    l.reverse()
    # Full 40-char hashes under --debug, short hashes otherwise.
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
        except revlog.LookupError:
            # Tag points at a node unknown to the changelog: show '?' in
            # place of the revision number but keep the hash (hn was
            # already computed above).
            r = " ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            # Pad tag names to a 30-column field, counting display width
            # in the local encoding.
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s\n" % (t, spaces, r))
2654
2670
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # nullrev + changelog.count() yields the highest revision number
    # (assuming nullrev == -1, i.e. count() - 1 -- the tip).
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2661
2677
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames
    wasempty = repo.changelog.count() == 0
    modheads = 0
    for fname in fnames:
        # A local path is read as a plain file; anything else is treated
        # as a URL and fetched over the network.
        if os.path.exists(fname):
            f = open(fname, "rb")
        else:
            f = urllib.urlopen(fname)
        try:
            gen = changegroup.readbundle(f, fname)
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
        finally:
            # Close the bundle stream even when readbundle/addchangegroup
            # raise; previously the handle was leaked.
            f.close()

    # Update/merge according to the heads added by the *last* bundle.
    return postincoming(ui, repo, modheads, opts['update'], wasempty)
2680
2696
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # Only one target revision may be named (positional or --rev).
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # The positional argument is just an alternate spelling of --rev.
    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # Resolve the date spec to the matching revision.
        rev = cmdutil.finddate(ui, repo, date)

    # hg.clean discards local changes; hg.update refuses if it would.
    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2712
2728
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # Thin CLI wrapper; the whole check lives in hg.verify.
    return hg.verify(repo)
2724
2740
def version_(ui):
    """output version and copyright information"""
    # The version line is always printed; the copyright/warranty notice
    # goes through ui.status so --quiet suppresses it.
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2735
2751
# Command options and aliases are listed here, alphabetically

# Shared option tables, spliced into per-command option lists in 'table'
# below.  Each entry is a 4-tuple:
#   (short flag, long name, default value, help text)
# A default of None marks a boolean flag; [] marks a repeatable option.

# Options accepted by every hg command.
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# -n/--dry-run, for commands that can simulate their effect.
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

# Options for commands that talk to a remote repository.
remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

# -I/-X file-matching patterns, for commands that walk the working dir.
walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

# Commit-message options shared by commands that create changesets.
commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]
2776
2792
2777 table = {
2793 table = {
2778 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2794 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2779 "addremove":
2795 "addremove":
2780 (addremove,
2796 (addremove,
2781 [('s', 'similarity', '',
2797 [('s', 'similarity', '',
2782 _('guess renamed files by similarity (0<=s<=100)')),
2798 _('guess renamed files by similarity (0<=s<=100)')),
2783 ] + walkopts + dryrunopts,
2799 ] + walkopts + dryrunopts,
2784 _('hg addremove [OPTION]... [FILE]...')),
2800 _('hg addremove [OPTION]... [FILE]...')),
2785 "^annotate":
2801 "^annotate":
2786 (annotate,
2802 (annotate,
2787 [('r', 'rev', '', _('annotate the specified revision')),
2803 [('r', 'rev', '', _('annotate the specified revision')),
2788 ('f', 'follow', None, _('follow file copies and renames')),
2804 ('f', 'follow', None, _('follow file copies and renames')),
2789 ('a', 'text', None, _('treat all files as text')),
2805 ('a', 'text', None, _('treat all files as text')),
2790 ('u', 'user', None, _('list the author')),
2806 ('u', 'user', None, _('list the author')),
2791 ('d', 'date', None, _('list the date')),
2807 ('d', 'date', None, _('list the date')),
2792 ('n', 'number', None, _('list the revision number (default)')),
2808 ('n', 'number', None, _('list the revision number (default)')),
2793 ('c', 'changeset', None, _('list the changeset')),
2809 ('c', 'changeset', None, _('list the changeset')),
2794 ('l', 'line-number', None,
2810 ('l', 'line-number', None,
2795 _('show line number at the first appearance'))
2811 _('show line number at the first appearance'))
2796 ] + walkopts,
2812 ] + walkopts,
2797 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2813 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2798 "archive":
2814 "archive":
2799 (archive,
2815 (archive,
2800 [('', 'no-decode', None, _('do not pass files through decoders')),
2816 [('', 'no-decode', None, _('do not pass files through decoders')),
2801 ('p', 'prefix', '', _('directory prefix for files in archive')),
2817 ('p', 'prefix', '', _('directory prefix for files in archive')),
2802 ('r', 'rev', '', _('revision to distribute')),
2818 ('r', 'rev', '', _('revision to distribute')),
2803 ('t', 'type', '', _('type of distribution to create')),
2819 ('t', 'type', '', _('type of distribution to create')),
2804 ] + walkopts,
2820 ] + walkopts,
2805 _('hg archive [OPTION]... DEST')),
2821 _('hg archive [OPTION]... DEST')),
2806 "backout":
2822 "backout":
2807 (backout,
2823 (backout,
2808 [('', 'merge', None,
2824 [('', 'merge', None,
2809 _('merge with old dirstate parent after backout')),
2825 _('merge with old dirstate parent after backout')),
2810 ('d', 'date', '', _('record datecode as commit date')),
2826 ('d', 'date', '', _('record datecode as commit date')),
2811 ('', 'parent', '', _('parent to choose when backing out merge')),
2827 ('', 'parent', '', _('parent to choose when backing out merge')),
2812 ('u', 'user', '', _('record user as committer')),
2828 ('u', 'user', '', _('record user as committer')),
2813 ('r', 'rev', '', _('revision to backout')),
2829 ('r', 'rev', '', _('revision to backout')),
2814 ] + walkopts + commitopts,
2830 ] + walkopts + commitopts,
2815 _('hg backout [OPTION]... [-r] REV')),
2831 _('hg backout [OPTION]... [-r] REV')),
2816 "branch":
2832 "branch":
2817 (branch,
2833 (branch,
2818 [('f', 'force', None,
2834 [('f', 'force', None,
2819 _('set branch name even if it shadows an existing branch'))],
2835 _('set branch name even if it shadows an existing branch'))],
2820 _('hg branch [NAME]')),
2836 _('hg branch [NAME]')),
2821 "branches":
2837 "branches":
2822 (branches,
2838 (branches,
2823 [('a', 'active', False,
2839 [('a', 'active', False,
2824 _('show only branches that have unmerged heads'))],
2840 _('show only branches that have unmerged heads'))],
2825 _('hg branches [-a]')),
2841 _('hg branches [-a]')),
2826 "bundle":
2842 "bundle":
2827 (bundle,
2843 (bundle,
2828 [('f', 'force', None,
2844 [('f', 'force', None,
2829 _('run even when remote repository is unrelated')),
2845 _('run even when remote repository is unrelated')),
2830 ('r', 'rev', [],
2846 ('r', 'rev', [],
2831 _('a changeset you would like to bundle')),
2847 _('a changeset you would like to bundle')),
2832 ('', 'base', [],
2848 ('', 'base', [],
2833 _('a base changeset to specify instead of a destination')),
2849 _('a base changeset to specify instead of a destination')),
2834 ] + remoteopts,
2850 ] + remoteopts,
2835 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2851 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2836 "cat":
2852 "cat":
2837 (cat,
2853 (cat,
2838 [('o', 'output', '', _('print output to file with formatted name')),
2854 [('o', 'output', '', _('print output to file with formatted name')),
2839 ('r', 'rev', '', _('print the given revision')),
2855 ('r', 'rev', '', _('print the given revision')),
2840 ] + walkopts,
2856 ] + walkopts,
2841 _('hg cat [OPTION]... FILE...')),
2857 _('hg cat [OPTION]... FILE...')),
2842 "^clone":
2858 "^clone":
2843 (clone,
2859 (clone,
2844 [('U', 'noupdate', None, _('do not update the new working directory')),
2860 [('U', 'noupdate', None, _('do not update the new working directory')),
2845 ('r', 'rev', [],
2861 ('r', 'rev', [],
2846 _('a changeset you would like to have after cloning')),
2862 _('a changeset you would like to have after cloning')),
2847 ('', 'pull', None, _('use pull protocol to copy metadata')),
2863 ('', 'pull', None, _('use pull protocol to copy metadata')),
2848 ('', 'uncompressed', None,
2864 ('', 'uncompressed', None,
2849 _('use uncompressed transfer (fast over LAN)')),
2865 _('use uncompressed transfer (fast over LAN)')),
2850 ] + remoteopts,
2866 ] + remoteopts,
2851 _('hg clone [OPTION]... SOURCE [DEST]')),
2867 _('hg clone [OPTION]... SOURCE [DEST]')),
2852 "^commit|ci":
2868 "^commit|ci":
2853 (commit,
2869 (commit,
2854 [('A', 'addremove', None,
2870 [('A', 'addremove', None,
2855 _('mark new/missing files as added/removed before committing')),
2871 _('mark new/missing files as added/removed before committing')),
2856 ('d', 'date', '', _('record datecode as commit date')),
2872 ('d', 'date', '', _('record datecode as commit date')),
2857 ('u', 'user', '', _('record user as commiter')),
2873 ('u', 'user', '', _('record user as commiter')),
2858 ] + walkopts + commitopts,
2874 ] + walkopts + commitopts,
2859 _('hg commit [OPTION]... [FILE]...')),
2875 _('hg commit [OPTION]... [FILE]...')),
2860 "copy|cp":
2876 "copy|cp":
2861 (copy,
2877 (copy,
2862 [('A', 'after', None, _('record a copy that has already occurred')),
2878 [('A', 'after', None, _('record a copy that has already occurred')),
2863 ('f', 'force', None,
2879 ('f', 'force', None,
2864 _('forcibly copy over an existing managed file')),
2880 _('forcibly copy over an existing managed file')),
2865 ] + walkopts + dryrunopts,
2881 ] + walkopts + dryrunopts,
2866 _('hg copy [OPTION]... [SOURCE]... DEST')),
2882 _('hg copy [OPTION]... [SOURCE]... DEST')),
2867 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2883 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2868 "debugcomplete":
2884 "debugcomplete":
2869 (debugcomplete,
2885 (debugcomplete,
2870 [('o', 'options', None, _('show the command options'))],
2886 [('o', 'options', None, _('show the command options'))],
2871 _('debugcomplete [-o] CMD')),
2887 _('debugcomplete [-o] CMD')),
2872 "debuginstall": (debuginstall, [], _('debuginstall')),
2888 "debuginstall": (debuginstall, [], _('debuginstall')),
2873 "debugrebuildstate":
2889 "debugrebuildstate":
2874 (debugrebuildstate,
2890 (debugrebuildstate,
2875 [('r', 'rev', '', _('revision to rebuild to'))],
2891 [('r', 'rev', '', _('revision to rebuild to'))],
2876 _('debugrebuildstate [-r REV] [REV]')),
2892 _('debugrebuildstate [-r REV] [REV]')),
2877 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2893 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2878 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2894 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2879 "debugstate": (debugstate, [], _('debugstate')),
2895 "debugstate": (debugstate, [], _('debugstate')),
2880 "debugdate":
2896 "debugdate":
2881 (debugdate,
2897 (debugdate,
2882 [('e', 'extended', None, _('try extended date formats'))],
2898 [('e', 'extended', None, _('try extended date formats'))],
2883 _('debugdate [-e] DATE [RANGE]')),
2899 _('debugdate [-e] DATE [RANGE]')),
2884 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2900 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2885 "debugindex": (debugindex, [], _('debugindex FILE')),
2901 "debugindex": (debugindex, [], _('debugindex FILE')),
2886 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2902 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2887 "debugrename":
2903 "debugrename":
2888 (debugrename,
2904 (debugrename,
2889 [('r', 'rev', '', _('revision to debug'))],
2905 [('r', 'rev', '', _('revision to debug'))],
2890 _('debugrename [-r REV] FILE')),
2906 _('debugrename [-r REV] FILE')),
2891 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2907 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2892 "^diff":
2908 "^diff":
2893 (diff,
2909 (diff,
2894 [('r', 'rev', [], _('revision')),
2910 [('r', 'rev', [], _('revision')),
2895 ('a', 'text', None, _('treat all files as text')),
2911 ('a', 'text', None, _('treat all files as text')),
2896 ('p', 'show-function', None,
2912 ('p', 'show-function', None,
2897 _('show which function each change is in')),
2913 _('show which function each change is in')),
2898 ('g', 'git', None, _('use git extended diff format')),
2914 ('g', 'git', None, _('use git extended diff format')),
2899 ('', 'nodates', None, _("don't include dates in diff headers")),
2915 ('', 'nodates', None, _("don't include dates in diff headers")),
2900 ('w', 'ignore-all-space', None,
2916 ('w', 'ignore-all-space', None,
2901 _('ignore white space when comparing lines')),
2917 _('ignore white space when comparing lines')),
2902 ('b', 'ignore-space-change', None,
2918 ('b', 'ignore-space-change', None,
2903 _('ignore changes in the amount of white space')),
2919 _('ignore changes in the amount of white space')),
2904 ('B', 'ignore-blank-lines', None,
2920 ('B', 'ignore-blank-lines', None,
2905 _('ignore changes whose lines are all blank')),
2921 _('ignore changes whose lines are all blank')),
2906 ] + walkopts,
2922 ] + walkopts,
2907 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2923 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2908 "^export":
2924 "^export":
2909 (export,
2925 (export,
2910 [('o', 'output', '', _('print output to file with formatted name')),
2926 [('o', 'output', '', _('print output to file with formatted name')),
2911 ('a', 'text', None, _('treat all files as text')),
2927 ('a', 'text', None, _('treat all files as text')),
2912 ('g', 'git', None, _('use git extended diff format')),
2928 ('g', 'git', None, _('use git extended diff format')),
2913 ('', 'nodates', None, _("don't include dates in diff headers")),
2929 ('', 'nodates', None, _("don't include dates in diff headers")),
2914 ('', 'switch-parent', None, _('diff against the second parent'))],
2930 ('', 'switch-parent', None, _('diff against the second parent'))],
2915 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2931 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2916 "grep":
2932 "grep":
2917 (grep,
2933 (grep,
2918 [('0', 'print0', None, _('end fields with NUL')),
2934 [('0', 'print0', None, _('end fields with NUL')),
2919 ('', 'all', None, _('print all revisions that match')),
2935 ('', 'all', None, _('print all revisions that match')),
2920 ('f', 'follow', None,
2936 ('f', 'follow', None,
2921 _('follow changeset history, or file history across copies and renames')),
2937 _('follow changeset history, or file history across copies and renames')),
2922 ('i', 'ignore-case', None, _('ignore case when matching')),
2938 ('i', 'ignore-case', None, _('ignore case when matching')),
2923 ('l', 'files-with-matches', None,
2939 ('l', 'files-with-matches', None,
2924 _('print only filenames and revs that match')),
2940 _('print only filenames and revs that match')),
2925 ('n', 'line-number', None, _('print matching line numbers')),
2941 ('n', 'line-number', None, _('print matching line numbers')),
2926 ('r', 'rev', [], _('search in given revision range')),
2942 ('r', 'rev', [], _('search in given revision range')),
2927 ('u', 'user', None, _('print user who committed change')),
2943 ('u', 'user', None, _('print user who committed change')),
2928 ] + walkopts,
2944 ] + walkopts,
2929 _('hg grep [OPTION]... PATTERN [FILE]...')),
2945 _('hg grep [OPTION]... PATTERN [FILE]...')),
2930 "heads":
2946 "heads":
2931 (heads,
2947 (heads,
2932 [('', 'style', '', _('display using template map file')),
2948 [('', 'style', '', _('display using template map file')),
2933 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2949 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2934 ('', 'template', '', _('display with template'))],
2950 ('', 'template', '', _('display with template'))],
2935 _('hg heads [-r REV] [REV]...')),
2951 _('hg heads [-r REV] [REV]...')),
2936 "help": (help_, [], _('hg help [COMMAND]')),
2952 "help": (help_, [], _('hg help [COMMAND]')),
2937 "identify|id":
2953 "identify|id":
2938 (identify,
2954 (identify,
2939 [('r', 'rev', '', _('identify the specified rev')),
2955 [('r', 'rev', '', _('identify the specified rev')),
2940 ('n', 'num', None, _('show local revision number')),
2956 ('n', 'num', None, _('show local revision number')),
2941 ('i', 'id', None, _('show global revision id')),
2957 ('i', 'id', None, _('show global revision id')),
2942 ('b', 'branch', None, _('show branch')),
2958 ('b', 'branch', None, _('show branch')),
2943 ('t', 'tags', None, _('show tags'))],
2959 ('t', 'tags', None, _('show tags'))],
2944 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2960 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2945 "import|patch":
2961 "import|patch":
2946 (import_,
2962 (import_,
2947 [('p', 'strip', 1,
2963 [('p', 'strip', 1,
2948 _('directory strip option for patch. This has the same\n'
2964 _('directory strip option for patch. This has the same\n'
2949 'meaning as the corresponding patch option')),
2965 'meaning as the corresponding patch option')),
2950 ('b', 'base', '', _('base path')),
2966 ('b', 'base', '', _('base path')),
2951 ('f', 'force', None,
2967 ('f', 'force', None,
2952 _('skip check for outstanding uncommitted changes')),
2968 _('skip check for outstanding uncommitted changes')),
2953 ('', 'exact', None,
2969 ('', 'exact', None,
2954 _('apply patch to the nodes from which it was generated')),
2970 _('apply patch to the nodes from which it was generated')),
2955 ('', 'import-branch', None,
2971 ('', 'import-branch', None,
2956 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2972 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2957 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2973 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2958 "incoming|in": (incoming,
2974 "incoming|in": (incoming,
2959 [('M', 'no-merges', None, _('do not show merges')),
2975 [('M', 'no-merges', None, _('do not show merges')),
2960 ('f', 'force', None,
2976 ('f', 'force', None,
2961 _('run even when remote repository is unrelated')),
2977 _('run even when remote repository is unrelated')),
2962 ('', 'style', '', _('display using template map file')),
2978 ('', 'style', '', _('display using template map file')),
2963 ('n', 'newest-first', None, _('show newest record first')),
2979 ('n', 'newest-first', None, _('show newest record first')),
2964 ('', 'bundle', '', _('file to store the bundles into')),
2980 ('', 'bundle', '', _('file to store the bundles into')),
2965 ('p', 'patch', None, _('show patch')),
2981 ('p', 'patch', None, _('show patch')),
2966 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2982 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2967 ('', 'template', '', _('display with template')),
2983 ('', 'template', '', _('display with template')),
2968 ] + remoteopts,
2984 ] + remoteopts,
2969 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2985 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2970 ' [--bundle FILENAME] [SOURCE]')),
2986 ' [--bundle FILENAME] [SOURCE]')),
2971 "^init":
2987 "^init":
2972 (init,
2988 (init,
2973 remoteopts,
2989 remoteopts,
2974 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2990 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2975 "locate":
2991 "locate":
2976 (locate,
2992 (locate,
2977 [('r', 'rev', '', _('search the repository as it stood at rev')),
2993 [('r', 'rev', '', _('search the repository as it stood at rev')),
2978 ('0', 'print0', None,
2994 ('0', 'print0', None,
2979 _('end filenames with NUL, for use with xargs')),
2995 _('end filenames with NUL, for use with xargs')),
2980 ('f', 'fullpath', None,
2996 ('f', 'fullpath', None,
2981 _('print complete paths from the filesystem root')),
2997 _('print complete paths from the filesystem root')),
2982 ] + walkopts,
2998 ] + walkopts,
2983 _('hg locate [OPTION]... [PATTERN]...')),
2999 _('hg locate [OPTION]... [PATTERN]...')),
2984 "^log|history":
3000 "^log|history":
2985 (log,
3001 (log,
2986 [('f', 'follow', None,
3002 [('f', 'follow', None,
2987 _('follow changeset history, or file history across copies and renames')),
3003 _('follow changeset history, or file history across copies and renames')),
2988 ('', 'follow-first', None,
3004 ('', 'follow-first', None,
2989 _('only follow the first parent of merge changesets')),
3005 _('only follow the first parent of merge changesets')),
2990 ('d', 'date', '', _('show revs matching date spec')),
3006 ('d', 'date', '', _('show revs matching date spec')),
2991 ('C', 'copies', None, _('show copied files')),
3007 ('C', 'copies', None, _('show copied files')),
2992 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3008 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2993 ('l', 'limit', '', _('limit number of changes displayed')),
3009 ('l', 'limit', '', _('limit number of changes displayed')),
2994 ('r', 'rev', [], _('show the specified revision or range')),
3010 ('r', 'rev', [], _('show the specified revision or range')),
2995 ('', 'removed', None, _('include revs where files were removed')),
3011 ('', 'removed', None, _('include revs where files were removed')),
2996 ('M', 'no-merges', None, _('do not show merges')),
3012 ('M', 'no-merges', None, _('do not show merges')),
2997 ('', 'style', '', _('display using template map file')),
3013 ('', 'style', '', _('display using template map file')),
2998 ('m', 'only-merges', None, _('show only merges')),
3014 ('m', 'only-merges', None, _('show only merges')),
2999 ('p', 'patch', None, _('show patch')),
3015 ('p', 'patch', None, _('show patch')),
3000 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3016 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3001 ('', 'template', '', _('display with template')),
3017 ('', 'template', '', _('display with template')),
3002 ] + walkopts,
3018 ] + walkopts,
3003 _('hg log [OPTION]... [FILE]')),
3019 _('hg log [OPTION]... [FILE]')),
3004 "manifest": (manifest, [], _('hg manifest [REV]')),
3020 "manifest": (manifest, [], _('hg manifest [REV]')),
3005 "^merge":
3021 "^merge":
3006 (merge,
3022 (merge,
3007 [('f', 'force', None, _('force a merge with outstanding changes')),
3023 [('f', 'force', None, _('force a merge with outstanding changes')),
3008 ('r', 'rev', '', _('revision to merge')),
3024 ('r', 'rev', '', _('revision to merge')),
3009 ],
3025 ],
3010 _('hg merge [-f] [[-r] REV]')),
3026 _('hg merge [-f] [[-r] REV]')),
3011 "outgoing|out": (outgoing,
3027 "outgoing|out": (outgoing,
3012 [('M', 'no-merges', None, _('do not show merges')),
3028 [('M', 'no-merges', None, _('do not show merges')),
3013 ('f', 'force', None,
3029 ('f', 'force', None,
3014 _('run even when remote repository is unrelated')),
3030 _('run even when remote repository is unrelated')),
3015 ('p', 'patch', None, _('show patch')),
3031 ('p', 'patch', None, _('show patch')),
3016 ('', 'style', '', _('display using template map file')),
3032 ('', 'style', '', _('display using template map file')),
3017 ('r', 'rev', [], _('a specific revision you would like to push')),
3033 ('r', 'rev', [], _('a specific revision you would like to push')),
3018 ('n', 'newest-first', None, _('show newest record first')),
3034 ('n', 'newest-first', None, _('show newest record first')),
3019 ('', 'template', '', _('display with template')),
3035 ('', 'template', '', _('display with template')),
3020 ] + remoteopts,
3036 ] + remoteopts,
3021 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3037 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3022 "^parents":
3038 "^parents":
3023 (parents,
3039 (parents,
3024 [('r', 'rev', '', _('show parents from the specified rev')),
3040 [('r', 'rev', '', _('show parents from the specified rev')),
3025 ('', 'style', '', _('display using template map file')),
3041 ('', 'style', '', _('display using template map file')),
3026 ('', 'template', '', _('display with template'))],
3042 ('', 'template', '', _('display with template'))],
3027 _('hg parents [-r REV] [FILE]')),
3043 _('hg parents [-r REV] [FILE]')),
3028 "paths": (paths, [], _('hg paths [NAME]')),
3044 "paths": (paths, [], _('hg paths [NAME]')),
3029 "^pull":
3045 "^pull":
3030 (pull,
3046 (pull,
3031 [('u', 'update', None,
3047 [('u', 'update', None,
3032 _('update to new tip if changesets were pulled')),
3048 _('update to new tip if changesets were pulled')),
3033 ('f', 'force', None,
3049 ('f', 'force', None,
3034 _('run even when remote repository is unrelated')),
3050 _('run even when remote repository is unrelated')),
3035 ('r', 'rev', [],
3051 ('r', 'rev', [],
3036 _('a specific revision up to which you would like to pull')),
3052 _('a specific revision up to which you would like to pull')),
3037 ] + remoteopts,
3053 ] + remoteopts,
3038 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3054 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3039 "^push":
3055 "^push":
3040 (push,
3056 (push,
3041 [('f', 'force', None, _('force push')),
3057 [('f', 'force', None, _('force push')),
3042 ('r', 'rev', [], _('a specific revision you would like to push')),
3058 ('r', 'rev', [], _('a specific revision you would like to push')),
3043 ] + remoteopts,
3059 ] + remoteopts,
3044 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3060 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3045 "debugrawcommit|rawcommit":
3061 "debugrawcommit|rawcommit":
3046 (rawcommit,
3062 (rawcommit,
3047 [('p', 'parent', [], _('parent')),
3063 [('p', 'parent', [], _('parent')),
3048 ('d', 'date', '', _('date code')),
3064 ('d', 'date', '', _('date code')),
3049 ('u', 'user', '', _('user')),
3065 ('u', 'user', '', _('user')),
3050 ('F', 'files', '', _('file list'))
3066 ('F', 'files', '', _('file list'))
3051 ] + commitopts,
3067 ] + commitopts,
3052 _('hg debugrawcommit [OPTION]... [FILE]...')),
3068 _('hg debugrawcommit [OPTION]... [FILE]...')),
3053 "recover": (recover, [], _('hg recover')),
3069 "recover": (recover, [], _('hg recover')),
3054 "^remove|rm":
3070 "^remove|rm":
3055 (remove,
3071 (remove,
3056 [('A', 'after', None, _('record remove that has already occurred')),
3072 [('A', 'after', None, _('record remove that has already occurred')),
3057 ('f', 'force', None, _('remove file even if modified')),
3073 ('f', 'force', None, _('remove file even if modified')),
3058 ] + walkopts,
3074 ] + walkopts,
3059 _('hg remove [OPTION]... FILE...')),
3075 _('hg remove [OPTION]... FILE...')),
3060 "rename|mv":
3076 "rename|mv":
3061 (rename,
3077 (rename,
3062 [('A', 'after', None, _('record a rename that has already occurred')),
3078 [('A', 'after', None, _('record a rename that has already occurred')),
3063 ('f', 'force', None,
3079 ('f', 'force', None,
3064 _('forcibly copy over an existing managed file')),
3080 _('forcibly copy over an existing managed file')),
3065 ] + walkopts + dryrunopts,
3081 ] + walkopts + dryrunopts,
3066 _('hg rename [OPTION]... SOURCE... DEST')),
3082 _('hg rename [OPTION]... SOURCE... DEST')),
3067 "^revert":
3083 "^revert":
3068 (revert,
3084 (revert,
3069 [('a', 'all', None, _('revert all changes when no arguments given')),
3085 [('a', 'all', None, _('revert all changes when no arguments given')),
3070 ('d', 'date', '', _('tipmost revision matching date')),
3086 ('d', 'date', '', _('tipmost revision matching date')),
3071 ('r', 'rev', '', _('revision to revert to')),
3087 ('r', 'rev', '', _('revision to revert to')),
3072 ('', 'no-backup', None, _('do not save backup copies of files')),
3088 ('', 'no-backup', None, _('do not save backup copies of files')),
3073 ] + walkopts + dryrunopts,
3089 ] + walkopts + dryrunopts,
3074 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3090 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3075 "rollback": (rollback, [], _('hg rollback')),
3091 "rollback": (rollback, [], _('hg rollback')),
3076 "root": (root, [], _('hg root')),
3092 "root": (root, [], _('hg root')),
3077 "showconfig|debugconfig":
3093 "showconfig|debugconfig":
3078 (showconfig,
3094 (showconfig,
3079 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3095 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3080 _('showconfig [-u] [NAME]...')),
3096 _('showconfig [-u] [NAME]...')),
3081 "^serve":
3097 "^serve":
3082 (serve,
3098 (serve,
3083 [('A', 'accesslog', '', _('name of access log file to write to')),
3099 [('A', 'accesslog', '', _('name of access log file to write to')),
3084 ('d', 'daemon', None, _('run server in background')),
3100 ('d', 'daemon', None, _('run server in background')),
3085 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3101 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3086 ('E', 'errorlog', '', _('name of error log file to write to')),
3102 ('E', 'errorlog', '', _('name of error log file to write to')),
3087 ('p', 'port', 0, _('port to use (default: 8000)')),
3103 ('p', 'port', 0, _('port to use (default: 8000)')),
3088 ('a', 'address', '', _('address to use')),
3104 ('a', 'address', '', _('address to use')),
3089 ('n', 'name', '',
3105 ('n', 'name', '',
3090 _('name to show in web pages (default: working dir)')),
3106 _('name to show in web pages (default: working dir)')),
3091 ('', 'webdir-conf', '', _('name of the webdir config file'
3107 ('', 'webdir-conf', '', _('name of the webdir config file'
3092 ' (serve more than one repo)')),
3108 ' (serve more than one repo)')),
3093 ('', 'pid-file', '', _('name of file to write process ID to')),
3109 ('', 'pid-file', '', _('name of file to write process ID to')),
3094 ('', 'stdio', None, _('for remote clients')),
3110 ('', 'stdio', None, _('for remote clients')),
3095 ('t', 'templates', '', _('web templates to use')),
3111 ('t', 'templates', '', _('web templates to use')),
3096 ('', 'style', '', _('template style to use')),
3112 ('', 'style', '', _('template style to use')),
3097 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3113 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3098 ('', 'certificate', '', _('SSL certificate file'))],
3114 ('', 'certificate', '', _('SSL certificate file'))],
3099 _('hg serve [OPTION]...')),
3115 _('hg serve [OPTION]...')),
3100 "^status|st":
3116 "^status|st":
3101 (status,
3117 (status,
3102 [('A', 'all', None, _('show status of all files')),
3118 [('A', 'all', None, _('show status of all files')),
3103 ('m', 'modified', None, _('show only modified files')),
3119 ('m', 'modified', None, _('show only modified files')),
3104 ('a', 'added', None, _('show only added files')),
3120 ('a', 'added', None, _('show only added files')),
3105 ('r', 'removed', None, _('show only removed files')),
3121 ('r', 'removed', None, _('show only removed files')),
3106 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3122 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3107 ('c', 'clean', None, _('show only files without changes')),
3123 ('c', 'clean', None, _('show only files without changes')),
3108 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3124 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3109 ('i', 'ignored', None, _('show only ignored files')),
3125 ('i', 'ignored', None, _('show only ignored files')),
3110 ('n', 'no-status', None, _('hide status prefix')),
3126 ('n', 'no-status', None, _('hide status prefix')),
3111 ('C', 'copies', None, _('show source of copied files')),
3127 ('C', 'copies', None, _('show source of copied files')),
3112 ('0', 'print0', None,
3128 ('0', 'print0', None,
3113 _('end filenames with NUL, for use with xargs')),
3129 _('end filenames with NUL, for use with xargs')),
3114 ('', 'rev', [], _('show difference from revision')),
3130 ('', 'rev', [], _('show difference from revision')),
3115 ] + walkopts,
3131 ] + walkopts,
3116 _('hg status [OPTION]... [FILE]...')),
3132 _('hg status [OPTION]... [FILE]...')),
3117 "tag":
3133 "tag":
3118 (tag,
3134 (tag,
3119 [('f', 'force', None, _('replace existing tag')),
3135 [('f', 'force', None, _('replace existing tag')),
3120 ('l', 'local', None, _('make the tag local')),
3136 ('l', 'local', None, _('make the tag local')),
3121 ('m', 'message', '', _('message for tag commit log entry')),
3137 ('m', 'message', '', _('message for tag commit log entry')),
3122 ('d', 'date', '', _('record datecode as commit date')),
3138 ('d', 'date', '', _('record datecode as commit date')),
3123 ('u', 'user', '', _('record user as commiter')),
3139 ('u', 'user', '', _('record user as commiter')),
3124 ('r', 'rev', '', _('revision to tag')),
3140 ('r', 'rev', '', _('revision to tag')),
3125 ('', 'remove', None, _('remove a tag'))],
3141 ('', 'remove', None, _('remove a tag'))],
3126 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3142 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3127 "tags": (tags, [], _('hg tags')),
3143 "tags": (tags, [], _('hg tags')),
3128 "tip":
3144 "tip":
3129 (tip,
3145 (tip,
3130 [('', 'style', '', _('display using template map file')),
3146 [('', 'style', '', _('display using template map file')),
3131 ('p', 'patch', None, _('show patch')),
3147 ('p', 'patch', None, _('show patch')),
3132 ('', 'template', '', _('display with template'))],
3148 ('', 'template', '', _('display with template'))],
3133 _('hg tip [-p]')),
3149 _('hg tip [-p]')),
3134 "unbundle":
3150 "unbundle":
3135 (unbundle,
3151 (unbundle,
3136 [('u', 'update', None,
3152 [('u', 'update', None,
3137 _('update to new tip if changesets were unbundled'))],
3153 _('update to new tip if changesets were unbundled'))],
3138 _('hg unbundle [-u] FILE...')),
3154 _('hg unbundle [-u] FILE...')),
3139 "^update|up|checkout|co":
3155 "^update|up|checkout|co":
3140 (update,
3156 (update,
3141 [('C', 'clean', None, _('overwrite locally modified files')),
3157 [('C', 'clean', None, _('overwrite locally modified files')),
3142 ('d', 'date', '', _('tipmost revision matching date')),
3158 ('d', 'date', '', _('tipmost revision matching date')),
3143 ('r', 'rev', '', _('revision'))],
3159 ('r', 'rev', '', _('revision'))],
3144 _('hg update [-C] [-d DATE] [[-r] REV]')),
3160 _('hg update [-C] [-d DATE] [[-r] REV]')),
3145 "verify": (verify, [], _('hg verify')),
3161 "verify": (verify, [], _('hg verify')),
3146 "version": (version_, [], _('hg version')),
3162 "version": (version_, [], _('hg version')),
3147 }
3163 }
3148
3164
3149 extensions.commandtable = table
3165 extensions.commandtable = table
3150
3166
3151 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3167 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3152 " debugindex debugindexdot debugdate debuginstall")
3168 " debugindex debugindexdot debugdate debuginstall")
3153 optionalrepo = ("paths serve showconfig")
3169 optionalrepo = ("paths serve showconfig")
3154
3170
3155 def dispatch(args, argv0=None):
3171 def dispatch(args, argv0=None):
3156 try:
3172 try:
3157 u = ui.ui(traceback='--traceback' in args)
3173 u = ui.ui(traceback='--traceback' in args)
3158 except util.Abort, inst:
3174 except util.Abort, inst:
3159 sys.stderr.write(_("abort: %s\n") % inst)
3175 sys.stderr.write(_("abort: %s\n") % inst)
3160 return -1
3176 return -1
3161 return cmdutil.runcatch(u, args, argv0=argv0)
3177 return cmdutil.runcatch(u, args, argv0=argv0)
3162
3178
3163 def run():
3179 def run():
3164 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
3180 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
@@ -1,285 +1,281 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from repo import *
10 from repo import *
11 from i18n import _
11 from i18n import _
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 import errno, lock, os, shutil, util, cmdutil, extensions
13 import errno, lock, os, shutil, util, cmdutil, extensions
14 import merge as _merge
14 import merge as _merge
15 import verify as _verify
15 import verify as _verify
16
16
17 def _local(path):
17 def _local(path):
18 return (os.path.isfile(util.drop_scheme('file', path)) and
18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 bundlerepo or localrepo)
19 bundlerepo or localrepo)
20
20
21 schemes = {
21 schemes = {
22 'bundle': bundlerepo,
22 'bundle': bundlerepo,
23 'file': _local,
23 'file': _local,
24 'http': httprepo,
24 'http': httprepo,
25 'https': httprepo,
25 'https': httprepo,
26 'ssh': sshrepo,
26 'ssh': sshrepo,
27 'static-http': statichttprepo,
27 'static-http': statichttprepo,
28 }
28 }
29
29
30 def _lookup(path):
30 def _lookup(path):
31 scheme = 'file'
31 scheme = 'file'
32 if path:
32 if path:
33 c = path.find(':')
33 c = path.find(':')
34 if c > 0:
34 if c > 0:
35 scheme = path[:c]
35 scheme = path[:c]
36 thing = schemes.get(scheme) or schemes['file']
36 thing = schemes.get(scheme) or schemes['file']
37 try:
37 try:
38 return thing(path)
38 return thing(path)
39 except TypeError:
39 except TypeError:
40 return thing
40 return thing
41
41
42 def islocal(repo):
42 def islocal(repo):
43 '''return true if repo or path is local'''
43 '''return true if repo or path is local'''
44 if isinstance(repo, str):
44 if isinstance(repo, str):
45 try:
45 try:
46 return _lookup(repo).islocal(repo)
46 return _lookup(repo).islocal(repo)
47 except AttributeError:
47 except AttributeError:
48 return False
48 return False
49 return repo.local()
49 return repo.local()
50
50
51 def repository(ui, path='', create=False):
51 def repository(ui, path='', create=False):
52 """return a repository object for the specified path"""
52 """return a repository object for the specified path"""
53 repo = _lookup(path).instance(ui, path, create)
53 repo = _lookup(path).instance(ui, path, create)
54 ui = getattr(repo, "ui", ui)
54 ui = getattr(repo, "ui", ui)
55 for hook in extensions.setuphooks:
55 for hook in extensions.setuphooks:
56 hook(ui, repo)
56 hook(ui, repo)
57 return repo
57 return repo
58
58
59 def defaultdest(source):
59 def defaultdest(source):
60 '''return default destination of clone if none is given'''
60 '''return default destination of clone if none is given'''
61 return os.path.basename(os.path.normpath(source))
61 return os.path.basename(os.path.normpath(source))
62
62
63 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
63 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
64 stream=False):
64 stream=False):
65 """Make a copy of an existing repository.
65 """Make a copy of an existing repository.
66
66
67 Create a copy of an existing repository in a new directory. The
67 Create a copy of an existing repository in a new directory. The
68 source and destination are URLs, as passed to the repository
68 source and destination are URLs, as passed to the repository
69 function. Returns a pair of repository objects, the source and
69 function. Returns a pair of repository objects, the source and
70 newly created destination.
70 newly created destination.
71
71
72 The location of the source is added to the new repository's
72 The location of the source is added to the new repository's
73 .hg/hgrc file, as the default to be used for future pulls and
73 .hg/hgrc file, as the default to be used for future pulls and
74 pushes.
74 pushes.
75
75
76 If an exception is raised, the partly cloned/updated destination
76 If an exception is raised, the partly cloned/updated destination
77 repository will be deleted.
77 repository will be deleted.
78
78
79 Arguments:
79 Arguments:
80
80
81 source: repository object or URL
81 source: repository object or URL
82
82
83 dest: URL of destination repository to create (defaults to base
83 dest: URL of destination repository to create (defaults to base
84 name of source repository)
84 name of source repository)
85
85
86 pull: always pull from source repository, even in local case
86 pull: always pull from source repository, even in local case
87
87
88 stream: stream raw data uncompressed from repository (fast over
88 stream: stream raw data uncompressed from repository (fast over
89 LAN, slow over WAN)
89 LAN, slow over WAN)
90
90
91 rev: revision to clone up to (implies pull=True)
91 rev: revision to clone up to (implies pull=True)
92
92
93 update: update working directory after clone completes, if
93 update: update working directory after clone completes, if
94 destination is local repository
94 destination is local repository
95 """
95 """
96
96
97 origsource = source
97 origsource = source
98 source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
98 source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
99
99
100 if isinstance(source, str):
100 if isinstance(source, str):
101 src_repo = repository(ui, source)
101 src_repo = repository(ui, source)
102 else:
102 else:
103 src_repo = source
103 src_repo = source
104 source = src_repo.url()
104 source = src_repo.url()
105
105
106 if dest is None:
106 if dest is None:
107 dest = defaultdest(source)
107 dest = defaultdest(source)
108 ui.status(_("destination directory: %s\n") % dest)
108 ui.status(_("destination directory: %s\n") % dest)
109
109
110 def localpath(path):
110 def localpath(path):
111 if path.startswith('file://'):
111 if path.startswith('file://'):
112 return path[7:]
112 return path[7:]
113 if path.startswith('file:'):
113 if path.startswith('file:'):
114 return path[5:]
114 return path[5:]
115 return path
115 return path
116
116
117 dest = localpath(dest)
117 dest = localpath(dest)
118 source = localpath(source)
118 source = localpath(source)
119
119
120 if os.path.exists(dest):
120 if os.path.exists(dest):
121 raise util.Abort(_("destination '%s' already exists") % dest)
121 raise util.Abort(_("destination '%s' already exists") % dest)
122
122
123 class DirCleanup(object):
123 class DirCleanup(object):
124 def __init__(self, dir_):
124 def __init__(self, dir_):
125 self.rmtree = shutil.rmtree
125 self.rmtree = shutil.rmtree
126 self.dir_ = dir_
126 self.dir_ = dir_
127 def close(self):
127 def close(self):
128 self.dir_ = None
128 self.dir_ = None
129 def __del__(self):
129 def __del__(self):
130 if self.dir_:
130 if self.dir_:
131 self.rmtree(self.dir_, True)
131 self.rmtree(self.dir_, True)
132
132
133 dir_cleanup = None
133 src_lock = dest_lock = dir_cleanup = None
134 if islocal(dest):
134 try:
135 dir_cleanup = DirCleanup(dest)
135 if islocal(dest):
136 dir_cleanup = DirCleanup(dest)
136
137
137 abspath = origsource
138 abspath = origsource
138 copy = False
139 copy = False
139 if src_repo.local() and islocal(dest):
140 if src_repo.local() and islocal(dest):
140 abspath = os.path.abspath(origsource)
141 abspath = os.path.abspath(origsource)
141 copy = not pull and not rev
142 copy = not pull and not rev
142
143
143 src_lock, dest_lock = None, None
144 if copy:
144 if copy:
145 try:
145 try:
146 # we use a lock here because if we race with commit, we
146 # we use a lock here because if we race with commit, we
147 # can end up with extra data in the cloned revlogs that's
147 # can end up with extra data in the cloned revlogs that's
148 # not pointed to by changesets, thus causing verify to
148 # not pointed to by changesets, thus causing verify to
149 # fail
149 # fail
150 src_lock = src_repo.lock()
150 src_lock = src_repo.lock()
151 except lock.LockException:
151 except lock.LockException:
152 copy = False
152 copy = False
153
153
154 if copy:
154 if copy:
155 def force_copy(src, dst):
155 def force_copy(src, dst):
156 try:
156 try:
157 util.copyfiles(src, dst)
157 util.copyfiles(src, dst)
158 except OSError, inst:
158 except OSError, inst:
159 if inst.errno != errno.ENOENT:
159 if inst.errno != errno.ENOENT:
160 raise
160 raise
161
161
162 src_store = os.path.realpath(src_repo.spath)
162 src_store = os.path.realpath(src_repo.spath)
163 if not os.path.exists(dest):
163 if not os.path.exists(dest):
164 os.mkdir(dest)
164 os.mkdir(dest)
165 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
165 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
166 os.mkdir(dest_path)
166 os.mkdir(dest_path)
167 if src_repo.spath != src_repo.path:
167 if src_repo.spath != src_repo.path:
168 dest_store = os.path.join(dest_path, "store")
168 dest_store = os.path.join(dest_path, "store")
169 os.mkdir(dest_store)
169 os.mkdir(dest_store)
170 else:
170 else:
171 dest_store = dest_path
171 dest_store = dest_path
172 # copy the requires file
172 # copy the requires file
173 force_copy(src_repo.join("requires"),
173 force_copy(src_repo.join("requires"),
174 os.path.join(dest_path, "requires"))
174 os.path.join(dest_path, "requires"))
175 # we lock here to avoid premature writing to the target
175 # we lock here to avoid premature writing to the target
176 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
176 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
177
177
178 files = ("data",
178 files = ("data",
179 "00manifest.d", "00manifest.i",
179 "00manifest.d", "00manifest.i",
180 "00changelog.d", "00changelog.i")
180 "00changelog.d", "00changelog.i")
181 for f in files:
181 for f in files:
182 src = os.path.join(src_store, f)
182 src = os.path.join(src_store, f)
183 dst = os.path.join(dest_store, f)
183 dst = os.path.join(dest_store, f)
184 force_copy(src, dst)
184 force_copy(src, dst)
185
186 # we need to re-init the repo after manually copying the data
187 # into it
188 dest_repo = repository(ui, dest)
189
190 else:
191 dest_repo = repository(ui, dest, create=True)
185
192
186 # we need to re-init the repo after manually copying the data
193 revs = None
187 # into it
194 if rev:
188 dest_repo = repository(ui, dest)
195 if 'lookup' not in src_repo.capabilities:
189
196 raise util.Abort(_("src repository does not support revision "
190 else:
197 "lookup and so doesn't support clone by "
191 dest_repo = repository(ui, dest, create=True)
198 "revision"))
199 revs = [src_repo.lookup(r) for r in rev]
192
200
193 revs = None
201 if dest_repo.local():
194 if rev:
202 dest_repo.clone(src_repo, heads=revs, stream=stream)
195 if 'lookup' not in src_repo.capabilities:
203 elif src_repo.local():
196 raise util.Abort(_("src repository does not support revision "
204 src_repo.push(dest_repo, revs=revs)
197 "lookup and so doesn't support clone by "
205 else:
198 "revision"))
206 raise util.Abort(_("clone from remote to remote not supported"))
199 revs = [src_repo.lookup(r) for r in rev]
200
207
201 if dest_repo.local():
208 if dest_repo.local():
202 dest_repo.clone(src_repo, heads=revs, stream=stream)
209 fp = dest_repo.opener("hgrc", "w", text=True)
203 elif src_repo.local():
210 fp.write("[paths]\n")
204 src_repo.push(dest_repo, revs=revs)
211 fp.write("default = %s\n" % abspath)
205 else:
212 fp.close()
206 raise util.Abort(_("clone from remote to remote not supported"))
207
208 if src_lock:
209 src_lock.release()
210
213
211 if dest_repo.local():
214 if update:
212 fp = dest_repo.opener("hgrc", "w", text=True)
215 try:
213 fp.write("[paths]\n")
216 checkout = dest_repo.lookup("default")
214 fp.write("default = %s\n" % abspath)
217 except:
215 fp.close()
218 checkout = dest_repo.changelog.tip()
216
219 _update(dest_repo, checkout)
217 if dest_lock:
220 if dir_cleanup:
218 dest_lock.release()
221 dir_cleanup.close()
219
222
220 if update:
223 return src_repo, dest_repo
221 try:
224 finally:
222 checkout = dest_repo.lookup("default")
225 del src_lock, dest_lock, dir_cleanup
223 except:
224 checkout = dest_repo.changelog.tip()
225 _update(dest_repo, checkout)
226 if dir_cleanup:
227 dir_cleanup.close()
228
229 return src_repo, dest_repo
230
226
231 def _showstats(repo, stats):
227 def _showstats(repo, stats):
232 stats = ((stats[0], _("updated")),
228 stats = ((stats[0], _("updated")),
233 (stats[1], _("merged")),
229 (stats[1], _("merged")),
234 (stats[2], _("removed")),
230 (stats[2], _("removed")),
235 (stats[3], _("unresolved")))
231 (stats[3], _("unresolved")))
236 note = ", ".join([_("%d files %s") % s for s in stats])
232 note = ", ".join([_("%d files %s") % s for s in stats])
237 repo.ui.status("%s\n" % note)
233 repo.ui.status("%s\n" % note)
238
234
239 def _update(repo, node): return update(repo, node)
235 def _update(repo, node): return update(repo, node)
240
236
241 def update(repo, node):
237 def update(repo, node):
242 """update the working directory to node, merging linear changes"""
238 """update the working directory to node, merging linear changes"""
243 pl = repo.parents()
239 pl = repo.parents()
244 stats = _merge.update(repo, node, False, False, None, None)
240 stats = _merge.update(repo, node, False, False, None, None)
245 _showstats(repo, stats)
241 _showstats(repo, stats)
246 if stats[3]:
242 if stats[3]:
247 repo.ui.status(_("There are unresolved merges with"
243 repo.ui.status(_("There are unresolved merges with"
248 " locally modified files.\n"))
244 " locally modified files.\n"))
249 if stats[1]:
245 if stats[1]:
250 repo.ui.status(_("You can finish the partial merge using:\n"))
246 repo.ui.status(_("You can finish the partial merge using:\n"))
251 else:
247 else:
252 repo.ui.status(_("You can redo the full merge using:\n"))
248 repo.ui.status(_("You can redo the full merge using:\n"))
253 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
249 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
254 repo.ui.status(_(" hg update %s\n hg update %s\n")
250 repo.ui.status(_(" hg update %s\n hg update %s\n")
255 % (pl[0].rev(), repo.changectx(node).rev()))
251 % (pl[0].rev(), repo.changectx(node).rev()))
256 return stats[3]
252 return stats[3]
257
253
258 def clean(repo, node, wlock=None, show_stats=True):
254 def clean(repo, node, wlock=None, show_stats=True):
259 """forcibly switch the working directory to node, clobbering changes"""
255 """forcibly switch the working directory to node, clobbering changes"""
260 stats = _merge.update(repo, node, False, True, None, wlock)
256 stats = _merge.update(repo, node, False, True, None, wlock)
261 if show_stats: _showstats(repo, stats)
257 if show_stats: _showstats(repo, stats)
262 return stats[3]
258 return stats[3]
263
259
264 def merge(repo, node, force=None, remind=True, wlock=None):
260 def merge(repo, node, force=None, remind=True, wlock=None):
265 """branch merge with node, resolving changes"""
261 """branch merge with node, resolving changes"""
266 stats = _merge.update(repo, node, True, force, False, wlock)
262 stats = _merge.update(repo, node, True, force, False, wlock)
267 _showstats(repo, stats)
263 _showstats(repo, stats)
268 if stats[3]:
264 if stats[3]:
269 pl = repo.parents()
265 pl = repo.parents()
270 repo.ui.status(_("There are unresolved merges,"
266 repo.ui.status(_("There are unresolved merges,"
271 " you can redo the full merge using:\n"
267 " you can redo the full merge using:\n"
272 " hg update -C %s\n"
268 " hg update -C %s\n"
273 " hg merge %s\n")
269 " hg merge %s\n")
274 % (pl[0].rev(), pl[1].rev()))
270 % (pl[0].rev(), pl[1].rev()))
275 elif remind:
271 elif remind:
276 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
272 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
277 return stats[3]
273 return stats[3]
278
274
279 def revert(repo, node, choose, wlock):
275 def revert(repo, node, choose, wlock):
280 """revert changes to revision in node without updating dirstate"""
276 """revert changes to revision in node without updating dirstate"""
281 return _merge.update(repo, node, False, True, choose, wlock)[3]
277 return _merge.update(repo, node, False, True, choose, wlock)[3]
282
278
283 def verify(repo):
279 def verify(repo):
284 """verify the consistency of a repository"""
280 """verify the consistency of a repository"""
285 return _verify.verify(repo)
281 return _verify.verify(repo)
@@ -1,1188 +1,1188 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
10 import tempfile, urllib, bz2
10 import tempfile, urllib, bz2
11 from mercurial.node import *
11 from mercurial.node import *
12 from mercurial.i18n import gettext as _
12 from mercurial.i18n import gettext as _
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 from mercurial import revlog, templater
14 from mercurial import revlog, templater
15 from common import get_mtime, staticfile, style_map, paritygen
15 from common import get_mtime, staticfile, style_map, paritygen
16
16
17 def _up(p):
17 def _up(p):
18 if p[0] != "/":
18 if p[0] != "/":
19 p = "/" + p
19 p = "/" + p
20 if p[-1] == "/":
20 if p[-1] == "/":
21 p = p[:-1]
21 p = p[:-1]
22 up = os.path.dirname(p)
22 up = os.path.dirname(p)
23 if up == "/":
23 if up == "/":
24 return "/"
24 return "/"
25 return up + "/"
25 return up + "/"
26
26
27 def revnavgen(pos, pagelen, limit, nodefunc):
27 def revnavgen(pos, pagelen, limit, nodefunc):
28 def seq(factor, limit=None):
28 def seq(factor, limit=None):
29 if limit:
29 if limit:
30 yield limit
30 yield limit
31 if limit >= 20 and limit <= 40:
31 if limit >= 20 and limit <= 40:
32 yield 50
32 yield 50
33 else:
33 else:
34 yield 1 * factor
34 yield 1 * factor
35 yield 3 * factor
35 yield 3 * factor
36 for f in seq(factor * 10):
36 for f in seq(factor * 10):
37 yield f
37 yield f
38
38
39 def nav(**map):
39 def nav(**map):
40 l = []
40 l = []
41 last = 0
41 last = 0
42 for f in seq(1, pagelen):
42 for f in seq(1, pagelen):
43 if f < pagelen or f <= last:
43 if f < pagelen or f <= last:
44 continue
44 continue
45 if f > limit:
45 if f > limit:
46 break
46 break
47 last = f
47 last = f
48 if pos + f < limit:
48 if pos + f < limit:
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
50 if pos - f >= 0:
50 if pos - f >= 0:
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
52
52
53 try:
53 try:
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
55
55
56 for label, node in l:
56 for label, node in l:
57 yield {"label": label, "node": node}
57 yield {"label": label, "node": node}
58
58
59 yield {"label": "tip", "node": "tip"}
59 yield {"label": "tip", "node": "tip"}
60 except hg.RepoError:
60 except hg.RepoError:
61 pass
61 pass
62
62
63 return nav
63 return nav
64
64
65 class hgweb(object):
65 class hgweb(object):
66 def __init__(self, repo, name=None):
66 def __init__(self, repo, name=None):
67 if isinstance(repo, str):
67 if isinstance(repo, str):
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
69 else:
69 else:
70 self.repo = repo
70 self.repo = repo
71
71
72 self.mtime = -1
72 self.mtime = -1
73 self.reponame = name
73 self.reponame = name
74 self.archives = 'zip', 'gz', 'bz2'
74 self.archives = 'zip', 'gz', 'bz2'
75 self.stripecount = 1
75 self.stripecount = 1
76 # a repo owner may set web.templates in .hg/hgrc to get any file
76 # a repo owner may set web.templates in .hg/hgrc to get any file
77 # readable by the user running the CGI script
77 # readable by the user running the CGI script
78 self.templatepath = self.config("web", "templates",
78 self.templatepath = self.config("web", "templates",
79 templater.templatepath(),
79 templater.templatepath(),
80 untrusted=False)
80 untrusted=False)
81
81
82 # The CGI scripts are often run by a user different from the repo owner.
82 # The CGI scripts are often run by a user different from the repo owner.
83 # Trust the settings from the .hg/hgrc files by default.
83 # Trust the settings from the .hg/hgrc files by default.
84 def config(self, section, name, default=None, untrusted=True):
84 def config(self, section, name, default=None, untrusted=True):
85 return self.repo.ui.config(section, name, default,
85 return self.repo.ui.config(section, name, default,
86 untrusted=untrusted)
86 untrusted=untrusted)
87
87
88 def configbool(self, section, name, default=False, untrusted=True):
88 def configbool(self, section, name, default=False, untrusted=True):
89 return self.repo.ui.configbool(section, name, default,
89 return self.repo.ui.configbool(section, name, default,
90 untrusted=untrusted)
90 untrusted=untrusted)
91
91
92 def configlist(self, section, name, default=None, untrusted=True):
92 def configlist(self, section, name, default=None, untrusted=True):
93 return self.repo.ui.configlist(section, name, default,
93 return self.repo.ui.configlist(section, name, default,
94 untrusted=untrusted)
94 untrusted=untrusted)
95
95
96 def refresh(self):
96 def refresh(self):
97 mtime = get_mtime(self.repo.root)
97 mtime = get_mtime(self.repo.root)
98 if mtime != self.mtime:
98 if mtime != self.mtime:
99 self.mtime = mtime
99 self.mtime = mtime
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
102 self.stripecount = int(self.config("web", "stripes", 1))
102 self.stripecount = int(self.config("web", "stripes", 1))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
105 self.allowpull = self.configbool("web", "allowpull", True)
105 self.allowpull = self.configbool("web", "allowpull", True)
106 self.encoding = self.config("web", "encoding", util._encoding)
106 self.encoding = self.config("web", "encoding", util._encoding)
107
107
108 def archivelist(self, nodeid):
108 def archivelist(self, nodeid):
109 allowed = self.configlist("web", "allow_archive")
109 allowed = self.configlist("web", "allow_archive")
110 for i, spec in self.archive_specs.iteritems():
110 for i, spec in self.archive_specs.iteritems():
111 if i in allowed or self.configbool("web", "allow" + i):
111 if i in allowed or self.configbool("web", "allow" + i):
112 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
112 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
113
113
114 def listfilediffs(self, files, changeset):
114 def listfilediffs(self, files, changeset):
115 for f in files[:self.maxfiles]:
115 for f in files[:self.maxfiles]:
116 yield self.t("filedifflink", node=hex(changeset), file=f)
116 yield self.t("filedifflink", node=hex(changeset), file=f)
117 if len(files) > self.maxfiles:
117 if len(files) > self.maxfiles:
118 yield self.t("fileellipses")
118 yield self.t("fileellipses")
119
119
120 def siblings(self, siblings=[], hiderev=None, **args):
120 def siblings(self, siblings=[], hiderev=None, **args):
121 siblings = [s for s in siblings if s.node() != nullid]
121 siblings = [s for s in siblings if s.node() != nullid]
122 if len(siblings) == 1 and siblings[0].rev() == hiderev:
122 if len(siblings) == 1 and siblings[0].rev() == hiderev:
123 return
123 return
124 for s in siblings:
124 for s in siblings:
125 d = {'node': hex(s.node()), 'rev': s.rev()}
125 d = {'node': hex(s.node()), 'rev': s.rev()}
126 if hasattr(s, 'path'):
126 if hasattr(s, 'path'):
127 d['file'] = s.path()
127 d['file'] = s.path()
128 d.update(args)
128 d.update(args)
129 yield d
129 yield d
130
130
131 def renamelink(self, fl, node):
131 def renamelink(self, fl, node):
132 r = fl.renamed(node)
132 r = fl.renamed(node)
133 if r:
133 if r:
134 return [dict(file=r[0], node=hex(r[1]))]
134 return [dict(file=r[0], node=hex(r[1]))]
135 return []
135 return []
136
136
137 def nodetagsdict(self, node):
137 def nodetagsdict(self, node):
138 return [{"name": i} for i in self.repo.nodetags(node)]
138 return [{"name": i} for i in self.repo.nodetags(node)]
139
139
140 def nodebranchdict(self, ctx):
140 def nodebranchdict(self, ctx):
141 branches = []
141 branches = []
142 branch = ctx.branch()
142 branch = ctx.branch()
143 if self.repo.branchtags()[branch] == ctx.node():
143 if self.repo.branchtags()[branch] == ctx.node():
144 branches.append({"name": branch})
144 branches.append({"name": branch})
145 return branches
145 return branches
146
146
147 def showtag(self, t1, node=nullid, **args):
147 def showtag(self, t1, node=nullid, **args):
148 for t in self.repo.nodetags(node):
148 for t in self.repo.nodetags(node):
149 yield self.t(t1, tag=t, **args)
149 yield self.t(t1, tag=t, **args)
150
150
151 def diff(self, node1, node2, files):
151 def diff(self, node1, node2, files):
152 def filterfiles(filters, files):
152 def filterfiles(filters, files):
153 l = [x for x in files if x in filters]
153 l = [x for x in files if x in filters]
154
154
155 for t in filters:
155 for t in filters:
156 if t and t[-1] != os.sep:
156 if t and t[-1] != os.sep:
157 t += os.sep
157 t += os.sep
158 l += [x for x in files if x.startswith(t)]
158 l += [x for x in files if x.startswith(t)]
159 return l
159 return l
160
160
161 parity = paritygen(self.stripecount)
161 parity = paritygen(self.stripecount)
162 def diffblock(diff, f, fn):
162 def diffblock(diff, f, fn):
163 yield self.t("diffblock",
163 yield self.t("diffblock",
164 lines=prettyprintlines(diff),
164 lines=prettyprintlines(diff),
165 parity=parity.next(),
165 parity=parity.next(),
166 file=f,
166 file=f,
167 filenode=hex(fn or nullid))
167 filenode=hex(fn or nullid))
168
168
169 def prettyprintlines(diff):
169 def prettyprintlines(diff):
170 for l in diff.splitlines(1):
170 for l in diff.splitlines(1):
171 if l.startswith('+'):
171 if l.startswith('+'):
172 yield self.t("difflineplus", line=l)
172 yield self.t("difflineplus", line=l)
173 elif l.startswith('-'):
173 elif l.startswith('-'):
174 yield self.t("difflineminus", line=l)
174 yield self.t("difflineminus", line=l)
175 elif l.startswith('@'):
175 elif l.startswith('@'):
176 yield self.t("difflineat", line=l)
176 yield self.t("difflineat", line=l)
177 else:
177 else:
178 yield self.t("diffline", line=l)
178 yield self.t("diffline", line=l)
179
179
180 r = self.repo
180 r = self.repo
181 c1 = r.changectx(node1)
181 c1 = r.changectx(node1)
182 c2 = r.changectx(node2)
182 c2 = r.changectx(node2)
183 date1 = util.datestr(c1.date())
183 date1 = util.datestr(c1.date())
184 date2 = util.datestr(c2.date())
184 date2 = util.datestr(c2.date())
185
185
186 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
186 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
187 if files:
187 if files:
188 modified, added, removed = map(lambda x: filterfiles(files, x),
188 modified, added, removed = map(lambda x: filterfiles(files, x),
189 (modified, added, removed))
189 (modified, added, removed))
190
190
191 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
191 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
192 for f in modified:
192 for f in modified:
193 to = c1.filectx(f).data()
193 to = c1.filectx(f).data()
194 tn = c2.filectx(f).data()
194 tn = c2.filectx(f).data()
195 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
195 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
196 opts=diffopts), f, tn)
196 opts=diffopts), f, tn)
197 for f in added:
197 for f in added:
198 to = None
198 to = None
199 tn = c2.filectx(f).data()
199 tn = c2.filectx(f).data()
200 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
200 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
201 opts=diffopts), f, tn)
201 opts=diffopts), f, tn)
202 for f in removed:
202 for f in removed:
203 to = c1.filectx(f).data()
203 to = c1.filectx(f).data()
204 tn = None
204 tn = None
205 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
205 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
206 opts=diffopts), f, tn)
206 opts=diffopts), f, tn)
207
207
208 def changelog(self, ctx, shortlog=False):
208 def changelog(self, ctx, shortlog=False):
209 def changelist(**map):
209 def changelist(**map):
210 cl = self.repo.changelog
210 cl = self.repo.changelog
211 l = [] # build a list in forward order for efficiency
211 l = [] # build a list in forward order for efficiency
212 for i in xrange(start, end):
212 for i in xrange(start, end):
213 ctx = self.repo.changectx(i)
213 ctx = self.repo.changectx(i)
214 n = ctx.node()
214 n = ctx.node()
215
215
216 l.insert(0, {"parity": parity.next(),
216 l.insert(0, {"parity": parity.next(),
217 "author": ctx.user(),
217 "author": ctx.user(),
218 "parent": self.siblings(ctx.parents(), i - 1),
218 "parent": self.siblings(ctx.parents(), i - 1),
219 "child": self.siblings(ctx.children(), i + 1),
219 "child": self.siblings(ctx.children(), i + 1),
220 "changelogtag": self.showtag("changelogtag",n),
220 "changelogtag": self.showtag("changelogtag",n),
221 "desc": ctx.description(),
221 "desc": ctx.description(),
222 "date": ctx.date(),
222 "date": ctx.date(),
223 "files": self.listfilediffs(ctx.files(), n),
223 "files": self.listfilediffs(ctx.files(), n),
224 "rev": i,
224 "rev": i,
225 "node": hex(n),
225 "node": hex(n),
226 "tags": self.nodetagsdict(n),
226 "tags": self.nodetagsdict(n),
227 "branches": self.nodebranchdict(ctx)})
227 "branches": self.nodebranchdict(ctx)})
228
228
229 for e in l:
229 for e in l:
230 yield e
230 yield e
231
231
232 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
232 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
233 cl = self.repo.changelog
233 cl = self.repo.changelog
234 count = cl.count()
234 count = cl.count()
235 pos = ctx.rev()
235 pos = ctx.rev()
236 start = max(0, pos - maxchanges + 1)
236 start = max(0, pos - maxchanges + 1)
237 end = min(count, start + maxchanges)
237 end = min(count, start + maxchanges)
238 pos = end - 1
238 pos = end - 1
239 parity = paritygen(self.stripecount, offset=start-end)
239 parity = paritygen(self.stripecount, offset=start-end)
240
240
241 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
241 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
242
242
243 yield self.t(shortlog and 'shortlog' or 'changelog',
243 yield self.t(shortlog and 'shortlog' or 'changelog',
244 changenav=changenav,
244 changenav=changenav,
245 node=hex(cl.tip()),
245 node=hex(cl.tip()),
246 rev=pos, changesets=count, entries=changelist,
246 rev=pos, changesets=count, entries=changelist,
247 archives=self.archivelist("tip"))
247 archives=self.archivelist("tip"))
248
248
249 def search(self, query):
249 def search(self, query):
250
250
251 def changelist(**map):
251 def changelist(**map):
252 cl = self.repo.changelog
252 cl = self.repo.changelog
253 count = 0
253 count = 0
254 qw = query.lower().split()
254 qw = query.lower().split()
255
255
256 def revgen():
256 def revgen():
257 for i in xrange(cl.count() - 1, 0, -100):
257 for i in xrange(cl.count() - 1, 0, -100):
258 l = []
258 l = []
259 for j in xrange(max(0, i - 100), i):
259 for j in xrange(max(0, i - 100), i):
260 ctx = self.repo.changectx(j)
260 ctx = self.repo.changectx(j)
261 l.append(ctx)
261 l.append(ctx)
262 l.reverse()
262 l.reverse()
263 for e in l:
263 for e in l:
264 yield e
264 yield e
265
265
266 for ctx in revgen():
266 for ctx in revgen():
267 miss = 0
267 miss = 0
268 for q in qw:
268 for q in qw:
269 if not (q in ctx.user().lower() or
269 if not (q in ctx.user().lower() or
270 q in ctx.description().lower() or
270 q in ctx.description().lower() or
271 q in " ".join(ctx.files()).lower()):
271 q in " ".join(ctx.files()).lower()):
272 miss = 1
272 miss = 1
273 break
273 break
274 if miss:
274 if miss:
275 continue
275 continue
276
276
277 count += 1
277 count += 1
278 n = ctx.node()
278 n = ctx.node()
279
279
280 yield self.t('searchentry',
280 yield self.t('searchentry',
281 parity=parity.next(),
281 parity=parity.next(),
282 author=ctx.user(),
282 author=ctx.user(),
283 parent=self.siblings(ctx.parents()),
283 parent=self.siblings(ctx.parents()),
284 child=self.siblings(ctx.children()),
284 child=self.siblings(ctx.children()),
285 changelogtag=self.showtag("changelogtag",n),
285 changelogtag=self.showtag("changelogtag",n),
286 desc=ctx.description(),
286 desc=ctx.description(),
287 date=ctx.date(),
287 date=ctx.date(),
288 files=self.listfilediffs(ctx.files(), n),
288 files=self.listfilediffs(ctx.files(), n),
289 rev=ctx.rev(),
289 rev=ctx.rev(),
290 node=hex(n),
290 node=hex(n),
291 tags=self.nodetagsdict(n),
291 tags=self.nodetagsdict(n),
292 branches=self.nodebranchdict(ctx))
292 branches=self.nodebranchdict(ctx))
293
293
294 if count >= self.maxchanges:
294 if count >= self.maxchanges:
295 break
295 break
296
296
297 cl = self.repo.changelog
297 cl = self.repo.changelog
298 parity = paritygen(self.stripecount)
298 parity = paritygen(self.stripecount)
299
299
300 yield self.t('search',
300 yield self.t('search',
301 query=query,
301 query=query,
302 node=hex(cl.tip()),
302 node=hex(cl.tip()),
303 entries=changelist,
303 entries=changelist,
304 archives=self.archivelist("tip"))
304 archives=self.archivelist("tip"))
305
305
306 def changeset(self, ctx):
306 def changeset(self, ctx):
307 n = ctx.node()
307 n = ctx.node()
308 parents = ctx.parents()
308 parents = ctx.parents()
309 p1 = parents[0].node()
309 p1 = parents[0].node()
310
310
311 files = []
311 files = []
312 parity = paritygen(self.stripecount)
312 parity = paritygen(self.stripecount)
313 for f in ctx.files():
313 for f in ctx.files():
314 files.append(self.t("filenodelink",
314 files.append(self.t("filenodelink",
315 node=hex(n), file=f,
315 node=hex(n), file=f,
316 parity=parity.next()))
316 parity=parity.next()))
317
317
318 def diff(**map):
318 def diff(**map):
319 yield self.diff(p1, n, None)
319 yield self.diff(p1, n, None)
320
320
321 yield self.t('changeset',
321 yield self.t('changeset',
322 diff=diff,
322 diff=diff,
323 rev=ctx.rev(),
323 rev=ctx.rev(),
324 node=hex(n),
324 node=hex(n),
325 parent=self.siblings(parents),
325 parent=self.siblings(parents),
326 child=self.siblings(ctx.children()),
326 child=self.siblings(ctx.children()),
327 changesettag=self.showtag("changesettag",n),
327 changesettag=self.showtag("changesettag",n),
328 author=ctx.user(),
328 author=ctx.user(),
329 desc=ctx.description(),
329 desc=ctx.description(),
330 date=ctx.date(),
330 date=ctx.date(),
331 files=files,
331 files=files,
332 archives=self.archivelist(hex(n)),
332 archives=self.archivelist(hex(n)),
333 tags=self.nodetagsdict(n),
333 tags=self.nodetagsdict(n),
334 branches=self.nodebranchdict(ctx))
334 branches=self.nodebranchdict(ctx))
335
335
336 def filelog(self, fctx):
336 def filelog(self, fctx):
337 f = fctx.path()
337 f = fctx.path()
338 fl = fctx.filelog()
338 fl = fctx.filelog()
339 count = fl.count()
339 count = fl.count()
340 pagelen = self.maxshortchanges
340 pagelen = self.maxshortchanges
341 pos = fctx.filerev()
341 pos = fctx.filerev()
342 start = max(0, pos - pagelen + 1)
342 start = max(0, pos - pagelen + 1)
343 end = min(count, start + pagelen)
343 end = min(count, start + pagelen)
344 pos = end - 1
344 pos = end - 1
345 parity = paritygen(self.stripecount, offset=start-end)
345 parity = paritygen(self.stripecount, offset=start-end)
346
346
347 def entries(**map):
347 def entries(**map):
348 l = []
348 l = []
349
349
350 for i in xrange(start, end):
350 for i in xrange(start, end):
351 ctx = fctx.filectx(i)
351 ctx = fctx.filectx(i)
352 n = fl.node(i)
352 n = fl.node(i)
353
353
354 l.insert(0, {"parity": parity.next(),
354 l.insert(0, {"parity": parity.next(),
355 "filerev": i,
355 "filerev": i,
356 "file": f,
356 "file": f,
357 "node": hex(ctx.node()),
357 "node": hex(ctx.node()),
358 "author": ctx.user(),
358 "author": ctx.user(),
359 "date": ctx.date(),
359 "date": ctx.date(),
360 "rename": self.renamelink(fl, n),
360 "rename": self.renamelink(fl, n),
361 "parent": self.siblings(fctx.parents()),
361 "parent": self.siblings(fctx.parents()),
362 "child": self.siblings(fctx.children()),
362 "child": self.siblings(fctx.children()),
363 "desc": ctx.description()})
363 "desc": ctx.description()})
364
364
365 for e in l:
365 for e in l:
366 yield e
366 yield e
367
367
368 nodefunc = lambda x: fctx.filectx(fileid=x)
368 nodefunc = lambda x: fctx.filectx(fileid=x)
369 nav = revnavgen(pos, pagelen, count, nodefunc)
369 nav = revnavgen(pos, pagelen, count, nodefunc)
370 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
370 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
371 entries=entries)
371 entries=entries)
372
372
373 def filerevision(self, fctx):
373 def filerevision(self, fctx):
374 f = fctx.path()
374 f = fctx.path()
375 text = fctx.data()
375 text = fctx.data()
376 fl = fctx.filelog()
376 fl = fctx.filelog()
377 n = fctx.filenode()
377 n = fctx.filenode()
378 parity = paritygen(self.stripecount)
378 parity = paritygen(self.stripecount)
379
379
380 mt = mimetypes.guess_type(f)[0]
380 mt = mimetypes.guess_type(f)[0]
381 rawtext = text
381 rawtext = text
382 if util.binary(text):
382 if util.binary(text):
383 mt = mt or 'application/octet-stream'
383 mt = mt or 'application/octet-stream'
384 text = "(binary:%s)" % mt
384 text = "(binary:%s)" % mt
385 mt = mt or 'text/plain'
385 mt = mt or 'text/plain'
386
386
387 def lines():
387 def lines():
388 for l, t in enumerate(text.splitlines(1)):
388 for l, t in enumerate(text.splitlines(1)):
389 yield {"line": t,
389 yield {"line": t,
390 "linenumber": "% 6d" % (l + 1),
390 "linenumber": "% 6d" % (l + 1),
391 "parity": parity.next()}
391 "parity": parity.next()}
392
392
393 yield self.t("filerevision",
393 yield self.t("filerevision",
394 file=f,
394 file=f,
395 path=_up(f),
395 path=_up(f),
396 text=lines(),
396 text=lines(),
397 raw=rawtext,
397 raw=rawtext,
398 mimetype=mt,
398 mimetype=mt,
399 rev=fctx.rev(),
399 rev=fctx.rev(),
400 node=hex(fctx.node()),
400 node=hex(fctx.node()),
401 author=fctx.user(),
401 author=fctx.user(),
402 date=fctx.date(),
402 date=fctx.date(),
403 desc=fctx.description(),
403 desc=fctx.description(),
404 parent=self.siblings(fctx.parents()),
404 parent=self.siblings(fctx.parents()),
405 child=self.siblings(fctx.children()),
405 child=self.siblings(fctx.children()),
406 rename=self.renamelink(fl, n),
406 rename=self.renamelink(fl, n),
407 permissions=fctx.manifest().flags(f))
407 permissions=fctx.manifest().flags(f))
408
408
409 def fileannotate(self, fctx):
409 def fileannotate(self, fctx):
410 f = fctx.path()
410 f = fctx.path()
411 n = fctx.filenode()
411 n = fctx.filenode()
412 fl = fctx.filelog()
412 fl = fctx.filelog()
413 parity = paritygen(self.stripecount)
413 parity = paritygen(self.stripecount)
414
414
415 def annotate(**map):
415 def annotate(**map):
416 last = None
416 last = None
417 for f, l in fctx.annotate(follow=True):
417 for f, l in fctx.annotate(follow=True):
418 fnode = f.filenode()
418 fnode = f.filenode()
419 name = self.repo.ui.shortuser(f.user())
419 name = self.repo.ui.shortuser(f.user())
420
420
421 if last != fnode:
421 if last != fnode:
422 last = fnode
422 last = fnode
423
423
424 yield {"parity": parity.next(),
424 yield {"parity": parity.next(),
425 "node": hex(f.node()),
425 "node": hex(f.node()),
426 "rev": f.rev(),
426 "rev": f.rev(),
427 "author": name,
427 "author": name,
428 "file": f.path(),
428 "file": f.path(),
429 "line": l}
429 "line": l}
430
430
431 yield self.t("fileannotate",
431 yield self.t("fileannotate",
432 file=f,
432 file=f,
433 annotate=annotate,
433 annotate=annotate,
434 path=_up(f),
434 path=_up(f),
435 rev=fctx.rev(),
435 rev=fctx.rev(),
436 node=hex(fctx.node()),
436 node=hex(fctx.node()),
437 author=fctx.user(),
437 author=fctx.user(),
438 date=fctx.date(),
438 date=fctx.date(),
439 desc=fctx.description(),
439 desc=fctx.description(),
440 rename=self.renamelink(fl, n),
440 rename=self.renamelink(fl, n),
441 parent=self.siblings(fctx.parents()),
441 parent=self.siblings(fctx.parents()),
442 child=self.siblings(fctx.children()),
442 child=self.siblings(fctx.children()),
443 permissions=fctx.manifest().flags(f))
443 permissions=fctx.manifest().flags(f))
444
444
445 def manifest(self, ctx, path):
445 def manifest(self, ctx, path):
446 mf = ctx.manifest()
446 mf = ctx.manifest()
447 node = ctx.node()
447 node = ctx.node()
448
448
449 files = {}
449 files = {}
450 parity = paritygen(self.stripecount)
450 parity = paritygen(self.stripecount)
451
451
452 if path and path[-1] != "/":
452 if path and path[-1] != "/":
453 path += "/"
453 path += "/"
454 l = len(path)
454 l = len(path)
455 abspath = "/" + path
455 abspath = "/" + path
456
456
457 for f, n in mf.items():
457 for f, n in mf.items():
458 if f[:l] != path:
458 if f[:l] != path:
459 continue
459 continue
460 remain = f[l:]
460 remain = f[l:]
461 if "/" in remain:
461 if "/" in remain:
462 short = remain[:remain.index("/") + 1] # bleah
462 short = remain[:remain.index("/") + 1] # bleah
463 files[short] = (f, None)
463 files[short] = (f, None)
464 else:
464 else:
465 short = os.path.basename(remain)
465 short = os.path.basename(remain)
466 files[short] = (f, n)
466 files[short] = (f, n)
467
467
468 def filelist(**map):
468 def filelist(**map):
469 fl = files.keys()
469 fl = files.keys()
470 fl.sort()
470 fl.sort()
471 for f in fl:
471 for f in fl:
472 full, fnode = files[f]
472 full, fnode = files[f]
473 if not fnode:
473 if not fnode:
474 continue
474 continue
475
475
476 yield {"file": full,
476 yield {"file": full,
477 "parity": parity.next(),
477 "parity": parity.next(),
478 "basename": f,
478 "basename": f,
479 "size": ctx.filectx(full).size(),
479 "size": ctx.filectx(full).size(),
480 "permissions": mf.flags(full)}
480 "permissions": mf.flags(full)}
481
481
482 def dirlist(**map):
482 def dirlist(**map):
483 fl = files.keys()
483 fl = files.keys()
484 fl.sort()
484 fl.sort()
485 for f in fl:
485 for f in fl:
486 full, fnode = files[f]
486 full, fnode = files[f]
487 if fnode:
487 if fnode:
488 continue
488 continue
489
489
490 yield {"parity": parity.next(),
490 yield {"parity": parity.next(),
491 "path": os.path.join(abspath, f),
491 "path": os.path.join(abspath, f),
492 "basename": f[:-1]}
492 "basename": f[:-1]}
493
493
494 yield self.t("manifest",
494 yield self.t("manifest",
495 rev=ctx.rev(),
495 rev=ctx.rev(),
496 node=hex(node),
496 node=hex(node),
497 path=abspath,
497 path=abspath,
498 up=_up(abspath),
498 up=_up(abspath),
499 upparity=parity.next(),
499 upparity=parity.next(),
500 fentries=filelist,
500 fentries=filelist,
501 dentries=dirlist,
501 dentries=dirlist,
502 archives=self.archivelist(hex(node)),
502 archives=self.archivelist(hex(node)),
503 tags=self.nodetagsdict(node),
503 tags=self.nodetagsdict(node),
504 branches=self.nodebranchdict(ctx))
504 branches=self.nodebranchdict(ctx))
505
505
506 def tags(self):
506 def tags(self):
507 i = self.repo.tagslist()
507 i = self.repo.tagslist()
508 i.reverse()
508 i.reverse()
509 parity = paritygen(self.stripecount)
509 parity = paritygen(self.stripecount)
510
510
511 def entries(notip=False, **map):
511 def entries(notip=False, **map):
512 for k, n in i:
512 for k, n in i:
513 if notip and k == "tip":
513 if notip and k == "tip":
514 continue
514 continue
515 yield {"parity": parity.next(),
515 yield {"parity": parity.next(),
516 "tag": k,
516 "tag": k,
517 "date": self.repo.changectx(n).date(),
517 "date": self.repo.changectx(n).date(),
518 "node": hex(n)}
518 "node": hex(n)}
519
519
520 yield self.t("tags",
520 yield self.t("tags",
521 node=hex(self.repo.changelog.tip()),
521 node=hex(self.repo.changelog.tip()),
522 entries=lambda **x: entries(False, **x),
522 entries=lambda **x: entries(False, **x),
523 entriesnotip=lambda **x: entries(True, **x))
523 entriesnotip=lambda **x: entries(True, **x))
524
524
525 def summary(self):
525 def summary(self):
526 i = self.repo.tagslist()
526 i = self.repo.tagslist()
527 i.reverse()
527 i.reverse()
528
528
529 def tagentries(**map):
529 def tagentries(**map):
530 parity = paritygen(self.stripecount)
530 parity = paritygen(self.stripecount)
531 count = 0
531 count = 0
532 for k, n in i:
532 for k, n in i:
533 if k == "tip": # skip tip
533 if k == "tip": # skip tip
534 continue;
534 continue;
535
535
536 count += 1
536 count += 1
537 if count > 10: # limit to 10 tags
537 if count > 10: # limit to 10 tags
538 break;
538 break;
539
539
540 yield self.t("tagentry",
540 yield self.t("tagentry",
541 parity=parity.next(),
541 parity=parity.next(),
542 tag=k,
542 tag=k,
543 node=hex(n),
543 node=hex(n),
544 date=self.repo.changectx(n).date())
544 date=self.repo.changectx(n).date())
545
545
546
546
547 def branches(**map):
547 def branches(**map):
548 parity = paritygen(self.stripecount)
548 parity = paritygen(self.stripecount)
549
549
550 b = self.repo.branchtags()
550 b = self.repo.branchtags()
551 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
551 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
552 l.sort()
552 l.sort()
553
553
554 for r,n,t in l:
554 for r,n,t in l:
555 ctx = self.repo.changectx(n)
555 ctx = self.repo.changectx(n)
556
556
557 yield {'parity': parity.next(),
557 yield {'parity': parity.next(),
558 'branch': t,
558 'branch': t,
559 'node': hex(n),
559 'node': hex(n),
560 'date': ctx.date()}
560 'date': ctx.date()}
561
561
562 def changelist(**map):
562 def changelist(**map):
563 parity = paritygen(self.stripecount, offset=start-end)
563 parity = paritygen(self.stripecount, offset=start-end)
564 l = [] # build a list in forward order for efficiency
564 l = [] # build a list in forward order for efficiency
565 for i in xrange(start, end):
565 for i in xrange(start, end):
566 ctx = self.repo.changectx(i)
566 ctx = self.repo.changectx(i)
567 n = ctx.node()
567 n = ctx.node()
568 hn = hex(n)
568 hn = hex(n)
569
569
570 l.insert(0, self.t(
570 l.insert(0, self.t(
571 'shortlogentry',
571 'shortlogentry',
572 parity=parity.next(),
572 parity=parity.next(),
573 author=ctx.user(),
573 author=ctx.user(),
574 desc=ctx.description(),
574 desc=ctx.description(),
575 date=ctx.date(),
575 date=ctx.date(),
576 rev=i,
576 rev=i,
577 node=hn,
577 node=hn,
578 tags=self.nodetagsdict(n),
578 tags=self.nodetagsdict(n),
579 branches=self.nodebranchdict(ctx)))
579 branches=self.nodebranchdict(ctx)))
580
580
581 yield l
581 yield l
582
582
583 cl = self.repo.changelog
583 cl = self.repo.changelog
584 count = cl.count()
584 count = cl.count()
585 start = max(0, count - self.maxchanges)
585 start = max(0, count - self.maxchanges)
586 end = min(count, start + self.maxchanges)
586 end = min(count, start + self.maxchanges)
587
587
588 yield self.t("summary",
588 yield self.t("summary",
589 desc=self.config("web", "description", "unknown"),
589 desc=self.config("web", "description", "unknown"),
590 owner=(self.config("ui", "username") or # preferred
590 owner=(self.config("ui", "username") or # preferred
591 self.config("web", "contact") or # deprecated
591 self.config("web", "contact") or # deprecated
592 self.config("web", "author", "unknown")), # also
592 self.config("web", "author", "unknown")), # also
593 lastchange=cl.read(cl.tip())[2],
593 lastchange=cl.read(cl.tip())[2],
594 tags=tagentries,
594 tags=tagentries,
595 branches=branches,
595 branches=branches,
596 shortlog=changelist,
596 shortlog=changelist,
597 node=hex(cl.tip()),
597 node=hex(cl.tip()),
598 archives=self.archivelist("tip"))
598 archives=self.archivelist("tip"))
599
599
600 def filediff(self, fctx):
600 def filediff(self, fctx):
601 n = fctx.node()
601 n = fctx.node()
602 path = fctx.path()
602 path = fctx.path()
603 parents = fctx.parents()
603 parents = fctx.parents()
604 p1 = parents and parents[0].node() or nullid
604 p1 = parents and parents[0].node() or nullid
605
605
606 def diff(**map):
606 def diff(**map):
607 yield self.diff(p1, n, [path])
607 yield self.diff(p1, n, [path])
608
608
609 yield self.t("filediff",
609 yield self.t("filediff",
610 file=path,
610 file=path,
611 node=hex(n),
611 node=hex(n),
612 rev=fctx.rev(),
612 rev=fctx.rev(),
613 parent=self.siblings(parents),
613 parent=self.siblings(parents),
614 child=self.siblings(fctx.children()),
614 child=self.siblings(fctx.children()),
615 diff=diff)
615 diff=diff)
616
616
617 archive_specs = {
617 archive_specs = {
618 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
618 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
619 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
619 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
620 'zip': ('application/zip', 'zip', '.zip', None),
620 'zip': ('application/zip', 'zip', '.zip', None),
621 }
621 }
622
622
623 def archive(self, req, key, type_):
623 def archive(self, req, key, type_):
624 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
624 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
625 cnode = self.repo.lookup(key)
625 cnode = self.repo.lookup(key)
626 arch_version = key
626 arch_version = key
627 if cnode == key or key == 'tip':
627 if cnode == key or key == 'tip':
628 arch_version = short(cnode)
628 arch_version = short(cnode)
629 name = "%s-%s" % (reponame, arch_version)
629 name = "%s-%s" % (reponame, arch_version)
630 mimetype, artype, extension, encoding = self.archive_specs[type_]
630 mimetype, artype, extension, encoding = self.archive_specs[type_]
631 headers = [('Content-type', mimetype),
631 headers = [('Content-type', mimetype),
632 ('Content-disposition', 'attachment; filename=%s%s' %
632 ('Content-disposition', 'attachment; filename=%s%s' %
633 (name, extension))]
633 (name, extension))]
634 if encoding:
634 if encoding:
635 headers.append(('Content-encoding', encoding))
635 headers.append(('Content-encoding', encoding))
636 req.header(headers)
636 req.header(headers)
637 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
637 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
638
638
639 # add tags to things
639 # add tags to things
640 # tags -> list of changesets corresponding to tags
640 # tags -> list of changesets corresponding to tags
641 # find tag, changeset, file
641 # find tag, changeset, file
642
642
643 def cleanpath(self, path):
643 def cleanpath(self, path):
644 path = path.lstrip('/')
644 path = path.lstrip('/')
645 return util.canonpath(self.repo.root, '', path)
645 return util.canonpath(self.repo.root, '', path)
646
646
647 def run(self):
647 def run(self):
648 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
648 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
649 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
649 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
650 import mercurial.hgweb.wsgicgi as wsgicgi
650 import mercurial.hgweb.wsgicgi as wsgicgi
651 from request import wsgiapplication
651 from request import wsgiapplication
652 def make_web_app():
652 def make_web_app():
653 return self
653 return self
654 wsgicgi.launch(wsgiapplication(make_web_app))
654 wsgicgi.launch(wsgiapplication(make_web_app))
655
655
656 def run_wsgi(self, req):
656 def run_wsgi(self, req):
657 def header(**map):
657 def header(**map):
658 header_file = cStringIO.StringIO(
658 header_file = cStringIO.StringIO(
659 ''.join(self.t("header", encoding=self.encoding, **map)))
659 ''.join(self.t("header", encoding=self.encoding, **map)))
660 msg = mimetools.Message(header_file, 0)
660 msg = mimetools.Message(header_file, 0)
661 req.header(msg.items())
661 req.header(msg.items())
662 yield header_file.read()
662 yield header_file.read()
663
663
664 def rawfileheader(**map):
664 def rawfileheader(**map):
665 req.header([('Content-type', map['mimetype']),
665 req.header([('Content-type', map['mimetype']),
666 ('Content-disposition', 'filename=%s' % map['file']),
666 ('Content-disposition', 'filename=%s' % map['file']),
667 ('Content-length', str(len(map['raw'])))])
667 ('Content-length', str(len(map['raw'])))])
668 yield ''
668 yield ''
669
669
670 def footer(**map):
670 def footer(**map):
671 yield self.t("footer", **map)
671 yield self.t("footer", **map)
672
672
673 def motd(**map):
673 def motd(**map):
674 yield self.config("web", "motd", "")
674 yield self.config("web", "motd", "")
675
675
676 def expand_form(form):
676 def expand_form(form):
677 shortcuts = {
677 shortcuts = {
678 'cl': [('cmd', ['changelog']), ('rev', None)],
678 'cl': [('cmd', ['changelog']), ('rev', None)],
679 'sl': [('cmd', ['shortlog']), ('rev', None)],
679 'sl': [('cmd', ['shortlog']), ('rev', None)],
680 'cs': [('cmd', ['changeset']), ('node', None)],
680 'cs': [('cmd', ['changeset']), ('node', None)],
681 'f': [('cmd', ['file']), ('filenode', None)],
681 'f': [('cmd', ['file']), ('filenode', None)],
682 'fl': [('cmd', ['filelog']), ('filenode', None)],
682 'fl': [('cmd', ['filelog']), ('filenode', None)],
683 'fd': [('cmd', ['filediff']), ('node', None)],
683 'fd': [('cmd', ['filediff']), ('node', None)],
684 'fa': [('cmd', ['annotate']), ('filenode', None)],
684 'fa': [('cmd', ['annotate']), ('filenode', None)],
685 'mf': [('cmd', ['manifest']), ('manifest', None)],
685 'mf': [('cmd', ['manifest']), ('manifest', None)],
686 'ca': [('cmd', ['archive']), ('node', None)],
686 'ca': [('cmd', ['archive']), ('node', None)],
687 'tags': [('cmd', ['tags'])],
687 'tags': [('cmd', ['tags'])],
688 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
688 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
689 'static': [('cmd', ['static']), ('file', None)]
689 'static': [('cmd', ['static']), ('file', None)]
690 }
690 }
691
691
692 for k in shortcuts.iterkeys():
692 for k in shortcuts.iterkeys():
693 if form.has_key(k):
693 if form.has_key(k):
694 for name, value in shortcuts[k]:
694 for name, value in shortcuts[k]:
695 if value is None:
695 if value is None:
696 value = form[k]
696 value = form[k]
697 form[name] = value
697 form[name] = value
698 del form[k]
698 del form[k]
699
699
700 def rewrite_request(req):
700 def rewrite_request(req):
701 '''translate new web interface to traditional format'''
701 '''translate new web interface to traditional format'''
702
702
703 def spliturl(req):
703 def spliturl(req):
704 def firstitem(query):
704 def firstitem(query):
705 return query.split('&', 1)[0].split(';', 1)[0]
705 return query.split('&', 1)[0].split(';', 1)[0]
706
706
707 def normurl(url):
707 def normurl(url):
708 inner = '/'.join([x for x in url.split('/') if x])
708 inner = '/'.join([x for x in url.split('/') if x])
709 tl = len(url) > 1 and url.endswith('/') and '/' or ''
709 tl = len(url) > 1 and url.endswith('/') and '/' or ''
710
710
711 return '%s%s%s' % (url.startswith('/') and '/' or '',
711 return '%s%s%s' % (url.startswith('/') and '/' or '',
712 inner, tl)
712 inner, tl)
713
713
714 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
714 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
715 pi = normurl(req.env.get('PATH_INFO', ''))
715 pi = normurl(req.env.get('PATH_INFO', ''))
716 if pi:
716 if pi:
717 # strip leading /
717 # strip leading /
718 pi = pi[1:]
718 pi = pi[1:]
719 if pi:
719 if pi:
720 root = root[:root.rfind(pi)]
720 root = root[:root.rfind(pi)]
721 if req.env.has_key('REPO_NAME'):
721 if req.env.has_key('REPO_NAME'):
722 rn = req.env['REPO_NAME'] + '/'
722 rn = req.env['REPO_NAME'] + '/'
723 root += rn
723 root += rn
724 query = pi[len(rn):]
724 query = pi[len(rn):]
725 else:
725 else:
726 query = pi
726 query = pi
727 else:
727 else:
728 root += '?'
728 root += '?'
729 query = firstitem(req.env['QUERY_STRING'])
729 query = firstitem(req.env['QUERY_STRING'])
730
730
731 return (root, query)
731 return (root, query)
732
732
733 req.url, query = spliturl(req)
733 req.url, query = spliturl(req)
734
734
735 if req.form.has_key('cmd'):
735 if req.form.has_key('cmd'):
736 # old style
736 # old style
737 return
737 return
738
738
739 args = query.split('/', 2)
739 args = query.split('/', 2)
740 if not args or not args[0]:
740 if not args or not args[0]:
741 return
741 return
742
742
743 cmd = args.pop(0)
743 cmd = args.pop(0)
744 style = cmd.rfind('-')
744 style = cmd.rfind('-')
745 if style != -1:
745 if style != -1:
746 req.form['style'] = [cmd[:style]]
746 req.form['style'] = [cmd[:style]]
747 cmd = cmd[style+1:]
747 cmd = cmd[style+1:]
748 # avoid accepting e.g. style parameter as command
748 # avoid accepting e.g. style parameter as command
749 if hasattr(self, 'do_' + cmd):
749 if hasattr(self, 'do_' + cmd):
750 req.form['cmd'] = [cmd]
750 req.form['cmd'] = [cmd]
751
751
752 if args and args[0]:
752 if args and args[0]:
753 node = args.pop(0)
753 node = args.pop(0)
754 req.form['node'] = [node]
754 req.form['node'] = [node]
755 if args:
755 if args:
756 req.form['file'] = args
756 req.form['file'] = args
757
757
758 if cmd == 'static':
758 if cmd == 'static':
759 req.form['file'] = req.form['node']
759 req.form['file'] = req.form['node']
760 elif cmd == 'archive':
760 elif cmd == 'archive':
761 fn = req.form['node'][0]
761 fn = req.form['node'][0]
762 for type_, spec in self.archive_specs.iteritems():
762 for type_, spec in self.archive_specs.iteritems():
763 ext = spec[2]
763 ext = spec[2]
764 if fn.endswith(ext):
764 if fn.endswith(ext):
765 req.form['node'] = [fn[:-len(ext)]]
765 req.form['node'] = [fn[:-len(ext)]]
766 req.form['type'] = [type_]
766 req.form['type'] = [type_]
767
767
768 def sessionvars(**map):
768 def sessionvars(**map):
769 fields = []
769 fields = []
770 if req.form.has_key('style'):
770 if req.form.has_key('style'):
771 style = req.form['style'][0]
771 style = req.form['style'][0]
772 if style != self.config('web', 'style', ''):
772 if style != self.config('web', 'style', ''):
773 fields.append(('style', style))
773 fields.append(('style', style))
774
774
775 separator = req.url[-1] == '?' and ';' or '?'
775 separator = req.url[-1] == '?' and ';' or '?'
776 for name, value in fields:
776 for name, value in fields:
777 yield dict(name=name, value=value, separator=separator)
777 yield dict(name=name, value=value, separator=separator)
778 separator = ';'
778 separator = ';'
779
779
780 self.refresh()
780 self.refresh()
781
781
782 expand_form(req.form)
782 expand_form(req.form)
783 rewrite_request(req)
783 rewrite_request(req)
784
784
785 style = self.config("web", "style", "")
785 style = self.config("web", "style", "")
786 if req.form.has_key('style'):
786 if req.form.has_key('style'):
787 style = req.form['style'][0]
787 style = req.form['style'][0]
788 mapfile = style_map(self.templatepath, style)
788 mapfile = style_map(self.templatepath, style)
789
789
790 proto = req.env.get('wsgi.url_scheme')
790 proto = req.env.get('wsgi.url_scheme')
791 if proto == 'https':
791 if proto == 'https':
792 proto = 'https'
792 proto = 'https'
793 default_port = "443"
793 default_port = "443"
794 else:
794 else:
795 proto = 'http'
795 proto = 'http'
796 default_port = "80"
796 default_port = "80"
797
797
798 port = req.env["SERVER_PORT"]
798 port = req.env["SERVER_PORT"]
799 port = port != default_port and (":" + port) or ""
799 port = port != default_port and (":" + port) or ""
800 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
800 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
801 staticurl = self.config("web", "staticurl") or req.url + 'static/'
801 staticurl = self.config("web", "staticurl") or req.url + 'static/'
802 if not staticurl.endswith('/'):
802 if not staticurl.endswith('/'):
803 staticurl += '/'
803 staticurl += '/'
804
804
805 if not self.reponame:
805 if not self.reponame:
806 self.reponame = (self.config("web", "name")
806 self.reponame = (self.config("web", "name")
807 or req.env.get('REPO_NAME')
807 or req.env.get('REPO_NAME')
808 or req.url.strip('/') or self.repo.root)
808 or req.url.strip('/') or self.repo.root)
809
809
810 self.t = templater.templater(mapfile, templater.common_filters,
810 self.t = templater.templater(mapfile, templater.common_filters,
811 defaults={"url": req.url,
811 defaults={"url": req.url,
812 "staticurl": staticurl,
812 "staticurl": staticurl,
813 "urlbase": urlbase,
813 "urlbase": urlbase,
814 "repo": self.reponame,
814 "repo": self.reponame,
815 "header": header,
815 "header": header,
816 "footer": footer,
816 "footer": footer,
817 "motd": motd,
817 "motd": motd,
818 "rawfileheader": rawfileheader,
818 "rawfileheader": rawfileheader,
819 "sessionvars": sessionvars
819 "sessionvars": sessionvars
820 })
820 })
821
821
822 try:
822 try:
823 if not req.form.has_key('cmd'):
823 if not req.form.has_key('cmd'):
824 req.form['cmd'] = [self.t.cache['default']]
824 req.form['cmd'] = [self.t.cache['default']]
825
825
826 cmd = req.form['cmd'][0]
826 cmd = req.form['cmd'][0]
827
827
828 method = getattr(self, 'do_' + cmd, None)
828 method = getattr(self, 'do_' + cmd, None)
829 if method:
829 if method:
830 try:
830 try:
831 method(req)
831 method(req)
832 except (hg.RepoError, revlog.RevlogError), inst:
832 except (hg.RepoError, revlog.RevlogError), inst:
833 req.write(self.t("error", error=str(inst)))
833 req.write(self.t("error", error=str(inst)))
834 else:
834 else:
835 req.write(self.t("error", error='No such method: ' + cmd))
835 req.write(self.t("error", error='No such method: ' + cmd))
836 finally:
836 finally:
837 self.t = None
837 self.t = None
838
838
839 def changectx(self, req):
839 def changectx(self, req):
840 if req.form.has_key('node'):
840 if req.form.has_key('node'):
841 changeid = req.form['node'][0]
841 changeid = req.form['node'][0]
842 elif req.form.has_key('manifest'):
842 elif req.form.has_key('manifest'):
843 changeid = req.form['manifest'][0]
843 changeid = req.form['manifest'][0]
844 else:
844 else:
845 changeid = self.repo.changelog.count() - 1
845 changeid = self.repo.changelog.count() - 1
846
846
847 try:
847 try:
848 ctx = self.repo.changectx(changeid)
848 ctx = self.repo.changectx(changeid)
849 except hg.RepoError:
849 except hg.RepoError:
850 man = self.repo.manifest
850 man = self.repo.manifest
851 mn = man.lookup(changeid)
851 mn = man.lookup(changeid)
852 ctx = self.repo.changectx(man.linkrev(mn))
852 ctx = self.repo.changectx(man.linkrev(mn))
853
853
854 return ctx
854 return ctx
855
855
def filectx(self, req):
    """Resolve the file context (path + revision) from the request form."""
    path = self.cleanpath(req.form['file'][0])
    if req.form.has_key('node'):
        ident = req.form['node'][0]
    else:
        ident = req.form['filenode'][0]

    try:
        # interpret the id as a changeset and look the file up in it
        return self.repo.changectx(ident).filectx(path)
    except hg.RepoError:
        # otherwise treat it as a file revision id
        return self.repo.filectx(path, fileid=ident)
869
869
def do_log(self, req):
    """Dispatch 'log': file history when a file is named, else changelog."""
    wantfile = req.form.has_key('file') and req.form['file'][0]
    if wantfile:
        self.do_filelog(req)
    else:
        self.do_changelog(req)
875
875
def do_rev(self, req):
    """Alias for the 'changeset' command."""
    self.do_changeset(req)
878
878
def do_file(self, req):
    """Render a file revision; fall back to a manifest (directory) view."""
    path = self.cleanpath(req.form.get('file', [''])[0])
    if path:
        try:
            req.write(self.filerevision(self.filectx(req)))
            return
        except revlog.LookupError:
            # no such file in this revision -- show it as a directory
            pass

    req.write(self.manifest(self.changectx(req), path))
889
889
def do_diff(self, req):
    """Alias for the 'filediff' command."""
    self.do_filediff(req)
892
892
def do_changelog(self, req, shortlog=False):
    """Render the changelog (or shortlog) page for the requested rev."""
    if req.form.has_key('node'):
        ctx = self.changectx(req)
    else:
        hi = req.form.get('rev', [self.repo.changelog.count() - 1])[0]
        try:
            ctx = self.repo.changectx(hi)
        except hg.RepoError:
            # unknown revision: treat the argument as a search query
            req.write(self.search(hi)) # XXX redirect to 404 page?
            return

    req.write(self.changelog(ctx, shortlog=shortlog))
908
908
def do_shortlog(self, req):
    """Render the changelog in its abbreviated form."""
    self.do_changelog(req, shortlog=True)
911
911
def do_changeset(self, req):
    """Render a single changeset page."""
    req.write(self.changeset(self.changectx(req)))
914
914
def do_manifest(self, req):
    """Render the manifest listing for the requested revision and path."""
    ctx = self.changectx(req)
    path = self.cleanpath(req.form['path'][0])
    req.write(self.manifest(ctx, path))
918
918
def do_tags(self, req):
    """Render the tag list page."""
    req.write(self.tags())
921
921
def do_summary(self, req):
    """Render the repository summary page."""
    req.write(self.summary())
924
924
def do_filediff(self, req):
    """Render the diff of one file revision."""
    req.write(self.filediff(self.filectx(req)))
927
927
def do_annotate(self, req):
    """Render the annotated (blame) view of a file revision."""
    req.write(self.fileannotate(self.filectx(req)))
930
930
def do_filelog(self, req):
    """Render the revision history of one file."""
    req.write(self.filelog(self.filectx(req)))
933
933
934 def do_lookup(self, req):
934 def do_lookup(self, req):
935 try:
935 try:
936 r = hex(self.repo.lookup(req.form['key'][0]))
936 r = hex(self.repo.lookup(req.form['key'][0]))
937 success = 1
937 success = 1
938 except Exception,inst:
938 except Exception,inst:
939 r = str(inst)
939 r = str(inst)
940 success = 0
940 success = 0
941 resp = "%s %s\n" % (success, r)
941 resp = "%s %s\n" % (success, r)
942 req.httphdr("application/mercurial-0.1", length=len(resp))
942 req.httphdr("application/mercurial-0.1", length=len(resp))
943 req.write(resp)
943 req.write(resp)
944
944
def do_heads(self, req):
    """Wire protocol command: list the repository heads as hex nodes."""
    resp = " ".join([hex(h) for h in self.repo.heads()]) + "\n"
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)
949
949
def do_branches(self, req):
    """Wire protocol command: report branch info for the given nodes."""
    nodes = []
    if req.form.has_key('nodes'):
        nodes = [bin(n) for n in req.form['nodes'][0].split(" ")]
    out = cStringIO.StringIO()
    for branch in self.repo.branches(nodes):
        out.write(" ".join(map(hex, branch)) + "\n")
    resp = out.getvalue()
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)
960
960
def do_between(self, req):
    """Wire protocol command: report the nodes between base-head pairs.

    Parses the 'pairs' form field ("base-head base-head ..."), asks the
    repository for the nodes between each pair, and streams one
    space-separated hex line per pair back to the client.
    """
    # Default to an empty list: previously a request without a 'pairs'
    # field raised NameError before any response could be written.
    pairs = []
    if req.form.has_key('pairs'):
        pairs = [map(bin, p.split("-"))
                 for p in req.form['pairs'][0].split(" ")]
    resp = cStringIO.StringIO()
    for b in self.repo.between(pairs):
        resp.write(" ".join(map(hex, b)) + "\n")
    resp = resp.getvalue()
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)
971
971
def do_changegroup(self, req):
    """Wire protocol command: stream a zlib-compressed changegroup."""
    req.httphdr("application/mercurial-0.1")
    nodes = []
    if not self.allowpull:
        return

    if req.form.has_key('roots'):
        nodes = map(bin, req.form['roots'][0].split(" "))

    z = zlib.compressobj()
    f = self.repo.changegroup(nodes, 'serve')
    # read fixed-size chunks until EOF ('' sentinel) and compress on the fly
    for chunk in iter(lambda: f.read(4096), ''):
        req.write(z.compress(chunk))

    req.write(z.flush())
990
990
def do_changegroupsubset(self, req):
    """Wire protocol command: stream a compressed partial changegroup."""
    req.httphdr("application/mercurial-0.1")
    bases = []
    heads = []
    if not self.allowpull:
        return

    if req.form.has_key('bases'):
        bases = [bin(x) for x in req.form['bases'][0].split(' ')]
    if req.form.has_key('heads'):
        heads = [bin(x) for x in req.form['heads'][0].split(' ')]

    z = zlib.compressobj()
    f = self.repo.changegroupsubset(bases, heads, 'serve')
    # read fixed-size chunks until EOF ('' sentinel) and compress on the fly
    for chunk in iter(lambda: f.read(4096), ''):
        req.write(z.compress(chunk))

    req.write(z.flush())
1012
1012
def do_archive(self, req):
    """Send an archive of a revision if the requested type is allowed."""
    kind = req.form['type'][0]
    allowed = self.configlist("web", "allow_archive")
    permitted = kind in self.archives and (
        kind in allowed or self.configbool("web", "allow" + kind, False))
    if not permitted:
        req.write(self.t("error"))
        return

    self.archive(req, req.form['node'][0], kind)
1022
1022
def do_static(self, req):
    """Serve a file from the static template directory."""
    fname = req.form['file'][0]
    default = os.path.join(self.templatepath, "static")
    # a repo owner may set web.static in .hg/hgrc to serve any file
    # readable by the user running the CGI script, hence untrusted=False
    static = self.config("web", "static", default, untrusted=False)
    body = staticfile(static, fname, req)
    if not body:
        body = self.t("error", error="%r not found" % fname)
    req.write(body)
1032
1032
def do_capabilities(self, req):
    """Wire protocol command: advertise this server's capabilities."""
    caps = ['lookup', 'changegroupsubset']
    if self.configbool('server', 'uncompressed'):
        caps.append('stream=%d' % self.repo.changelog.version)
    # XXX: make configurable and/or share code with do_unbundle:
    unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
    if unbundleversions:
        caps.append('unbundle=%s' % ','.join(unbundleversions))
    resp = ' '.join(caps)
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)
1044
1044
def check_perm(self, req, op, default):
    '''check permission for operation based on user auth.
    return true if op allowed, else false.
    default is policy to use if no config given.'''

    user = req.env.get('REMOTE_USER')

    # an explicit deny entry (or a wildcard) always wins
    deny = self.configlist('web', 'deny_' + op)
    if deny and (not user or deny == ['*'] or user in deny):
        return False

    # otherwise allow when the user is listed (or wildcard allowed),
    # falling back to the caller-supplied default policy
    allow = self.configlist('web', 'allow_' + op)
    if allow and (allow == ['*'] or user in allow):
        return True
    return default
1058
1058
def do_unbundle(self, req):
    """Wire protocol command: receive a pushed bundle and apply it.

    The incoming bundle is first spooled to a temp file so the repo
    lock is not held while the client streams data; the bundle header
    selects the decompressor, and addchangegroup output is captured
    and relayed back to the client.
    """
    def bail(response, headers={}):
        # refuse the push, but read the whole request body first
        length = int(req.env['CONTENT_LENGTH'])
        for s in util.filechunkiter(req, limit=length):
            # drain incoming bundle, else client will not see
            # response when run outside cgi script
            pass
        req.httphdr("application/mercurial-0.1", headers=headers)
        req.write('0\n')
        req.write(response)

    # require ssl by default, auth info cannot be sniffed and
    # replayed
    ssl_req = self.configbool('web', 'push_ssl', True)
    if ssl_req:
        if req.env.get('wsgi.url_scheme') != 'https':
            bail(_('ssl required\n'))
            return
        proto = 'https'
    else:
        proto = 'http'

    # do not allow push unless explicitly allowed
    if not self.check_perm(req, 'push', False):
        bail(_('push not authorized\n'),
             headers={'status': '401 Unauthorized'})
        return

    their_heads = req.form['heads'][0].split(' ')

    def check_heads():
        # push is only valid against the heads the client saw,
        # unless it sent the literal 'force' marker
        heads = map(hex, self.repo.heads())
        return their_heads == [hex('force')] or their_heads == heads

    # fail early if possible
    if not check_heads():
        bail(_('unsynced changes\n'))
        return

    req.httphdr("application/mercurial-0.1")

    # do not lock repo until all changegroup data is
    # streamed. save to temporary file.

    fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
    fp = os.fdopen(fd, 'wb+')
    try:
        length = int(req.env['CONTENT_LENGTH'])
        for s in util.filechunkiter(req, limit=length):
            fp.write(s)

        try:
            lock = self.repo.lock()
            try:
                # re-check now that we hold the lock: heads may have
                # moved while the bundle was streaming in
                if not check_heads():
                    req.write('0\n')
                    req.write(_('unsynced changes\n'))
                    return

                fp.seek(0)
                header = fp.read(6)
                if not header.startswith("HG"):
                    # old client with uncompressed bundle
                    def generator(f):
                        yield header
                        for chunk in f:
                            yield chunk
                elif not header.startswith("HG10"):
                    req.write("0\n")
                    req.write(_("unknown bundle version\n"))
                    return
                elif header == "HG10GZ":
                    def generator(f):
                        zd = zlib.decompressobj()
                        for chunk in f:
                            yield zd.decompress(chunk)
                elif header == "HG10BZ":
                    def generator(f):
                        zd = bz2.BZ2Decompressor()
                        # feed back the "BZ" magic the header read consumed
                        zd.decompress("BZ")
                        for chunk in f:
                            yield zd.decompress(chunk)
                elif header == "HG10UN":
                    def generator(f):
                        for chunk in f:
                            yield chunk
                else:
                    req.write("0\n")
                    req.write(_("unknown bundle compression type\n"))
                    return
                gen = generator(util.filechunkiter(fp, 4096))

                # send addchangegroup output to client

                old_stdout = sys.stdout
                sys.stdout = cStringIO.StringIO()

                try:
                    url = 'remote:%s:%s' % (proto,
                                            req.env.get('REMOTE_HOST', ''))
                    try:
                        ret = self.repo.addchangegroup(
                                util.chunkbuffer(gen), 'serve', url)
                    except util.Abort, inst:
                        sys.stdout.write("abort: %s\n" % inst)
                        ret = 0
                finally:
                    # always restore stdout before replying
                    val = sys.stdout.getvalue()
                    sys.stdout = old_stdout
                req.write('%d\n' % ret)
                req.write(val)
            finally:
                # drop the last reference so the lock is released
                del lock
        except (OSError, IOError), inst:
            req.write('0\n')
            filename = getattr(inst, 'filename', '')
            # Don't send our filesystem layout to the client
            if filename.startswith(self.repo.root):
                filename = filename[len(self.repo.root)+1:]
            else:
                filename = ''
            error = getattr(inst, 'strerror', 'Unknown error')
            req.write('%s: %s\n' % (error, filename))
    finally:
        # the spool file is always removed, success or failure
        fp.close()
        os.unlink(tempname)
1185
1185
def do_stream_out(self, req):
    """Wire protocol command: stream raw store files for a fast clone."""
    req.httphdr("application/mercurial-0.1")
    streamclone.stream_out(self.repo, req, untrusted=True)
This diff has been collapsed as it changes many lines, (628 lines changed) Show them Hide them
@@ -1,1950 +1,1976 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util, extensions, hook
13 import os, revlog, time, util, extensions, hook
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
def __del__(self):
    # drop the transaction reference on teardown
    self.transhandle = None
def __init__(self, parentui, path=None, create=0):
    """Open (or, with create=1, initialize) the repository at path.

    Reads the 'requires' file, sets up the store openers, loads the
    per-repo hgrc and extensions, and initializes the caches.
    """
    repo.repository.__init__(self)
    self.path = path
    self.root = os.path.realpath(path)
    # the .hg control directory lives under the working dir root
    self.path = os.path.join(self.root, ".hg")
    self.origroot = path
    self.opener = util.opener(self.path)
    self.wopener = util.opener(self.root)

    if not os.path.isdir(self.path):
        if create:
            if not os.path.exists(path):
                os.mkdir(path)
            os.mkdir(self.path)
            requirements = ["revlogv1"]
            if parentui.configbool('format', 'usestore', True):
                os.mkdir(os.path.join(self.path, "store"))
                requirements.append("store")
            # create an invalid changelog
            self.opener("00changelog.i", "a").write(
                '\0\0\0\2' # represents revlogv2
                ' dummy changelog to prevent using the old repo layout'
            )
            reqfile = self.opener("requires", "w")
            for r in requirements:
                reqfile.write("%s\n" % r)
            reqfile.close()
        else:
            raise repo.RepoError(_("repository %s not found") % path)
    elif create:
        raise repo.RepoError(_("repository %s already exists") % path)
    else:
        # find requirements
        try:
            requirements = self.opener("requires").read().splitlines()
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            # missing 'requires' file: treat as an old-format repo
            requirements = []
        # check them
        for r in requirements:
            if r not in self.supported:
                raise repo.RepoError(_("requirement '%s' not supported") % r)

    # setup store: encoded filenames under .hg/store for new repos,
    # plain filenames directly under .hg for old layouts
    if "store" in requirements:
        self.encodefn = util.encodefilename
        self.decodefn = util.decodefilename
        self.spath = os.path.join(self.path, "store")
    else:
        self.encodefn = lambda x: x
        self.decodefn = lambda x: x
        self.spath = self.path
    self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)

    self.ui = ui.ui(parentui=parentui)
    try:
        self.ui.readconfig(self.join("hgrc"), self.root)
        extensions.loadall(self.ui)
    except IOError:
        # no per-repo hgrc is fine
        pass

    # lazily-populated caches
    self.tagscache = None
    self.branchcache = None
    self.nodetagscache = None
    self.filterpats = {}
    self.transhandle = None
88
88
def __getattr__(self, name):
    # Lazily create the expensive changelog/manifest/dirstate objects
    # on first access; the assignment makes the instance attribute
    # shadow this hook on subsequent lookups.
    if name == 'changelog':
        self.changelog = changelog.changelog(self.sopener)
        # the changelog fixes the revlog version used by the store opener
        self.sopener.defversion = self.changelog.version
        return self.changelog
    if name == 'manifest':
        # touch the changelog first so sopener.defversion is set
        self.changelog
        self.manifest = manifest.manifest(self.sopener)
        return self.manifest
    if name == 'dirstate':
        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
        return self.dirstate
    else:
        raise AttributeError, name
103
103
def url(self):
    """Return the file: URL naming this repository's root."""
    return 'file:' + self.root
106
106
def hook(self, name, throw=False, **args):
    """Run the named hook against this repository."""
    return hook.hook(self.ui, self, name, throw, **args)
109
109
110 tag_disallowed = ':\r\n'
110 tag_disallowed = ':\r\n'
111
111
def _tag(self, name, node, message, local, user, date, parent=None,
         extra={}):
    """Record a tag for node, either locally or in a .hgtags commit.

    When parent is None the working directory's .hgtags is updated
    and committed via the dirstate; otherwise .hgtags is built from
    the given parent revision.  Returns the commit node (None for
    local tags).
    NOTE(review): the mutable default extra={} is shared across calls;
    callers must not mutate it.
    """
    use_dirstate = parent is None

    for c in self.tag_disallowed:
        if c in name:
            raise util.Abort(_('%r cannot be used in a tag name') % c)

    self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

    def writetag(fp, name, munge, prevtags):
        # append one "<hexnode> <name>" line, ensuring the previous
        # content ends with a newline first
        if prevtags and prevtags[-1] != '\n':
            fp.write('\n')
        fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
        fp.close()
        self.hook('tag', node=hex(node), tag=name, local=local)

    prevtags = ''
    if local:
        try:
            fp = self.opener('localtags', 'r+')
        except IOError, err:
            fp = self.opener('localtags', 'a')
        else:
            prevtags = fp.read()

        # local tags are stored in the current charset
        writetag(fp, name, None, prevtags)
        return

    if use_dirstate:
        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, err:
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()
    else:
        try:
            prevtags = self.filectx('.hgtags', parent).data()
        except revlog.LookupError:
            # no .hgtags in the parent revision: start fresh
            pass
        fp = self.wfile('.hgtags', 'wb')

    # committed tags are stored in UTF-8
    writetag(fp, name, util.fromlocal, prevtags)

    if use_dirstate and '.hgtags' not in self.dirstate:
        self.add(['.hgtags'])

    tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
                          extra=extra)

    self.hook('tag', node=hex(node), tag=name, local=local)

    return tagnode
168
168
def tag(self, name, node, message, local, user, date):
    '''tag a revision with a symbolic name.

    if local is True, the tag is stored in a per-repository file.
    otherwise, it is stored in the .hgtags file, and a new
    changeset is committed with the change.

    keyword arguments:

    local: whether to store tag in non-version-controlled file
    (default False)

    message: commit message to use if committing

    user: name of user to use if committing

    date: date tuple to use if committing'''

    # refuse to tag if the working copy of .hgtags shows up in any
    # of the first five status categories (modified/added/removed/...)
    for changed in self.status()[:5]:
        if '.hgtags' in changed:
            raise util.Abort(_('working copy of .hgtags is changed '
                               '(please commit .hgtags manually)'))

    self._tag(name, node, message, local, user, date)
194
194
195 def tags(self):
195 def tags(self):
196 '''return a mapping of tag to node'''
196 '''return a mapping of tag to node'''
197 if self.tagscache:
197 if self.tagscache:
198 return self.tagscache
198 return self.tagscache
199
199
200 globaltags = {}
200 globaltags = {}
201
201
202 def readtags(lines, fn):
202 def readtags(lines, fn):
203 filetags = {}
203 filetags = {}
204 count = 0
204 count = 0
205
205
206 def warn(msg):
206 def warn(msg):
207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
208
208
209 for l in lines:
209 for l in lines:
210 count += 1
210 count += 1
211 if not l:
211 if not l:
212 continue
212 continue
213 s = l.split(" ", 1)
213 s = l.split(" ", 1)
214 if len(s) != 2:
214 if len(s) != 2:
215 warn(_("cannot parse entry"))
215 warn(_("cannot parse entry"))
216 continue
216 continue
217 node, key = s
217 node, key = s
218 key = util.tolocal(key.strip()) # stored in UTF-8
218 key = util.tolocal(key.strip()) # stored in UTF-8
219 try:
219 try:
220 bin_n = bin(node)
220 bin_n = bin(node)
221 except TypeError:
221 except TypeError:
222 warn(_("node '%s' is not well formed") % node)
222 warn(_("node '%s' is not well formed") % node)
223 continue
223 continue
224 if bin_n not in self.changelog.nodemap:
224 if bin_n not in self.changelog.nodemap:
225 warn(_("tag '%s' refers to unknown node") % key)
225 warn(_("tag '%s' refers to unknown node") % key)
226 continue
226 continue
227
227
228 h = []
228 h = []
229 if key in filetags:
229 if key in filetags:
230 n, h = filetags[key]
230 n, h = filetags[key]
231 h.append(n)
231 h.append(n)
232 filetags[key] = (bin_n, h)
232 filetags[key] = (bin_n, h)
233
233
234 for k, nh in filetags.items():
234 for k, nh in filetags.items():
235 if k not in globaltags:
235 if k not in globaltags:
236 globaltags[k] = nh
236 globaltags[k] = nh
237 continue
237 continue
238 # we prefer the global tag if:
238 # we prefer the global tag if:
239 # it supercedes us OR
239 # it supercedes us OR
240 # mutual supercedes and it has a higher rank
240 # mutual supercedes and it has a higher rank
241 # otherwise we win because we're tip-most
241 # otherwise we win because we're tip-most
242 an, ah = nh
242 an, ah = nh
243 bn, bh = globaltags[k]
243 bn, bh = globaltags[k]
244 if (bn != an and an in bh and
244 if (bn != an and an in bh and
245 (bn not in ah or len(bh) > len(ah))):
245 (bn not in ah or len(bh) > len(ah))):
246 an = bn
246 an = bn
247 ah.extend([n for n in bh if n not in ah])
247 ah.extend([n for n in bh if n not in ah])
248 globaltags[k] = an, ah
248 globaltags[k] = an, ah
249
249
250 # read the tags file from each head, ending with the tip
250 # read the tags file from each head, ending with the tip
251 f = None
251 f = None
252 for rev, node, fnode in self._hgtagsnodes():
252 for rev, node, fnode in self._hgtagsnodes():
253 f = (f and f.filectx(fnode) or
253 f = (f and f.filectx(fnode) or
254 self.filectx('.hgtags', fileid=fnode))
254 self.filectx('.hgtags', fileid=fnode))
255 readtags(f.data().splitlines(), f)
255 readtags(f.data().splitlines(), f)
256
256
257 try:
257 try:
258 data = util.fromlocal(self.opener("localtags").read())
258 data = util.fromlocal(self.opener("localtags").read())
259 # localtags are stored in the local character set
259 # localtags are stored in the local character set
260 # while the internal tag table is stored in UTF-8
260 # while the internal tag table is stored in UTF-8
261 readtags(data.splitlines(), "localtags")
261 readtags(data.splitlines(), "localtags")
262 except IOError:
262 except IOError:
263 pass
263 pass
264
264
265 self.tagscache = {}
265 self.tagscache = {}
266 for k,nh in globaltags.items():
266 for k,nh in globaltags.items():
267 n = nh[0]
267 n = nh[0]
268 if n != nullid:
268 if n != nullid:
269 self.tagscache[k] = n
269 self.tagscache[k] = n
270 self.tagscache['tip'] = self.changelog.tip()
270 self.tagscache['tip'] = self.changelog.tip()
271
271
272 return self.tagscache
272 return self.tagscache
273
273
274 def _hgtagsnodes(self):
274 def _hgtagsnodes(self):
275 heads = self.heads()
275 heads = self.heads()
276 heads.reverse()
276 heads.reverse()
277 last = {}
277 last = {}
278 ret = []
278 ret = []
279 for node in heads:
279 for node in heads:
280 c = self.changectx(node)
280 c = self.changectx(node)
281 rev = c.rev()
281 rev = c.rev()
282 try:
282 try:
283 fnode = c.filenode('.hgtags')
283 fnode = c.filenode('.hgtags')
284 except revlog.LookupError:
284 except revlog.LookupError:
285 continue
285 continue
286 ret.append((rev, node, fnode))
286 ret.append((rev, node, fnode))
287 if fnode in last:
287 if fnode in last:
288 ret[last[fnode]] = None
288 ret[last[fnode]] = None
289 last[fnode] = len(ret) - 1
289 last[fnode] = len(ret) - 1
290 return [item for item in ret if item]
290 return [item for item in ret if item]
291
291
292 def tagslist(self):
292 def tagslist(self):
293 '''return a list of tags ordered by revision'''
293 '''return a list of tags ordered by revision'''
294 l = []
294 l = []
295 for t, n in self.tags().items():
295 for t, n in self.tags().items():
296 try:
296 try:
297 r = self.changelog.rev(n)
297 r = self.changelog.rev(n)
298 except:
298 except:
299 r = -2 # sort to the beginning of the list if unknown
299 r = -2 # sort to the beginning of the list if unknown
300 l.append((r, t, n))
300 l.append((r, t, n))
301 l.sort()
301 l.sort()
302 return [(t, n) for r, t, n in l]
302 return [(t, n) for r, t, n in l]
303
303
304 def nodetags(self, node):
304 def nodetags(self, node):
305 '''return the tags associated with a node'''
305 '''return the tags associated with a node'''
306 if not self.nodetagscache:
306 if not self.nodetagscache:
307 self.nodetagscache = {}
307 self.nodetagscache = {}
308 for t, n in self.tags().items():
308 for t, n in self.tags().items():
309 self.nodetagscache.setdefault(n, []).append(t)
309 self.nodetagscache.setdefault(n, []).append(t)
310 return self.nodetagscache.get(node, [])
310 return self.nodetagscache.get(node, [])
311
311
312 def _branchtags(self):
312 def _branchtags(self):
313 partial, last, lrev = self._readbranchcache()
313 partial, last, lrev = self._readbranchcache()
314
314
315 tiprev = self.changelog.count() - 1
315 tiprev = self.changelog.count() - 1
316 if lrev != tiprev:
316 if lrev != tiprev:
317 self._updatebranchcache(partial, lrev+1, tiprev+1)
317 self._updatebranchcache(partial, lrev+1, tiprev+1)
318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
319
319
320 return partial
320 return partial
321
321
322 def branchtags(self):
322 def branchtags(self):
323 if self.branchcache is not None:
323 if self.branchcache is not None:
324 return self.branchcache
324 return self.branchcache
325
325
326 self.branchcache = {} # avoid recursion in changectx
326 self.branchcache = {} # avoid recursion in changectx
327 partial = self._branchtags()
327 partial = self._branchtags()
328
328
329 # the branch cache is stored on disk as UTF-8, but in the local
329 # the branch cache is stored on disk as UTF-8, but in the local
330 # charset internally
330 # charset internally
331 for k, v in partial.items():
331 for k, v in partial.items():
332 self.branchcache[util.tolocal(k)] = v
332 self.branchcache[util.tolocal(k)] = v
333 return self.branchcache
333 return self.branchcache
334
334
335 def _readbranchcache(self):
335 def _readbranchcache(self):
336 partial = {}
336 partial = {}
337 try:
337 try:
338 f = self.opener("branch.cache")
338 f = self.opener("branch.cache")
339 lines = f.read().split('\n')
339 lines = f.read().split('\n')
340 f.close()
340 f.close()
341 except (IOError, OSError):
341 except (IOError, OSError):
342 return {}, nullid, nullrev
342 return {}, nullid, nullrev
343
343
344 try:
344 try:
345 last, lrev = lines.pop(0).split(" ", 1)
345 last, lrev = lines.pop(0).split(" ", 1)
346 last, lrev = bin(last), int(lrev)
346 last, lrev = bin(last), int(lrev)
347 if not (lrev < self.changelog.count() and
347 if not (lrev < self.changelog.count() and
348 self.changelog.node(lrev) == last): # sanity check
348 self.changelog.node(lrev) == last): # sanity check
349 # invalidate the cache
349 # invalidate the cache
350 raise ValueError('Invalid branch cache: unknown tip')
350 raise ValueError('Invalid branch cache: unknown tip')
351 for l in lines:
351 for l in lines:
352 if not l: continue
352 if not l: continue
353 node, label = l.split(" ", 1)
353 node, label = l.split(" ", 1)
354 partial[label.strip()] = bin(node)
354 partial[label.strip()] = bin(node)
355 except (KeyboardInterrupt, util.SignalInterrupt):
355 except (KeyboardInterrupt, util.SignalInterrupt):
356 raise
356 raise
357 except Exception, inst:
357 except Exception, inst:
358 if self.ui.debugflag:
358 if self.ui.debugflag:
359 self.ui.warn(str(inst), '\n')
359 self.ui.warn(str(inst), '\n')
360 partial, last, lrev = {}, nullid, nullrev
360 partial, last, lrev = {}, nullid, nullrev
361 return partial, last, lrev
361 return partial, last, lrev
362
362
363 def _writebranchcache(self, branches, tip, tiprev):
363 def _writebranchcache(self, branches, tip, tiprev):
364 try:
364 try:
365 f = self.opener("branch.cache", "w", atomictemp=True)
365 f = self.opener("branch.cache", "w", atomictemp=True)
366 f.write("%s %s\n" % (hex(tip), tiprev))
366 f.write("%s %s\n" % (hex(tip), tiprev))
367 for label, node in branches.iteritems():
367 for label, node in branches.iteritems():
368 f.write("%s %s\n" % (hex(node), label))
368 f.write("%s %s\n" % (hex(node), label))
369 f.rename()
369 f.rename()
370 except (IOError, OSError):
370 except (IOError, OSError):
371 pass
371 pass
372
372
373 def _updatebranchcache(self, partial, start, end):
373 def _updatebranchcache(self, partial, start, end):
374 for r in xrange(start, end):
374 for r in xrange(start, end):
375 c = self.changectx(r)
375 c = self.changectx(r)
376 b = c.branch()
376 b = c.branch()
377 partial[b] = c.node()
377 partial[b] = c.node()
378
378
379 def lookup(self, key):
379 def lookup(self, key):
380 if key == '.':
380 if key == '.':
381 key, second = self.dirstate.parents()
381 key, second = self.dirstate.parents()
382 if key == nullid:
382 if key == nullid:
383 raise repo.RepoError(_("no revision checked out"))
383 raise repo.RepoError(_("no revision checked out"))
384 if second != nullid:
384 if second != nullid:
385 self.ui.warn(_("warning: working directory has two parents, "
385 self.ui.warn(_("warning: working directory has two parents, "
386 "tag '.' uses the first\n"))
386 "tag '.' uses the first\n"))
387 elif key == 'null':
387 elif key == 'null':
388 return nullid
388 return nullid
389 n = self.changelog._match(key)
389 n = self.changelog._match(key)
390 if n:
390 if n:
391 return n
391 return n
392 if key in self.tags():
392 if key in self.tags():
393 return self.tags()[key]
393 return self.tags()[key]
394 if key in self.branchtags():
394 if key in self.branchtags():
395 return self.branchtags()[key]
395 return self.branchtags()[key]
396 n = self.changelog._partialmatch(key)
396 n = self.changelog._partialmatch(key)
397 if n:
397 if n:
398 return n
398 return n
399 raise repo.RepoError(_("unknown revision '%s'") % key)
399 raise repo.RepoError(_("unknown revision '%s'") % key)
400
400
401 def dev(self):
401 def dev(self):
402 return os.lstat(self.path).st_dev
402 return os.lstat(self.path).st_dev
403
403
404 def local(self):
404 def local(self):
405 return True
405 return True
406
406
407 def join(self, f):
407 def join(self, f):
408 return os.path.join(self.path, f)
408 return os.path.join(self.path, f)
409
409
410 def sjoin(self, f):
410 def sjoin(self, f):
411 f = self.encodefn(f)
411 f = self.encodefn(f)
412 return os.path.join(self.spath, f)
412 return os.path.join(self.spath, f)
413
413
414 def wjoin(self, f):
414 def wjoin(self, f):
415 return os.path.join(self.root, f)
415 return os.path.join(self.root, f)
416
416
417 def file(self, f):
417 def file(self, f):
418 if f[0] == '/':
418 if f[0] == '/':
419 f = f[1:]
419 f = f[1:]
420 return filelog.filelog(self.sopener, f)
420 return filelog.filelog(self.sopener, f)
421
421
422 def changectx(self, changeid=None):
422 def changectx(self, changeid=None):
423 return context.changectx(self, changeid)
423 return context.changectx(self, changeid)
424
424
425 def workingctx(self):
425 def workingctx(self):
426 return context.workingctx(self)
426 return context.workingctx(self)
427
427
428 def parents(self, changeid=None):
428 def parents(self, changeid=None):
429 '''
429 '''
430 get list of changectxs for parents of changeid or working directory
430 get list of changectxs for parents of changeid or working directory
431 '''
431 '''
432 if changeid is None:
432 if changeid is None:
433 pl = self.dirstate.parents()
433 pl = self.dirstate.parents()
434 else:
434 else:
435 n = self.changelog.lookup(changeid)
435 n = self.changelog.lookup(changeid)
436 pl = self.changelog.parents(n)
436 pl = self.changelog.parents(n)
437 if pl[1] == nullid:
437 if pl[1] == nullid:
438 return [self.changectx(pl[0])]
438 return [self.changectx(pl[0])]
439 return [self.changectx(pl[0]), self.changectx(pl[1])]
439 return [self.changectx(pl[0]), self.changectx(pl[1])]
440
440
441 def filectx(self, path, changeid=None, fileid=None):
441 def filectx(self, path, changeid=None, fileid=None):
442 """changeid can be a changeset revision, node, or tag.
442 """changeid can be a changeset revision, node, or tag.
443 fileid can be a file revision or node."""
443 fileid can be a file revision or node."""
444 return context.filectx(self, path, changeid, fileid)
444 return context.filectx(self, path, changeid, fileid)
445
445
446 def getcwd(self):
446 def getcwd(self):
447 return self.dirstate.getcwd()
447 return self.dirstate.getcwd()
448
448
449 def pathto(self, f, cwd=None):
449 def pathto(self, f, cwd=None):
450 return self.dirstate.pathto(f, cwd)
450 return self.dirstate.pathto(f, cwd)
451
451
452 def wfile(self, f, mode='r'):
452 def wfile(self, f, mode='r'):
453 return self.wopener(f, mode)
453 return self.wopener(f, mode)
454
454
455 def _link(self, f):
455 def _link(self, f):
456 return os.path.islink(self.wjoin(f))
456 return os.path.islink(self.wjoin(f))
457
457
458 def _filter(self, filter, filename, data):
458 def _filter(self, filter, filename, data):
459 if filter not in self.filterpats:
459 if filter not in self.filterpats:
460 l = []
460 l = []
461 for pat, cmd in self.ui.configitems(filter):
461 for pat, cmd in self.ui.configitems(filter):
462 mf = util.matcher(self.root, "", [pat], [], [])[1]
462 mf = util.matcher(self.root, "", [pat], [], [])[1]
463 l.append((mf, cmd))
463 l.append((mf, cmd))
464 self.filterpats[filter] = l
464 self.filterpats[filter] = l
465
465
466 for mf, cmd in self.filterpats[filter]:
466 for mf, cmd in self.filterpats[filter]:
467 if mf(filename):
467 if mf(filename):
468 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
468 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
469 data = util.filter(data, cmd)
469 data = util.filter(data, cmd)
470 break
470 break
471
471
472 return data
472 return data
473
473
474 def wread(self, filename):
474 def wread(self, filename):
475 if self._link(filename):
475 if self._link(filename):
476 data = os.readlink(self.wjoin(filename))
476 data = os.readlink(self.wjoin(filename))
477 else:
477 else:
478 data = self.wopener(filename, 'r').read()
478 data = self.wopener(filename, 'r').read()
479 return self._filter("encode", filename, data)
479 return self._filter("encode", filename, data)
480
480
481 def wwrite(self, filename, data, flags):
481 def wwrite(self, filename, data, flags):
482 data = self._filter("decode", filename, data)
482 data = self._filter("decode", filename, data)
483 if "l" in flags:
483 if "l" in flags:
484 self.wopener.symlink(data, filename)
484 self.wopener.symlink(data, filename)
485 else:
485 else:
486 try:
486 try:
487 if self._link(filename):
487 if self._link(filename):
488 os.unlink(self.wjoin(filename))
488 os.unlink(self.wjoin(filename))
489 except OSError:
489 except OSError:
490 pass
490 pass
491 self.wopener(filename, 'w').write(data)
491 self.wopener(filename, 'w').write(data)
492 util.set_exec(self.wjoin(filename), "x" in flags)
492 util.set_exec(self.wjoin(filename), "x" in flags)
493
493
494 def wwritedata(self, filename, data):
494 def wwritedata(self, filename, data):
495 return self._filter("decode", filename, data)
495 return self._filter("decode", filename, data)
496
496
497 def transaction(self):
497 def transaction(self):
498 tr = self.transhandle
498 tr = self.transhandle
499 if tr != None and tr.running():
499 if tr != None and tr.running():
500 return tr.nest()
500 return tr.nest()
501
501
502 # save dirstate for rollback
502 # save dirstate for rollback
503 try:
503 try:
504 ds = self.opener("dirstate").read()
504 ds = self.opener("dirstate").read()
505 except IOError:
505 except IOError:
506 ds = ""
506 ds = ""
507 self.opener("journal.dirstate", "w").write(ds)
507 self.opener("journal.dirstate", "w").write(ds)
508
508
509 renames = [(self.sjoin("journal"), self.sjoin("undo")),
509 renames = [(self.sjoin("journal"), self.sjoin("undo")),
510 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
510 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
511 tr = transaction.transaction(self.ui.warn, self.sopener,
511 tr = transaction.transaction(self.ui.warn, self.sopener,
512 self.sjoin("journal"),
512 self.sjoin("journal"),
513 aftertrans(renames))
513 aftertrans(renames))
514 self.transhandle = tr
514 self.transhandle = tr
515 return tr
515 return tr
516
516
517 def recover(self):
517 def recover(self):
518 l = self.lock()
518 l = self.lock()
519 if os.path.exists(self.sjoin("journal")):
519 try:
520 self.ui.status(_("rolling back interrupted transaction\n"))
520 if os.path.exists(self.sjoin("journal")):
521 transaction.rollback(self.sopener, self.sjoin("journal"))
521 self.ui.status(_("rolling back interrupted transaction\n"))
522 self.invalidate()
522 transaction.rollback(self.sopener, self.sjoin("journal"))
523 return True
523 self.invalidate()
524 else:
524 return True
525 self.ui.warn(_("no interrupted transaction available\n"))
525 else:
526 return False
526 self.ui.warn(_("no interrupted transaction available\n"))
527 return False
528 finally:
529 del l
527
530
528 def rollback(self, wlock=None, lock=None):
531 def rollback(self, wlock=None, lock=None):
529 if not wlock:
532 try:
530 wlock = self.wlock()
533 if not wlock:
531 if not lock:
534 wlock = self.wlock()
532 lock = self.lock()
535 if not lock:
533 if os.path.exists(self.sjoin("undo")):
536 lock = self.lock()
534 self.ui.status(_("rolling back last transaction\n"))
537 if os.path.exists(self.sjoin("undo")):
535 transaction.rollback(self.sopener, self.sjoin("undo"))
538 self.ui.status(_("rolling back last transaction\n"))
536 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
539 transaction.rollback(self.sopener, self.sjoin("undo"))
537 self.invalidate()
540 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
538 self.dirstate.invalidate()
541 self.invalidate()
539 else:
542 self.dirstate.invalidate()
540 self.ui.warn(_("no rollback information available\n"))
543 else:
544 self.ui.warn(_("no rollback information available\n"))
545 finally:
546 del wlock, lock
541
547
542 def invalidate(self):
548 def invalidate(self):
543 for a in "changelog manifest".split():
549 for a in "changelog manifest".split():
544 if hasattr(self, a):
550 if hasattr(self, a):
545 self.__delattr__(a)
551 self.__delattr__(a)
546 self.tagscache = None
552 self.tagscache = None
547 self.nodetagscache = None
553 self.nodetagscache = None
548
554
549 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
555 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
550 try:
556 try:
551 l = lock.lock(lockname, 0, releasefn, desc=desc)
557 l = lock.lock(lockname, 0, releasefn, desc=desc)
552 except lock.LockHeld, inst:
558 except lock.LockHeld, inst:
553 if not wait:
559 if not wait:
554 raise
560 raise
555 self.ui.warn(_("waiting for lock on %s held by %r\n") %
561 self.ui.warn(_("waiting for lock on %s held by %r\n") %
556 (desc, inst.locker))
562 (desc, inst.locker))
557 # default to 600 seconds timeout
563 # default to 600 seconds timeout
558 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
564 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
559 releasefn, desc=desc)
565 releasefn, desc=desc)
560 if acquirefn:
566 if acquirefn:
561 acquirefn()
567 acquirefn()
562 return l
568 return l
563
569
564 def lock(self, wait=True):
570 def lock(self, wait=True):
565 return self._lock(self.sjoin("lock"), wait, None, self.invalidate,
571 return self._lock(self.sjoin("lock"), wait, None, self.invalidate,
566 _('repository %s') % self.origroot)
572 _('repository %s') % self.origroot)
567
573
568 def wlock(self, wait=True):
574 def wlock(self, wait=True):
569 return self._lock(self.join("wlock"), wait, self.dirstate.write,
575 return self._lock(self.join("wlock"), wait, self.dirstate.write,
570 self.dirstate.invalidate,
576 self.dirstate.invalidate,
571 _('working directory of %s') % self.origroot)
577 _('working directory of %s') % self.origroot)
572
578
573 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
579 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
574 """
580 """
575 commit an individual file as part of a larger transaction
581 commit an individual file as part of a larger transaction
576 """
582 """
577
583
578 t = self.wread(fn)
584 t = self.wread(fn)
579 fl = self.file(fn)
585 fl = self.file(fn)
580 fp1 = manifest1.get(fn, nullid)
586 fp1 = manifest1.get(fn, nullid)
581 fp2 = manifest2.get(fn, nullid)
587 fp2 = manifest2.get(fn, nullid)
582
588
583 meta = {}
589 meta = {}
584 cp = self.dirstate.copied(fn)
590 cp = self.dirstate.copied(fn)
585 if cp:
591 if cp:
586 # Mark the new revision of this file as a copy of another
592 # Mark the new revision of this file as a copy of another
587 # file. This copy data will effectively act as a parent
593 # file. This copy data will effectively act as a parent
588 # of this new revision. If this is a merge, the first
594 # of this new revision. If this is a merge, the first
589 # parent will be the nullid (meaning "look up the copy data")
595 # parent will be the nullid (meaning "look up the copy data")
590 # and the second one will be the other parent. For example:
596 # and the second one will be the other parent. For example:
591 #
597 #
592 # 0 --- 1 --- 3 rev1 changes file foo
598 # 0 --- 1 --- 3 rev1 changes file foo
593 # \ / rev2 renames foo to bar and changes it
599 # \ / rev2 renames foo to bar and changes it
594 # \- 2 -/ rev3 should have bar with all changes and
600 # \- 2 -/ rev3 should have bar with all changes and
595 # should record that bar descends from
601 # should record that bar descends from
596 # bar in rev2 and foo in rev1
602 # bar in rev2 and foo in rev1
597 #
603 #
598 # this allows this merge to succeed:
604 # this allows this merge to succeed:
599 #
605 #
600 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
606 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
601 # \ / merging rev3 and rev4 should use bar@rev2
607 # \ / merging rev3 and rev4 should use bar@rev2
602 # \- 2 --- 4 as the merge base
608 # \- 2 --- 4 as the merge base
603 #
609 #
604 meta["copy"] = cp
610 meta["copy"] = cp
605 if not manifest2: # not a branch merge
611 if not manifest2: # not a branch merge
606 meta["copyrev"] = hex(manifest1.get(cp, nullid))
612 meta["copyrev"] = hex(manifest1.get(cp, nullid))
607 fp2 = nullid
613 fp2 = nullid
608 elif fp2 != nullid: # copied on remote side
614 elif fp2 != nullid: # copied on remote side
609 meta["copyrev"] = hex(manifest1.get(cp, nullid))
615 meta["copyrev"] = hex(manifest1.get(cp, nullid))
610 elif fp1 != nullid: # copied on local side, reversed
616 elif fp1 != nullid: # copied on local side, reversed
611 meta["copyrev"] = hex(manifest2.get(cp))
617 meta["copyrev"] = hex(manifest2.get(cp))
612 fp2 = fp1
618 fp2 = fp1
613 else: # directory rename
619 else: # directory rename
614 meta["copyrev"] = hex(manifest1.get(cp, nullid))
620 meta["copyrev"] = hex(manifest1.get(cp, nullid))
615 self.ui.debug(_(" %s: copy %s:%s\n") %
621 self.ui.debug(_(" %s: copy %s:%s\n") %
616 (fn, cp, meta["copyrev"]))
622 (fn, cp, meta["copyrev"]))
617 fp1 = nullid
623 fp1 = nullid
618 elif fp2 != nullid:
624 elif fp2 != nullid:
619 # is one parent an ancestor of the other?
625 # is one parent an ancestor of the other?
620 fpa = fl.ancestor(fp1, fp2)
626 fpa = fl.ancestor(fp1, fp2)
621 if fpa == fp1:
627 if fpa == fp1:
622 fp1, fp2 = fp2, nullid
628 fp1, fp2 = fp2, nullid
623 elif fpa == fp2:
629 elif fpa == fp2:
624 fp2 = nullid
630 fp2 = nullid
625
631
626 # is the file unmodified from the parent? report existing entry
632 # is the file unmodified from the parent? report existing entry
627 if fp2 == nullid and not fl.cmp(fp1, t):
633 if fp2 == nullid and not fl.cmp(fp1, t):
628 return fp1
634 return fp1
629
635
630 changelist.append(fn)
636 changelist.append(fn)
631 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
637 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
632
638
633 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
639 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
634 if p1 is None:
640 if p1 is None:
635 p1, p2 = self.dirstate.parents()
641 p1, p2 = self.dirstate.parents()
636 return self.commit(files=files, text=text, user=user, date=date,
642 return self.commit(files=files, text=text, user=user, date=date,
637 p1=p1, p2=p2, wlock=wlock, extra=extra)
643 p1=p1, p2=p2, wlock=wlock, extra=extra)
638
644
639 def commit(self, files=None, text="", user=None, date=None,
645 def commit(self, files=None, text="", user=None, date=None,
640 match=util.always, force=False, lock=None, wlock=None,
646 match=util.always, force=False, lock=None, wlock=None,
641 force_editor=False, p1=None, p2=None, extra={}):
647 force_editor=False, p1=None, p2=None, extra={}):
642
648 tr = None
643 commit = []
649 try:
644 remove = []
650 commit = []
645 changed = []
651 remove = []
646 use_dirstate = (p1 is None) # not rawcommit
652 changed = []
647 extra = extra.copy()
653 use_dirstate = (p1 is None) # not rawcommit
654 extra = extra.copy()
648
655
649 if use_dirstate:
656 if use_dirstate:
650 if files:
657 if files:
651 for f in files:
658 for f in files:
652 s = self.dirstate[f]
659 s = self.dirstate[f]
653 if s in 'nma':
660 if s in 'nma':
654 commit.append(f)
661 commit.append(f)
655 elif s == 'r':
662 elif s == 'r':
656 remove.append(f)
663 remove.append(f)
657 else:
664 else:
658 self.ui.warn(_("%s not tracked!\n") % f)
665 self.ui.warn(_("%s not tracked!\n") % f)
666 else:
667 changes = self.status(match=match)[:5]
668 modified, added, removed, deleted, unknown = changes
669 commit = modified + added
670 remove = removed
659 else:
671 else:
660 changes = self.status(match=match)[:5]
672 commit = files
661 modified, added, removed, deleted, unknown = changes
662 commit = modified + added
663 remove = removed
664 else:
665 commit = files
666
673
667 if use_dirstate:
674 if use_dirstate:
668 p1, p2 = self.dirstate.parents()
675 p1, p2 = self.dirstate.parents()
669 update_dirstate = True
676 update_dirstate = True
670 else:
677 else:
671 p1, p2 = p1, p2 or nullid
678 p1, p2 = p1, p2 or nullid
672 update_dirstate = (self.dirstate.parents()[0] == p1)
679 update_dirstate = (self.dirstate.parents()[0] == p1)
673
680
674 c1 = self.changelog.read(p1)
681 c1 = self.changelog.read(p1)
675 c2 = self.changelog.read(p2)
682 c2 = self.changelog.read(p2)
676 m1 = self.manifest.read(c1[0]).copy()
683 m1 = self.manifest.read(c1[0]).copy()
677 m2 = self.manifest.read(c2[0])
684 m2 = self.manifest.read(c2[0])
678
685
679 if use_dirstate:
686 if use_dirstate:
680 branchname = self.workingctx().branch()
687 branchname = self.workingctx().branch()
681 try:
688 try:
682 branchname = branchname.decode('UTF-8').encode('UTF-8')
689 branchname = branchname.decode('UTF-8').encode('UTF-8')
683 except UnicodeDecodeError:
690 except UnicodeDecodeError:
684 raise util.Abort(_('branch name not in UTF-8!'))
691 raise util.Abort(_('branch name not in UTF-8!'))
685 else:
692 else:
686 branchname = ""
693 branchname = ""
687
694
688 if use_dirstate:
695 if use_dirstate:
689 oldname = c1[5].get("branch") # stored in UTF-8
696 oldname = c1[5].get("branch") # stored in UTF-8
690 if (not commit and not remove and not force and p2 == nullid
697 if (not commit and not remove and not force and p2 == nullid
691 and branchname == oldname):
698 and branchname == oldname):
692 self.ui.status(_("nothing changed\n"))
699 self.ui.status(_("nothing changed\n"))
693 return None
700 return None
694
701
695 xp1 = hex(p1)
702 xp1 = hex(p1)
696 if p2 == nullid: xp2 = ''
703 if p2 == nullid: xp2 = ''
697 else: xp2 = hex(p2)
704 else: xp2 = hex(p2)
698
705
699 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
706 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
700
707
701 if not wlock:
708 if not wlock:
702 wlock = self.wlock()
709 wlock = self.wlock()
703 if not lock:
710 if not lock:
704 lock = self.lock()
711 lock = self.lock()
705 tr = self.transaction()
712 tr = self.transaction()
706
713
707 # check in files
714 # check in files
708 new = {}
715 new = {}
709 linkrev = self.changelog.count()
716 linkrev = self.changelog.count()
710 commit.sort()
717 commit.sort()
711 is_exec = util.execfunc(self.root, m1.execf)
718 is_exec = util.execfunc(self.root, m1.execf)
712 is_link = util.linkfunc(self.root, m1.linkf)
719 is_link = util.linkfunc(self.root, m1.linkf)
713 for f in commit:
720 for f in commit:
714 self.ui.note(f + "\n")
721 self.ui.note(f + "\n")
715 try:
722 try:
716 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
723 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
717 new_exec = is_exec(f)
724 new_exec = is_exec(f)
718 new_link = is_link(f)
725 new_link = is_link(f)
719 if not changed or changed[-1] != f:
726 if not changed or changed[-1] != f:
720 # mention the file in the changelog if some flag changed,
727 # mention the file in the changelog if some
721 # even if there was no content change.
728 # flag changed, even if there was no content
722 old_exec = m1.execf(f)
729 # change.
723 old_link = m1.linkf(f)
730 old_exec = m1.execf(f)
724 if old_exec != new_exec or old_link != new_link:
731 old_link = m1.linkf(f)
725 changed.append(f)
732 if old_exec != new_exec or old_link != new_link:
726 m1.set(f, new_exec, new_link)
733 changed.append(f)
727 except (OSError, IOError):
734 m1.set(f, new_exec, new_link)
728 if use_dirstate:
735 except (OSError, IOError):
729 self.ui.warn(_("trouble committing %s!\n") % f)
736 if use_dirstate:
730 raise
737 self.ui.warn(_("trouble committing %s!\n") % f)
731 else:
738 raise
732 remove.append(f)
739 else:
740 remove.append(f)
733
741
734 # update manifest
742 # update manifest
735 m1.update(new)
743 m1.update(new)
736 remove.sort()
744 remove.sort()
737 removed = []
745 removed = []
738
746
739 for f in remove:
747 for f in remove:
740 if f in m1:
748 if f in m1:
741 del m1[f]
749 del m1[f]
742 removed.append(f)
750 removed.append(f)
743 elif f in m2:
751 elif f in m2:
744 removed.append(f)
752 removed.append(f)
745 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
753 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
754 (new, removed))
746
755
747 # add changeset
756 # add changeset
748 new = new.keys()
757 new = new.keys()
749 new.sort()
758 new.sort()
750
759
751 user = user or self.ui.username()
760 user = user or self.ui.username()
752 if not text or force_editor:
761 if not text or force_editor:
753 edittext = []
762 edittext = []
754 if text:
763 if text:
755 edittext.append(text)
764 edittext.append(text)
756 edittext.append("")
765 edittext.append("")
757 edittext.append("HG: user: %s" % user)
766 edittext.append("HG: user: %s" % user)
758 if p2 != nullid:
767 if p2 != nullid:
759 edittext.append("HG: branch merge")
768 edittext.append("HG: branch merge")
760 if branchname:
769 if branchname:
761 edittext.append("HG: branch %s" % util.tolocal(branchname))
770 edittext.append("HG: branch %s" % util.tolocal(branchname))
762 edittext.extend(["HG: changed %s" % f for f in changed])
771 edittext.extend(["HG: changed %s" % f for f in changed])
763 edittext.extend(["HG: removed %s" % f for f in removed])
772 edittext.extend(["HG: removed %s" % f for f in removed])
764 if not changed and not remove:
773 if not changed and not remove:
765 edittext.append("HG: no files changed")
774 edittext.append("HG: no files changed")
766 edittext.append("")
775 edittext.append("")
767 # run editor in the repository root
776 # run editor in the repository root
768 olddir = os.getcwd()
777 olddir = os.getcwd()
769 os.chdir(self.root)
778 os.chdir(self.root)
770 text = self.ui.edit("\n".join(edittext), user)
779 text = self.ui.edit("\n".join(edittext), user)
771 os.chdir(olddir)
780 os.chdir(olddir)
772
781
773 lines = [line.rstrip() for line in text.rstrip().splitlines()]
782 lines = [line.rstrip() for line in text.rstrip().splitlines()]
774 while lines and not lines[0]:
783 while lines and not lines[0]:
775 del lines[0]
784 del lines[0]
776 if not lines:
785 if not lines:
777 return None
786 return None
778 text = '\n'.join(lines)
787 text = '\n'.join(lines)
779 if branchname:
788 if branchname:
780 extra["branch"] = branchname
789 extra["branch"] = branchname
781 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
790 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
782 user, date, extra)
791 user, date, extra)
783 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
792 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
784 parent2=xp2)
793 parent2=xp2)
785 tr.close()
794 tr.close()
786
795
787 if self.branchcache and "branch" in extra:
796 if self.branchcache and "branch" in extra:
788 self.branchcache[util.tolocal(extra["branch"])] = n
797 self.branchcache[util.tolocal(extra["branch"])] = n
789
798
790 if use_dirstate or update_dirstate:
799 if use_dirstate or update_dirstate:
791 self.dirstate.setparents(n)
800 self.dirstate.setparents(n)
792 if use_dirstate:
801 if use_dirstate:
793 for f in new:
802 for f in new:
794 self.dirstate.normal(f)
803 self.dirstate.normal(f)
795 for f in removed:
804 for f in removed:
796 self.dirstate.forget(f)
805 self.dirstate.forget(f)
797
806
798 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
807 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
799 return n
808 return n
809 finally:
810 del lock, wlock, tr
800
811
801 def walk(self, node=None, files=[], match=util.always, badmatch=None):
812 def walk(self, node=None, files=[], match=util.always, badmatch=None):
802 '''
813 '''
803 walk recursively through the directory tree or a given
814 walk recursively through the directory tree or a given
804 changeset, finding all files matched by the match
815 changeset, finding all files matched by the match
805 function
816 function
806
817
807 results are yielded in a tuple (src, filename), where src
818 results are yielded in a tuple (src, filename), where src
808 is one of:
819 is one of:
809 'f' the file was found in the directory tree
820 'f' the file was found in the directory tree
810 'm' the file was only in the dirstate and not in the tree
821 'm' the file was only in the dirstate and not in the tree
811 'b' file was not found and matched badmatch
822 'b' file was not found and matched badmatch
812 '''
823 '''
813
824
814 if node:
825 if node:
815 fdict = dict.fromkeys(files)
826 fdict = dict.fromkeys(files)
816 # for dirstate.walk, files=['.'] means "walk the whole tree".
827 # for dirstate.walk, files=['.'] means "walk the whole tree".
817 # follow that here, too
828 # follow that here, too
818 fdict.pop('.', None)
829 fdict.pop('.', None)
819 mdict = self.manifest.read(self.changelog.read(node)[0])
830 mdict = self.manifest.read(self.changelog.read(node)[0])
820 mfiles = mdict.keys()
831 mfiles = mdict.keys()
821 mfiles.sort()
832 mfiles.sort()
822 for fn in mfiles:
833 for fn in mfiles:
823 for ffn in fdict:
834 for ffn in fdict:
824 # match if the file is the exact name or a directory
835 # match if the file is the exact name or a directory
825 if ffn == fn or fn.startswith("%s/" % ffn):
836 if ffn == fn or fn.startswith("%s/" % ffn):
826 del fdict[ffn]
837 del fdict[ffn]
827 break
838 break
828 if match(fn):
839 if match(fn):
829 yield 'm', fn
840 yield 'm', fn
830 ffiles = fdict.keys()
841 ffiles = fdict.keys()
831 ffiles.sort()
842 ffiles.sort()
832 for fn in ffiles:
843 for fn in ffiles:
833 if badmatch and badmatch(fn):
844 if badmatch and badmatch(fn):
834 if match(fn):
845 if match(fn):
835 yield 'b', fn
846 yield 'b', fn
836 else:
847 else:
837 self.ui.warn(_('%s: No such file in rev %s\n')
848 self.ui.warn(_('%s: No such file in rev %s\n')
838 % (self.pathto(fn), short(node)))
849 % (self.pathto(fn), short(node)))
839 else:
850 else:
840 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
851 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
841 yield src, fn
852 yield src, fn
842
853
843 def status(self, node1=None, node2=None, files=[], match=util.always,
854 def status(self, node1=None, node2=None, files=[], match=util.always,
844 wlock=None, list_ignored=False, list_clean=False):
855 wlock=None, list_ignored=False, list_clean=False):
845 """return status of files between two nodes or node and working directory
856 """return status of files between two nodes or node and working directory
846
857
847 If node1 is None, use the first dirstate parent instead.
858 If node1 is None, use the first dirstate parent instead.
848 If node2 is None, compare node1 with working directory.
859 If node2 is None, compare node1 with working directory.
849 """
860 """
850
861
851 def fcmp(fn, getnode):
862 def fcmp(fn, getnode):
852 t1 = self.wread(fn)
863 t1 = self.wread(fn)
853 return self.file(fn).cmp(getnode(fn), t1)
864 return self.file(fn).cmp(getnode(fn), t1)
854
865
855 def mfmatches(node):
866 def mfmatches(node):
856 change = self.changelog.read(node)
867 change = self.changelog.read(node)
857 mf = self.manifest.read(change[0]).copy()
868 mf = self.manifest.read(change[0]).copy()
858 for fn in mf.keys():
869 for fn in mf.keys():
859 if not match(fn):
870 if not match(fn):
860 del mf[fn]
871 del mf[fn]
861 return mf
872 return mf
862
873
863 modified, added, removed, deleted, unknown = [], [], [], [], []
874 modified, added, removed, deleted, unknown = [], [], [], [], []
864 ignored, clean = [], []
875 ignored, clean = [], []
865
876
866 compareworking = False
877 compareworking = False
867 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
878 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
868 compareworking = True
879 compareworking = True
869
880
870 if not compareworking:
881 if not compareworking:
871 # read the manifest from node1 before the manifest from node2,
882 # read the manifest from node1 before the manifest from node2,
872 # so that we'll hit the manifest cache if we're going through
883 # so that we'll hit the manifest cache if we're going through
873 # all the revisions in parent->child order.
884 # all the revisions in parent->child order.
874 mf1 = mfmatches(node1)
885 mf1 = mfmatches(node1)
875
886
876 # are we comparing the working directory?
887 # are we comparing the working directory?
877 if not node2:
888 if not node2:
878 (lookup, modified, added, removed, deleted, unknown,
889 (lookup, modified, added, removed, deleted, unknown,
879 ignored, clean) = self.dirstate.status(files, match,
890 ignored, clean) = self.dirstate.status(files, match,
880 list_ignored, list_clean)
891 list_ignored, list_clean)
881
892
882 # are we comparing working dir against its parent?
893 # are we comparing working dir against its parent?
883 if compareworking:
894 if compareworking:
884 if lookup:
895 if lookup:
885 fixup = []
896 fixup = []
886 # do a full compare of any files that might have changed
897 # do a full compare of any files that might have changed
887 ctx = self.changectx()
898 ctx = self.changectx()
888 for f in lookup:
899 for f in lookup:
889 if f not in ctx or ctx[f].cmp(self.wread(f)):
900 if f not in ctx or ctx[f].cmp(self.wread(f)):
890 modified.append(f)
901 modified.append(f)
891 else:
902 else:
892 fixup.append(f)
903 fixup.append(f)
893 if list_clean:
904 if list_clean:
894 clean.append(f)
905 clean.append(f)
895
906
896 # update dirstate for files that are actually clean
907 # update dirstate for files that are actually clean
897 if fixup:
908 if fixup:
898 cleanup = False
909 fixlock = wlock
899 if not wlock:
910 try:
900 try:
911 if not fixlock:
901 wlock = self.wlock(False)
912 try:
902 cleanup = True
913 fixlock = self.wlock(False)
903 except lock.LockException:
914 except lock.LockException:
904 pass
915 pass
905 if wlock:
916 if fixlock:
906 for f in fixup:
917 for f in fixup:
907 self.dirstate.normal(f)
918 self.dirstate.normal(f)
908 if cleanup:
919 finally:
909 wlock.release()
920 del fixlock
910 else:
921 else:
911 # we are comparing working dir against non-parent
922 # we are comparing working dir against non-parent
912 # generate a pseudo-manifest for the working dir
923 # generate a pseudo-manifest for the working dir
913 # XXX: create it in dirstate.py ?
924 # XXX: create it in dirstate.py ?
914 mf2 = mfmatches(self.dirstate.parents()[0])
925 mf2 = mfmatches(self.dirstate.parents()[0])
915 is_exec = util.execfunc(self.root, mf2.execf)
926 is_exec = util.execfunc(self.root, mf2.execf)
916 is_link = util.linkfunc(self.root, mf2.linkf)
927 is_link = util.linkfunc(self.root, mf2.linkf)
917 for f in lookup + modified + added:
928 for f in lookup + modified + added:
918 mf2[f] = ""
929 mf2[f] = ""
919 mf2.set(f, is_exec(f), is_link(f))
930 mf2.set(f, is_exec(f), is_link(f))
920 for f in removed:
931 for f in removed:
921 if f in mf2:
932 if f in mf2:
922 del mf2[f]
933 del mf2[f]
923
934
924 else:
935 else:
925 # we are comparing two revisions
936 # we are comparing two revisions
926 mf2 = mfmatches(node2)
937 mf2 = mfmatches(node2)
927
938
928 if not compareworking:
939 if not compareworking:
929 # flush lists from dirstate before comparing manifests
940 # flush lists from dirstate before comparing manifests
930 modified, added, clean = [], [], []
941 modified, added, clean = [], [], []
931
942
932 # make sure to sort the files so we talk to the disk in a
943 # make sure to sort the files so we talk to the disk in a
933 # reasonable order
944 # reasonable order
934 mf2keys = mf2.keys()
945 mf2keys = mf2.keys()
935 mf2keys.sort()
946 mf2keys.sort()
936 getnode = lambda fn: mf1.get(fn, nullid)
947 getnode = lambda fn: mf1.get(fn, nullid)
937 for fn in mf2keys:
948 for fn in mf2keys:
938 if mf1.has_key(fn):
949 if mf1.has_key(fn):
939 if (mf1.flags(fn) != mf2.flags(fn) or
950 if (mf1.flags(fn) != mf2.flags(fn) or
940 (mf1[fn] != mf2[fn] and
951 (mf1[fn] != mf2[fn] and
941 (mf2[fn] != "" or fcmp(fn, getnode)))):
952 (mf2[fn] != "" or fcmp(fn, getnode)))):
942 modified.append(fn)
953 modified.append(fn)
943 elif list_clean:
954 elif list_clean:
944 clean.append(fn)
955 clean.append(fn)
945 del mf1[fn]
956 del mf1[fn]
946 else:
957 else:
947 added.append(fn)
958 added.append(fn)
948
959
949 removed = mf1.keys()
960 removed = mf1.keys()
950
961
951 # sort and return results:
962 # sort and return results:
952 for l in modified, added, removed, deleted, unknown, ignored, clean:
963 for l in modified, added, removed, deleted, unknown, ignored, clean:
953 l.sort()
964 l.sort()
954 return (modified, added, removed, deleted, unknown, ignored, clean)
965 return (modified, added, removed, deleted, unknown, ignored, clean)
955
966
956 def add(self, list, wlock=None):
967 def add(self, list, wlock=None):
957 if not wlock:
968 try:
958 wlock = self.wlock()
969 if not wlock:
959 for f in list:
970 wlock = self.wlock()
960 p = self.wjoin(f)
971 for f in list:
961 try:
972 p = self.wjoin(f)
962 st = os.lstat(p)
973 try:
963 except:
974 st = os.lstat(p)
964 self.ui.warn(_("%s does not exist!\n") % f)
975 except:
965 continue
976 self.ui.warn(_("%s does not exist!\n") % f)
966 if st.st_size > 10000000:
977 continue
967 self.ui.warn(_("%s: files over 10MB may cause memory and"
978 if st.st_size > 10000000:
968 " performance problems\n"
979 self.ui.warn(_("%s: files over 10MB may cause memory and"
969 "(use 'hg revert %s' to unadd the file)\n")
980 " performance problems\n"
970 % (f, f))
981 "(use 'hg revert %s' to unadd the file)\n")
971 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
982 % (f, f))
972 self.ui.warn(_("%s not added: only files and symlinks "
983 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
973 "supported currently\n") % f)
984 self.ui.warn(_("%s not added: only files and symlinks "
974 elif self.dirstate[f] in 'an':
985 "supported currently\n") % f)
975 self.ui.warn(_("%s already tracked!\n") % f)
986 elif self.dirstate[f] in 'an':
976 else:
987 self.ui.warn(_("%s already tracked!\n") % f)
977 self.dirstate.add(f)
988 else:
989 self.dirstate.add(f)
990 finally:
991 del wlock
978
992
979 def forget(self, list, wlock=None):
993 def forget(self, list, wlock=None):
980 if not wlock:
994 try:
981 wlock = self.wlock()
995 if not wlock:
982 for f in list:
996 wlock = self.wlock()
983 if self.dirstate[f] != 'a':
997 for f in list:
984 self.ui.warn(_("%s not added!\n") % f)
998 if self.dirstate[f] != 'a':
985 else:
999 self.ui.warn(_("%s not added!\n") % f)
986 self.dirstate.forget(f)
1000 else:
1001 self.dirstate.forget(f)
1002 finally:
1003 del wlock
987
1004
988 def remove(self, list, unlink=False, wlock=None):
1005 def remove(self, list, unlink=False, wlock=None):
989 if unlink:
1006 try:
1007 if unlink:
1008 for f in list:
1009 try:
1010 util.unlink(self.wjoin(f))
1011 except OSError, inst:
1012 if inst.errno != errno.ENOENT:
1013 raise
1014 if not wlock:
1015 wlock = self.wlock()
990 for f in list:
1016 for f in list:
991 try:
1017 if unlink and os.path.exists(self.wjoin(f)):
992 util.unlink(self.wjoin(f))
1018 self.ui.warn(_("%s still exists!\n") % f)
993 except OSError, inst:
1019 elif self.dirstate[f] == 'a':
994 if inst.errno != errno.ENOENT:
1020 self.dirstate.forget(f)
995 raise
1021 elif f not in self.dirstate:
996 if not wlock:
1022 self.ui.warn(_("%s not tracked!\n") % f)
997 wlock = self.wlock()
1023 else:
998 for f in list:
1024 self.dirstate.remove(f)
999 if unlink and os.path.exists(self.wjoin(f)):
1025 finally:
1000 self.ui.warn(_("%s still exists!\n") % f)
1026 del wlock
1001 elif self.dirstate[f] == 'a':
1002 self.dirstate.forget(f)
1003 elif f not in self.dirstate:
1004 self.ui.warn(_("%s not tracked!\n") % f)
1005 else:
1006 self.dirstate.remove(f)
1007
1027
1008 def undelete(self, list, wlock=None):
1028 def undelete(self, list, wlock=None):
1009 p = self.dirstate.parents()[0]
1029 try:
1010 mn = self.changelog.read(p)[0]
1030 p = self.dirstate.parents()[0]
1011 m = self.manifest.read(mn)
1031 mn = self.changelog.read(p)[0]
1012 if not wlock:
1032 m = self.manifest.read(mn)
1013 wlock = self.wlock()
1033 if not wlock:
1014 for f in list:
1034 wlock = self.wlock()
1015 if self.dirstate[f] != 'r':
1035 for f in list:
1016 self.ui.warn("%s not removed!\n" % f)
1036 if self.dirstate[f] != 'r':
1017 else:
1037 self.ui.warn("%s not removed!\n" % f)
1018 t = self.file(f).read(m[f])
1038 else:
1019 self.wwrite(f, t, m.flags(f))
1039 t = self.file(f).read(m[f])
1020 self.dirstate.normal(f)
1040 self.wwrite(f, t, m.flags(f))
1041 self.dirstate.normal(f)
1042 finally:
1043 del wlock
1021
1044
1022 def copy(self, source, dest, wlock=None):
1045 def copy(self, source, dest, wlock=None):
1023 p = self.wjoin(dest)
1046 try:
1024 if not (os.path.exists(p) or os.path.islink(p)):
1047 p = self.wjoin(dest)
1025 self.ui.warn(_("%s does not exist!\n") % dest)
1048 if not (os.path.exists(p) or os.path.islink(p)):
1026 elif not (os.path.isfile(p) or os.path.islink(p)):
1049 self.ui.warn(_("%s does not exist!\n") % dest)
1027 self.ui.warn(_("copy failed: %s is not a file or a "
1050 elif not (os.path.isfile(p) or os.path.islink(p)):
1028 "symbolic link\n") % dest)
1051 self.ui.warn(_("copy failed: %s is not a file or a "
1029 else:
1052 "symbolic link\n") % dest)
1030 if not wlock:
1053 else:
1031 wlock = self.wlock()
1054 if not wlock:
1032 if dest not in self.dirstate:
1055 wlock = self.wlock()
1033 self.dirstate.add(dest)
1056 if dest not in self.dirstate:
1034 self.dirstate.copy(source, dest)
1057 self.dirstate.add(dest)
1058 self.dirstate.copy(source, dest)
1059 finally:
1060 del wlock
1035
1061
1036 def heads(self, start=None):
1062 def heads(self, start=None):
1037 heads = self.changelog.heads(start)
1063 heads = self.changelog.heads(start)
1038 # sort the output in rev descending order
1064 # sort the output in rev descending order
1039 heads = [(-self.changelog.rev(h), h) for h in heads]
1065 heads = [(-self.changelog.rev(h), h) for h in heads]
1040 heads.sort()
1066 heads.sort()
1041 return [n for (r, n) in heads]
1067 return [n for (r, n) in heads]
1042
1068
1043 def branchheads(self, branch, start=None):
1069 def branchheads(self, branch, start=None):
1044 branches = self.branchtags()
1070 branches = self.branchtags()
1045 if branch not in branches:
1071 if branch not in branches:
1046 return []
1072 return []
1047 # The basic algorithm is this:
1073 # The basic algorithm is this:
1048 #
1074 #
1049 # Start from the branch tip since there are no later revisions that can
1075 # Start from the branch tip since there are no later revisions that can
1050 # possibly be in this branch, and the tip is a guaranteed head.
1076 # possibly be in this branch, and the tip is a guaranteed head.
1051 #
1077 #
1052 # Remember the tip's parents as the first ancestors, since these by
1078 # Remember the tip's parents as the first ancestors, since these by
1053 # definition are not heads.
1079 # definition are not heads.
1054 #
1080 #
1055 # Step backwards from the brach tip through all the revisions. We are
1081 # Step backwards from the brach tip through all the revisions. We are
1056 # guaranteed by the rules of Mercurial that we will now be visiting the
1082 # guaranteed by the rules of Mercurial that we will now be visiting the
1057 # nodes in reverse topological order (children before parents).
1083 # nodes in reverse topological order (children before parents).
1058 #
1084 #
1059 # If a revision is one of the ancestors of a head then we can toss it
1085 # If a revision is one of the ancestors of a head then we can toss it
1060 # out of the ancestors set (we've already found it and won't be
1086 # out of the ancestors set (we've already found it and won't be
1061 # visiting it again) and put its parents in the ancestors set.
1087 # visiting it again) and put its parents in the ancestors set.
1062 #
1088 #
1063 # Otherwise, if a revision is in the branch it's another head, since it
1089 # Otherwise, if a revision is in the branch it's another head, since it
1064 # wasn't in the ancestor list of an existing head. So add it to the
1090 # wasn't in the ancestor list of an existing head. So add it to the
1065 # head list, and add its parents to the ancestor list.
1091 # head list, and add its parents to the ancestor list.
1066 #
1092 #
1067 # If it is not in the branch ignore it.
1093 # If it is not in the branch ignore it.
1068 #
1094 #
1069 # Once we have a list of heads, use nodesbetween to filter out all the
1095 # Once we have a list of heads, use nodesbetween to filter out all the
1070 # heads that cannot be reached from startrev. There may be a more
1096 # heads that cannot be reached from startrev. There may be a more
1071 # efficient way to do this as part of the previous algorithm.
1097 # efficient way to do this as part of the previous algorithm.
1072
1098
1073 set = util.set
1099 set = util.set
1074 heads = [self.changelog.rev(branches[branch])]
1100 heads = [self.changelog.rev(branches[branch])]
1075 # Don't care if ancestors contains nullrev or not.
1101 # Don't care if ancestors contains nullrev or not.
1076 ancestors = set(self.changelog.parentrevs(heads[0]))
1102 ancestors = set(self.changelog.parentrevs(heads[0]))
1077 for rev in xrange(heads[0] - 1, nullrev, -1):
1103 for rev in xrange(heads[0] - 1, nullrev, -1):
1078 if rev in ancestors:
1104 if rev in ancestors:
1079 ancestors.update(self.changelog.parentrevs(rev))
1105 ancestors.update(self.changelog.parentrevs(rev))
1080 ancestors.remove(rev)
1106 ancestors.remove(rev)
1081 elif self.changectx(rev).branch() == branch:
1107 elif self.changectx(rev).branch() == branch:
1082 heads.append(rev)
1108 heads.append(rev)
1083 ancestors.update(self.changelog.parentrevs(rev))
1109 ancestors.update(self.changelog.parentrevs(rev))
1084 heads = [self.changelog.node(rev) for rev in heads]
1110 heads = [self.changelog.node(rev) for rev in heads]
1085 if start is not None:
1111 if start is not None:
1086 heads = self.changelog.nodesbetween([start], heads)[2]
1112 heads = self.changelog.nodesbetween([start], heads)[2]
1087 return heads
1113 return heads
1088
1114
1089 def branches(self, nodes):
1115 def branches(self, nodes):
1090 if not nodes:
1116 if not nodes:
1091 nodes = [self.changelog.tip()]
1117 nodes = [self.changelog.tip()]
1092 b = []
1118 b = []
1093 for n in nodes:
1119 for n in nodes:
1094 t = n
1120 t = n
1095 while 1:
1121 while 1:
1096 p = self.changelog.parents(n)
1122 p = self.changelog.parents(n)
1097 if p[1] != nullid or p[0] == nullid:
1123 if p[1] != nullid or p[0] == nullid:
1098 b.append((t, n, p[0], p[1]))
1124 b.append((t, n, p[0], p[1]))
1099 break
1125 break
1100 n = p[0]
1126 n = p[0]
1101 return b
1127 return b
1102
1128
1103 def between(self, pairs):
1129 def between(self, pairs):
1104 r = []
1130 r = []
1105
1131
1106 for top, bottom in pairs:
1132 for top, bottom in pairs:
1107 n, l, i = top, [], 0
1133 n, l, i = top, [], 0
1108 f = 1
1134 f = 1
1109
1135
1110 while n != bottom:
1136 while n != bottom:
1111 p = self.changelog.parents(n)[0]
1137 p = self.changelog.parents(n)[0]
1112 if i == f:
1138 if i == f:
1113 l.append(n)
1139 l.append(n)
1114 f = f * 2
1140 f = f * 2
1115 n = p
1141 n = p
1116 i += 1
1142 i += 1
1117
1143
1118 r.append(l)
1144 r.append(l)
1119
1145
1120 return r
1146 return r
1121
1147
1122 def findincoming(self, remote, base=None, heads=None, force=False):
1148 def findincoming(self, remote, base=None, heads=None, force=False):
1123 """Return list of roots of the subsets of missing nodes from remote
1149 """Return list of roots of the subsets of missing nodes from remote
1124
1150
1125 If base dict is specified, assume that these nodes and their parents
1151 If base dict is specified, assume that these nodes and their parents
1126 exist on the remote side and that no child of a node of base exists
1152 exist on the remote side and that no child of a node of base exists
1127 in both remote and self.
1153 in both remote and self.
1128 Furthermore base will be updated to include the nodes that exists
1154 Furthermore base will be updated to include the nodes that exists
1129 in self and remote but no children exists in self and remote.
1155 in self and remote but no children exists in self and remote.
1130 If a list of heads is specified, return only nodes which are heads
1156 If a list of heads is specified, return only nodes which are heads
1131 or ancestors of these heads.
1157 or ancestors of these heads.
1132
1158
1133 All the ancestors of base are in self and in remote.
1159 All the ancestors of base are in self and in remote.
1134 All the descendants of the list returned are missing in self.
1160 All the descendants of the list returned are missing in self.
1135 (and so we know that the rest of the nodes are missing in remote, see
1161 (and so we know that the rest of the nodes are missing in remote, see
1136 outgoing)
1162 outgoing)
1137 """
1163 """
1138 m = self.changelog.nodemap
1164 m = self.changelog.nodemap
1139 search = []
1165 search = []
1140 fetch = {}
1166 fetch = {}
1141 seen = {}
1167 seen = {}
1142 seenbranch = {}
1168 seenbranch = {}
1143 if base == None:
1169 if base == None:
1144 base = {}
1170 base = {}
1145
1171
1146 if not heads:
1172 if not heads:
1147 heads = remote.heads()
1173 heads = remote.heads()
1148
1174
1149 if self.changelog.tip() == nullid:
1175 if self.changelog.tip() == nullid:
1150 base[nullid] = 1
1176 base[nullid] = 1
1151 if heads != [nullid]:
1177 if heads != [nullid]:
1152 return [nullid]
1178 return [nullid]
1153 return []
1179 return []
1154
1180
1155 # assume we're closer to the tip than the root
1181 # assume we're closer to the tip than the root
1156 # and start by examining the heads
1182 # and start by examining the heads
1157 self.ui.status(_("searching for changes\n"))
1183 self.ui.status(_("searching for changes\n"))
1158
1184
1159 unknown = []
1185 unknown = []
1160 for h in heads:
1186 for h in heads:
1161 if h not in m:
1187 if h not in m:
1162 unknown.append(h)
1188 unknown.append(h)
1163 else:
1189 else:
1164 base[h] = 1
1190 base[h] = 1
1165
1191
1166 if not unknown:
1192 if not unknown:
1167 return []
1193 return []
1168
1194
1169 req = dict.fromkeys(unknown)
1195 req = dict.fromkeys(unknown)
1170 reqcnt = 0
1196 reqcnt = 0
1171
1197
1172 # search through remote branches
1198 # search through remote branches
1173 # a 'branch' here is a linear segment of history, with four parts:
1199 # a 'branch' here is a linear segment of history, with four parts:
1174 # head, root, first parent, second parent
1200 # head, root, first parent, second parent
1175 # (a branch always has two parents (or none) by definition)
1201 # (a branch always has two parents (or none) by definition)
1176 unknown = remote.branches(unknown)
1202 unknown = remote.branches(unknown)
1177 while unknown:
1203 while unknown:
1178 r = []
1204 r = []
1179 while unknown:
1205 while unknown:
1180 n = unknown.pop(0)
1206 n = unknown.pop(0)
1181 if n[0] in seen:
1207 if n[0] in seen:
1182 continue
1208 continue
1183
1209
1184 self.ui.debug(_("examining %s:%s\n")
1210 self.ui.debug(_("examining %s:%s\n")
1185 % (short(n[0]), short(n[1])))
1211 % (short(n[0]), short(n[1])))
1186 if n[0] == nullid: # found the end of the branch
1212 if n[0] == nullid: # found the end of the branch
1187 pass
1213 pass
1188 elif n in seenbranch:
1214 elif n in seenbranch:
1189 self.ui.debug(_("branch already found\n"))
1215 self.ui.debug(_("branch already found\n"))
1190 continue
1216 continue
1191 elif n[1] and n[1] in m: # do we know the base?
1217 elif n[1] and n[1] in m: # do we know the base?
1192 self.ui.debug(_("found incomplete branch %s:%s\n")
1218 self.ui.debug(_("found incomplete branch %s:%s\n")
1193 % (short(n[0]), short(n[1])))
1219 % (short(n[0]), short(n[1])))
1194 search.append(n) # schedule branch range for scanning
1220 search.append(n) # schedule branch range for scanning
1195 seenbranch[n] = 1
1221 seenbranch[n] = 1
1196 else:
1222 else:
1197 if n[1] not in seen and n[1] not in fetch:
1223 if n[1] not in seen and n[1] not in fetch:
1198 if n[2] in m and n[3] in m:
1224 if n[2] in m and n[3] in m:
1199 self.ui.debug(_("found new changeset %s\n") %
1225 self.ui.debug(_("found new changeset %s\n") %
1200 short(n[1]))
1226 short(n[1]))
1201 fetch[n[1]] = 1 # earliest unknown
1227 fetch[n[1]] = 1 # earliest unknown
1202 for p in n[2:4]:
1228 for p in n[2:4]:
1203 if p in m:
1229 if p in m:
1204 base[p] = 1 # latest known
1230 base[p] = 1 # latest known
1205
1231
1206 for p in n[2:4]:
1232 for p in n[2:4]:
1207 if p not in req and p not in m:
1233 if p not in req and p not in m:
1208 r.append(p)
1234 r.append(p)
1209 req[p] = 1
1235 req[p] = 1
1210 seen[n[0]] = 1
1236 seen[n[0]] = 1
1211
1237
1212 if r:
1238 if r:
1213 reqcnt += 1
1239 reqcnt += 1
1214 self.ui.debug(_("request %d: %s\n") %
1240 self.ui.debug(_("request %d: %s\n") %
1215 (reqcnt, " ".join(map(short, r))))
1241 (reqcnt, " ".join(map(short, r))))
1216 for p in xrange(0, len(r), 10):
1242 for p in xrange(0, len(r), 10):
1217 for b in remote.branches(r[p:p+10]):
1243 for b in remote.branches(r[p:p+10]):
1218 self.ui.debug(_("received %s:%s\n") %
1244 self.ui.debug(_("received %s:%s\n") %
1219 (short(b[0]), short(b[1])))
1245 (short(b[0]), short(b[1])))
1220 unknown.append(b)
1246 unknown.append(b)
1221
1247
1222 # do binary search on the branches we found
1248 # do binary search on the branches we found
1223 while search:
1249 while search:
1224 n = search.pop(0)
1250 n = search.pop(0)
1225 reqcnt += 1
1251 reqcnt += 1
1226 l = remote.between([(n[0], n[1])])[0]
1252 l = remote.between([(n[0], n[1])])[0]
1227 l.append(n[1])
1253 l.append(n[1])
1228 p = n[0]
1254 p = n[0]
1229 f = 1
1255 f = 1
1230 for i in l:
1256 for i in l:
1231 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1257 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1232 if i in m:
1258 if i in m:
1233 if f <= 2:
1259 if f <= 2:
1234 self.ui.debug(_("found new branch changeset %s\n") %
1260 self.ui.debug(_("found new branch changeset %s\n") %
1235 short(p))
1261 short(p))
1236 fetch[p] = 1
1262 fetch[p] = 1
1237 base[i] = 1
1263 base[i] = 1
1238 else:
1264 else:
1239 self.ui.debug(_("narrowed branch search to %s:%s\n")
1265 self.ui.debug(_("narrowed branch search to %s:%s\n")
1240 % (short(p), short(i)))
1266 % (short(p), short(i)))
1241 search.append((p, i))
1267 search.append((p, i))
1242 break
1268 break
1243 p, f = i, f * 2
1269 p, f = i, f * 2
1244
1270
1245 # sanity check our fetch list
1271 # sanity check our fetch list
1246 for f in fetch.keys():
1272 for f in fetch.keys():
1247 if f in m:
1273 if f in m:
1248 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1274 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1249
1275
1250 if base.keys() == [nullid]:
1276 if base.keys() == [nullid]:
1251 if force:
1277 if force:
1252 self.ui.warn(_("warning: repository is unrelated\n"))
1278 self.ui.warn(_("warning: repository is unrelated\n"))
1253 else:
1279 else:
1254 raise util.Abort(_("repository is unrelated"))
1280 raise util.Abort(_("repository is unrelated"))
1255
1281
1256 self.ui.debug(_("found new changesets starting at ") +
1282 self.ui.debug(_("found new changesets starting at ") +
1257 " ".join([short(f) for f in fetch]) + "\n")
1283 " ".join([short(f) for f in fetch]) + "\n")
1258
1284
1259 self.ui.debug(_("%d total queries\n") % reqcnt)
1285 self.ui.debug(_("%d total queries\n") % reqcnt)
1260
1286
1261 return fetch.keys()
1287 return fetch.keys()
1262
1288
1263 def findoutgoing(self, remote, base=None, heads=None, force=False):
1289 def findoutgoing(self, remote, base=None, heads=None, force=False):
1264 """Return list of nodes that are roots of subsets not in remote
1290 """Return list of nodes that are roots of subsets not in remote
1265
1291
1266 If base dict is specified, assume that these nodes and their parents
1292 If base dict is specified, assume that these nodes and their parents
1267 exist on the remote side.
1293 exist on the remote side.
1268 If a list of heads is specified, return only nodes which are heads
1294 If a list of heads is specified, return only nodes which are heads
1269 or ancestors of these heads, and return a second element which
1295 or ancestors of these heads, and return a second element which
1270 contains all remote heads which get new children.
1296 contains all remote heads which get new children.
1271 """
1297 """
1272 if base == None:
1298 if base == None:
1273 base = {}
1299 base = {}
1274 self.findincoming(remote, base, heads, force=force)
1300 self.findincoming(remote, base, heads, force=force)
1275
1301
1276 self.ui.debug(_("common changesets up to ")
1302 self.ui.debug(_("common changesets up to ")
1277 + " ".join(map(short, base.keys())) + "\n")
1303 + " ".join(map(short, base.keys())) + "\n")
1278
1304
1279 remain = dict.fromkeys(self.changelog.nodemap)
1305 remain = dict.fromkeys(self.changelog.nodemap)
1280
1306
1281 # prune everything remote has from the tree
1307 # prune everything remote has from the tree
1282 del remain[nullid]
1308 del remain[nullid]
1283 remove = base.keys()
1309 remove = base.keys()
1284 while remove:
1310 while remove:
1285 n = remove.pop(0)
1311 n = remove.pop(0)
1286 if n in remain:
1312 if n in remain:
1287 del remain[n]
1313 del remain[n]
1288 for p in self.changelog.parents(n):
1314 for p in self.changelog.parents(n):
1289 remove.append(p)
1315 remove.append(p)
1290
1316
1291 # find every node whose parents have been pruned
1317 # find every node whose parents have been pruned
1292 subset = []
1318 subset = []
1293 # find every remote head that will get new children
1319 # find every remote head that will get new children
1294 updated_heads = {}
1320 updated_heads = {}
1295 for n in remain:
1321 for n in remain:
1296 p1, p2 = self.changelog.parents(n)
1322 p1, p2 = self.changelog.parents(n)
1297 if p1 not in remain and p2 not in remain:
1323 if p1 not in remain and p2 not in remain:
1298 subset.append(n)
1324 subset.append(n)
1299 if heads:
1325 if heads:
1300 if p1 in heads:
1326 if p1 in heads:
1301 updated_heads[p1] = True
1327 updated_heads[p1] = True
1302 if p2 in heads:
1328 if p2 in heads:
1303 updated_heads[p2] = True
1329 updated_heads[p2] = True
1304
1330
1305 # this is the set of all roots we have to push
1331 # this is the set of all roots we have to push
1306 if heads:
1332 if heads:
1307 return subset, updated_heads.keys()
1333 return subset, updated_heads.keys()
1308 else:
1334 else:
1309 return subset
1335 return subset
1310
1336
def pull(self, remote, heads=None, force=False, lock=None):
    """Pull missing changesets from remote into this repository.

    If the caller did not hand us a lock, one is taken here; either
    way the local reference is dropped in the finally clause so the
    lock object can release itself.  Returns the result of
    addchangegroup, or 0 when there was nothing to pull.
    """
    try:
        if not lock:
            lock = self.lock()
        fetch = self.findincoming(remote, force=force)
        if fetch == [nullid]:
            self.ui.status(_("requesting all changes\n"))

        if not fetch:
            # nothing missing on our side
            self.ui.status(_("no changes found\n"))
            return 0

        if heads is None:
            cg = remote.changegroup(fetch, 'pull')
        elif 'changegroupsubset' not in remote.capabilities:
            # a partial pull needs server-side support
            raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
        else:
            cg = remote.changegroupsubset(fetch, heads, 'pull')
        return self.addchangegroup(cg, 'pull', remote.url())
    finally:
        del lock
1358
def push(self, remote, force=False, revs=None):
    """Push local changesets to remote, picking the right transport.

    There are two ways to push to a remote repo:

    - addchangegroup assumes the local user can lock the remote
      repo (local filesystem, old ssh servers).
    - unbundle assumes the local user cannot lock the remote repo
      (new ssh servers, http servers).
    """
    if remote.capable('unbundle'):
        return self.push_unbundle(remote, force, revs)
    return self.push_addchangegroup(remote, force, revs)
1349
1371
def prepush(self, remote, force, revs):
    """Compute what a push would send and sanity-check it.

    Returns a (changegroup, remote_heads) pair on success, or
    (None, 1) when there is nothing to push or the push would create
    new remote heads without --force.
    """
    base = {}
    remote_heads = remote.heads()
    inc = self.findincoming(remote, base, remote_heads, force=force)

    update, updated_heads = self.findoutgoing(remote, base, remote_heads)
    if revs is not None:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
    else:
        bases, heads = update, self.changelog.heads()

    if not bases:
        self.ui.status(_("no changes found\n"))
        return None, 1
    elif not force:
        # Check whether we would create new remote heads.  After the
        # push, a node is a remote head iff it is:
        # - unknown locally, or
        # - a local outgoing head descended from update, or
        # - a remote head known locally that is not ancestral to an
        #   outgoing head.
        warn = 0

        if remote_heads == [nullid]:
            # empty remote repo: anything goes
            warn = 0
        elif not revs and len(heads) > len(remote_heads):
            warn = 1
        else:
            newheads = list(heads)
            for rh in remote_heads:
                if rh in self.changelog.nodemap:
                    # remote head we know about: still a head unless
                    # one of our outgoing heads descends from it
                    descendants = self.changelog.heads(rh, heads)
                    if not [h for h in heads if h in descendants]:
                        newheads.append(rh)
                else:
                    # unknown locally, so it stays a head
                    newheads.append(rh)
            if len(newheads) > len(remote_heads):
                warn = 1

        if warn:
            self.ui.warn(_("abort: push creates new remote branches!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
1405
1427
def push_addchangegroup(self, remote, force, revs):
    """Push by locking the remote repo and feeding it a changegroup.

    The lock reference is dropped in the finally clause so it is
    released however prepush/addchangegroup exit.
    """
    lock = remote.lock()
    try:
        prepared = self.prepush(remote, force, revs)
        if prepared[0] is not None:
            cg, remote_heads = prepared
            return remote.addchangegroup(cg, 'push', self.url())
        return prepared[1]
    finally:
        del lock
1414
1438
def push_unbundle(self, remote, force, revs):
    """Push via the unbundle protocol (no remote lock needed).

    The local repo finds heads on the server and works out which revs
    it must push.  Once those revs are transferred, if the server
    finds it has different heads (someone else won a commit/push
    race), it aborts.
    """
    prepared = self.prepush(remote, force, revs)
    if prepared[0] is None:
        return prepared[1]
    cg, remote_heads = prepared
    if force:
        remote_heads = ['force']
    return remote.unbundle(cg, remote_heads, 'push')
1427
1451
def changegroupinfo(self, nodes):
    """Report how many changesets were found; list them when debugging."""
    self.ui.note(_("%d changesets found\n") % len(nodes))
    if self.ui.debugflag:
        self.ui.debug(_("List of changesets:\n"))
        for n in nodes:
            self.ui.debug("%s\n" % hex(n))
1434
1458
def changegroupsubset(self, bases, heads, source):
    """Generate a changegroup of every node that is a descendent of
    any of the bases and an ancestor of any of the heads.

    This is fairly complex: determining which filenodes and manifest
    nodes must be included for the changegroup to be complete is
    non-trivial, as is the reverse problem of figuring out which
    changeset a particular filenode or manifestnode belongs to.
    """
    self.hook('preoutgoing', throw=True, source=source)

    cl = self.changelog
    # msng is short for missing: the changesets carried by this
    # changegroup.  nodesbetween also minimizes bases and heads to the
    # smallest sets that re-create the changegroup.
    msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
    self.changegroupinfo(msng_cl_lst)

    # Known heads are the heads the recipient is assumed to already
    # know about; every parent of a base is assumed known.
    knownheads = {}
    for n in bases:
        for p in cl.parents(n):
            if p != nullid:
                knownheads[p] = 1
    knownheads = knownheads.keys()
    if knownheads:
        # The recipient must know every changeset required to reach
        # the known heads from the null changeset.
        has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
        junk = None
        # Turn the list into an ersatz set.
        has_cl_set = dict.fromkeys(has_cl_set)
    else:
        # No known heads, so the recipient cannot be assumed to know
        # about any changeset.
        has_cl_set = {}

    mnfst = self.manifest
    # We don't yet know which manifests or filenodes are missing.
    msng_mnfst_set = {}
    msng_filenode_set = {}

    junk = mnfst.index[mnfst.count() - 1]  # Get around a bug in lazyindex
    junk = None

    def identity(x):
        # A changeset always belongs to itself, so its changenode
        # lookup function is the identity.
        return x

    def cmp_by_rev_func(revlog):
        # Build a comparator ordering nodes by revision number in the
        # given revlog.  That order is both the most efficient read
        # order and a topological sort, which makes it widely useful.
        def cmp_by_rev(a, b):
            return cmp(revlog.rev(a), revlog.rev(b))
        return cmp_by_rev

    def prune_parents(revlog, hasset, msngset):
        # If the recipient must have a node, it must have all of that
        # node's ancestors too; drop them from the missing set.
        haslst = hasset.keys()
        haslst.sort(cmp_by_rev_func(revlog))
        for node in haslst:
            parentlst = [p for p in revlog.parents(node) if p != nullid]
            while parentlst:
                n = parentlst.pop()
                if n not in hasset:
                    hasset[n] = 1
                    p = [p for p in revlog.parents(n) if p != nullid]
                    parentlst.extend(p)
        for n in hasset:
            msngset.pop(n, None)

    def manifest_and_file_collector(changedfileset):
        # Information gatherer run for each changeset node that goes
        # out as part of the changegroup.  It records which manifest
        # nodes are potentially required (the recipient may already
        # have them) and the total set of files changed by any
        # changeset in the group.  It also remembers the first
        # changenode seen referencing each manifest, so we can later
        # decide which changenode 'owns' that manifest.
        def collect_manifests_and_files(clnode):
            c = cl.read(clnode)
            for f in c[3]:
                # Keep only one instance of each filename string.
                changedfileset.setdefault(f, f)
            msng_mnfst_set.setdefault(c[0], clnode)
        return collect_manifests_and_files

    def prune_manifests():
        # Figure out which candidate manifest nodes the recipient must
        # already know about and remove them from the changegroup.
        has_mnfst_set = {}
        for n in msng_mnfst_set:
            # If a 'missing' manifest links to a changenode the
            # recipient is assumed to have, the recipient obviously
            # must have that manifest as well.
            linknode = cl.node(mnfst.linkrev(n))
            if linknode in has_cl_set:
                has_mnfst_set[n] = 1
        prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

    def lookup_manifest_link(mnfstnode):
        # Map a manifestnode to its owning changenode, using the data
        # gathered by collect_manifests_and_files.
        return msng_mnfst_set[mnfstnode]

    def filenode_collector(changedfiles):
        next_rev = [0]
        # Run for each manifestnode included in the changegroup; finds
        # which filenodes the manifest references so those can be
        # included too.  Each filenode is attributed to the changenode
        # of the first manifest that references it.
        def collect_msng_filenodes(mnfstnode):
            r = mnfst.rev(mnfstnode)
            if r == next_rev[0]:
                # The last rev we looked at was the one just previous,
                # so a diff is all we need to inspect.
                delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                for dline in delta.splitlines():
                    # each delta line carries a filename and filenode
                    f, fnode = dline.split('\0')
                    fnode = bin(fnode[:40])
                    f = changedfiles.get(f, None)
                    # Only proceed for files we actually care about.
                    if f is not None:
                        # changenode this manifest belongs to
                        clnode = msng_mnfst_set[mnfstnode]
                        # set of filenodes for this file, created on
                        # first use
                        ndset = msng_filenode_set.setdefault(f, {})
                        # record the filenode's changelog node unless
                        # one was already recorded
                        ndset.setdefault(fnode, clnode)
            else:
                # Otherwise we need to read the full manifest.
                m = mnfst.read(mnfstnode)
                for f in changedfiles:
                    fnode = m.get(f, None)
                    # Only if the file is in this manifest.
                    if fnode is not None:
                        # See comments above.
                        clnode = msng_mnfst_set[mnfstnode]
                        ndset = msng_filenode_set.setdefault(f, {})
                        ndset.setdefault(fnode, clnode)
            # Remember the revision we hope to see next.
            next_rev[0] = r + 1
        return collect_msng_filenodes

    def prune_filenodes(f, filerevlog):
        # From the filenodes we think we need for f, remove all those
        # the recipient must already have.
        msngset = msng_filenode_set[f]
        hasset = {}
        # A 'missing' filenode linking to a changenode the recipient
        # is assumed to have implies the recipient has the filenode.
        for n in msngset:
            clnode = cl.node(filerevlog.linkrev(n))
            if clnode in has_cl_set:
                hasset[n] = 1
        prune_parents(filerevlog, hasset, msngset)

    def lookup_filenode_link_func(fname):
        msngset = msng_filenode_set[fname]
        # Look up the changenode a filenode of fname belongs to.
        def lookup_filenode_link(fnode):
            return msngset[fnode]
        return lookup_filenode_link

    def gengroup():
        # With the utility functions above, actually stream the group:
        # changelog first, then manifests, then one section per file.
        changedfiles = {}
        # The changenode group generator calls back into our collector
        # to record owning changenodes and changed files.
        group = cl.group(msng_cl_lst, identity,
                         manifest_and_file_collector(changedfiles))
        for chnk in group:
            yield chnk

        # The generator's callbacks have now populated the manifest
        # set; prune and emit it.
        prune_manifests()
        msng_mnfst_lst = msng_mnfst_set.keys()
        # Sort the manifestnodes by revision number.
        msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
        # This generator calls our lookup and collection callbacks.
        group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                            filenode_collector(changedfiles))
        for chnk in group:
            yield chnk

        # No longer needed; dereference and free the memory.
        msng_mnfst_lst = None
        msng_mnfst_set.clear()

        changedfiles = changedfiles.keys()
        changedfiles.sort()
        # Walk the files in name order.
        for fname in changedfiles:
            filerevlog = self.file(fname)
            # Toss out the filenodes the recipient isn't really
            # missing.
            if msng_filenode_set.has_key(fname):
                prune_filenodes(fname, filerevlog)
                msng_filenode_lst = msng_filenode_set[fname].keys()
            else:
                msng_filenode_lst = []
            # Only emit a section for this file if any filenodes are
            # left after pruning.
            if len(msng_filenode_lst) > 0:
                yield changegroup.genchunk(fname)
                # Sort the filenodes by their revision number.
                msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                # Only a changenode lookup function is passed here;
                # no further information needs collecting.
                group = filerevlog.group(msng_filenode_lst,
                                         lookup_filenode_link_func(fname))
                for chnk in group:
                    yield chnk
            if msng_filenode_set.has_key(fname):
                # Don't need this anymore; toss it to free memory.
                del msng_filenode_set[fname]
        # Signal that no more groups are left.
        yield changegroup.closechunk()

    if msng_cl_lst:
        self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

    return util.chunkbuffer(gengroup())
1706
1730
1707 def changegroup(self, basenodes, source):
1731 def changegroup(self, basenodes, source):
1708 """Generate a changegroup of all nodes that we have that a recipient
1732 """Generate a changegroup of all nodes that we have that a recipient
1709 doesn't.
1733 doesn't.
1710
1734
1711 This is much easier than the previous function as we can assume that
1735 This is much easier than the previous function as we can assume that
1712 the recipient has any changenode we aren't sending them."""
1736 the recipient has any changenode we aren't sending them."""
1713
1737
1714 self.hook('preoutgoing', throw=True, source=source)
1738 self.hook('preoutgoing', throw=True, source=source)
1715
1739
1716 cl = self.changelog
1740 cl = self.changelog
1717 nodes = cl.nodesbetween(basenodes, None)[0]
1741 nodes = cl.nodesbetween(basenodes, None)[0]
1718 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1742 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1719 self.changegroupinfo(nodes)
1743 self.changegroupinfo(nodes)
1720
1744
1721 def identity(x):
1745 def identity(x):
1722 return x
1746 return x
1723
1747
1724 def gennodelst(revlog):
1748 def gennodelst(revlog):
1725 for r in xrange(0, revlog.count()):
1749 for r in xrange(0, revlog.count()):
1726 n = revlog.node(r)
1750 n = revlog.node(r)
1727 if revlog.linkrev(n) in revset:
1751 if revlog.linkrev(n) in revset:
1728 yield n
1752 yield n
1729
1753
1730 def changed_file_collector(changedfileset):
1754 def changed_file_collector(changedfileset):
1731 def collect_changed_files(clnode):
1755 def collect_changed_files(clnode):
1732 c = cl.read(clnode)
1756 c = cl.read(clnode)
1733 for fname in c[3]:
1757 for fname in c[3]:
1734 changedfileset[fname] = 1
1758 changedfileset[fname] = 1
1735 return collect_changed_files
1759 return collect_changed_files
1736
1760
1737 def lookuprevlink_func(revlog):
1761 def lookuprevlink_func(revlog):
1738 def lookuprevlink(n):
1762 def lookuprevlink(n):
1739 return cl.node(revlog.linkrev(n))
1763 return cl.node(revlog.linkrev(n))
1740 return lookuprevlink
1764 return lookuprevlink
1741
1765
1742 def gengroup():
1766 def gengroup():
1743 # construct a list of all changed files
1767 # construct a list of all changed files
1744 changedfiles = {}
1768 changedfiles = {}
1745
1769
1746 for chnk in cl.group(nodes, identity,
1770 for chnk in cl.group(nodes, identity,
1747 changed_file_collector(changedfiles)):
1771 changed_file_collector(changedfiles)):
1748 yield chnk
1772 yield chnk
1749 changedfiles = changedfiles.keys()
1773 changedfiles = changedfiles.keys()
1750 changedfiles.sort()
1774 changedfiles.sort()
1751
1775
1752 mnfst = self.manifest
1776 mnfst = self.manifest
1753 nodeiter = gennodelst(mnfst)
1777 nodeiter = gennodelst(mnfst)
1754 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1778 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1755 yield chnk
1779 yield chnk
1756
1780
1757 for fname in changedfiles:
1781 for fname in changedfiles:
1758 filerevlog = self.file(fname)
1782 filerevlog = self.file(fname)
1759 nodeiter = gennodelst(filerevlog)
1783 nodeiter = gennodelst(filerevlog)
1760 nodeiter = list(nodeiter)
1784 nodeiter = list(nodeiter)
1761 if nodeiter:
1785 if nodeiter:
1762 yield changegroup.genchunk(fname)
1786 yield changegroup.genchunk(fname)
1763 lookup = lookuprevlink_func(filerevlog)
1787 lookup = lookuprevlink_func(filerevlog)
1764 for chnk in filerevlog.group(nodeiter, lookup):
1788 for chnk in filerevlog.group(nodeiter, lookup):
1765 yield chnk
1789 yield chnk
1766
1790
1767 yield changegroup.closechunk()
1791 yield changegroup.closechunk()
1768
1792
1769 if nodes:
1793 if nodes:
1770 self.hook('outgoing', node=hex(nodes[0]), source=source)
1794 self.hook('outgoing', node=hex(nodes[0]), source=source)
1771
1795
1772 return util.chunkbuffer(gengroup())
1796 return util.chunkbuffer(gengroup())
1773
1797
1774 def addchangegroup(self, source, srctype, url):
1798 def addchangegroup(self, source, srctype, url):
1775 """add changegroup to repo.
1799 """add changegroup to repo.
1776
1800
1777 return values:
1801 return values:
1778 - nothing changed or no source: 0
1802 - nothing changed or no source: 0
1779 - more heads than before: 1+added heads (2..n)
1803 - more heads than before: 1+added heads (2..n)
1780 - less heads than before: -1-removed heads (-2..-n)
1804 - less heads than before: -1-removed heads (-2..-n)
1781 - number of heads stays the same: 1
1805 - number of heads stays the same: 1
1782 """
1806 """
1783 def csmap(x):
1807 def csmap(x):
1784 self.ui.debug(_("add changeset %s\n") % short(x))
1808 self.ui.debug(_("add changeset %s\n") % short(x))
1785 return cl.count()
1809 return cl.count()
1786
1810
1787 def revmap(x):
1811 def revmap(x):
1788 return cl.rev(x)
1812 return cl.rev(x)
1789
1813
1790 if not source:
1814 if not source:
1791 return 0
1815 return 0
1792
1816
1793 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1817 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1794
1818
1795 changesets = files = revisions = 0
1819 changesets = files = revisions = 0
1796
1820
1797 tr = self.transaction()
1798
1799 # write changelog data to temp files so concurrent readers will not see
1821 # write changelog data to temp files so concurrent readers will not see
1800 # inconsistent view
1822 # inconsistent view
1801 cl = self.changelog
1823 cl = self.changelog
1802 cl.delayupdate()
1824 cl.delayupdate()
1803 oldheads = len(cl.heads())
1825 oldheads = len(cl.heads())
1804
1826
1805 # pull off the changeset group
1827 tr = self.transaction()
1806 self.ui.status(_("adding changesets\n"))
1828 try:
1807 cor = cl.count() - 1
1829 # pull off the changeset group
1808 chunkiter = changegroup.chunkiter(source)
1830 self.ui.status(_("adding changesets\n"))
1809 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1831 cor = cl.count() - 1
1810 raise util.Abort(_("received changelog group is empty"))
1832 chunkiter = changegroup.chunkiter(source)
1811 cnr = cl.count() - 1
1833 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1812 changesets = cnr - cor
1834 raise util.Abort(_("received changelog group is empty"))
1835 cnr = cl.count() - 1
1836 changesets = cnr - cor
1813
1837
1814 # pull off the manifest group
1838 # pull off the manifest group
1815 self.ui.status(_("adding manifests\n"))
1839 self.ui.status(_("adding manifests\n"))
1816 chunkiter = changegroup.chunkiter(source)
1840 chunkiter = changegroup.chunkiter(source)
1817 # no need to check for empty manifest group here:
1841 # no need to check for empty manifest group here:
1818 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1842 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1819 # no new manifest will be created and the manifest group will
1843 # no new manifest will be created and the manifest group will
1820 # be empty during the pull
1844 # be empty during the pull
1821 self.manifest.addgroup(chunkiter, revmap, tr)
1845 self.manifest.addgroup(chunkiter, revmap, tr)
1822
1846
1823 # process the files
1847 # process the files
1824 self.ui.status(_("adding file changes\n"))
1848 self.ui.status(_("adding file changes\n"))
1825 while 1:
1849 while 1:
1826 f = changegroup.getchunk(source)
1850 f = changegroup.getchunk(source)
1827 if not f:
1851 if not f:
1828 break
1852 break
1829 self.ui.debug(_("adding %s revisions\n") % f)
1853 self.ui.debug(_("adding %s revisions\n") % f)
1830 fl = self.file(f)
1854 fl = self.file(f)
1831 o = fl.count()
1855 o = fl.count()
1832 chunkiter = changegroup.chunkiter(source)
1856 chunkiter = changegroup.chunkiter(source)
1833 if fl.addgroup(chunkiter, revmap, tr) is None:
1857 if fl.addgroup(chunkiter, revmap, tr) is None:
1834 raise util.Abort(_("received file revlog group is empty"))
1858 raise util.Abort(_("received file revlog group is empty"))
1835 revisions += fl.count() - o
1859 revisions += fl.count() - o
1836 files += 1
1860 files += 1
1861
1862 # make changelog see real files again
1863 cl.finalize(tr)
1837
1864
1838 # make changelog see real files again
1865 newheads = len(self.changelog.heads())
1839 cl.finalize(tr)
1866 heads = ""
1867 if oldheads and newheads != oldheads:
1868 heads = _(" (%+d heads)") % (newheads - oldheads)
1840
1869
1841 newheads = len(self.changelog.heads())
1870 self.ui.status(_("added %d changesets"
1842 heads = ""
1871 " with %d changes to %d files%s\n")
1843 if oldheads and newheads != oldheads:
1872 % (changesets, revisions, files, heads))
1844 heads = _(" (%+d heads)") % (newheads - oldheads)
1845
1873
1846 self.ui.status(_("added %d changesets"
1874 if changesets > 0:
1847 " with %d changes to %d files%s\n")
1875 self.hook('pretxnchangegroup', throw=True,
1848 % (changesets, revisions, files, heads))
1876 node=hex(self.changelog.node(cor+1)), source=srctype,
1877 url=url)
1849
1878
1850 if changesets > 0:
1879 tr.close()
1851 self.hook('pretxnchangegroup', throw=True,
1880 finally:
1852 node=hex(self.changelog.node(cor+1)), source=srctype,
1881 del tr
1853 url=url)
1854
1855 tr.close()
1856
1882
1857 if changesets > 0:
1883 if changesets > 0:
1858 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1884 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1859 source=srctype, url=url)
1885 source=srctype, url=url)
1860
1886
1861 for i in xrange(cor + 1, cnr + 1):
1887 for i in xrange(cor + 1, cnr + 1):
1862 self.hook("incoming", node=hex(self.changelog.node(i)),
1888 self.hook("incoming", node=hex(self.changelog.node(i)),
1863 source=srctype, url=url)
1889 source=srctype, url=url)
1864
1890
1865 # never return 0 here:
1891 # never return 0 here:
1866 if newheads < oldheads:
1892 if newheads < oldheads:
1867 return newheads - oldheads - 1
1893 return newheads - oldheads - 1
1868 else:
1894 else:
1869 return newheads - oldheads + 1
1895 return newheads - oldheads + 1
1870
1896
1871
1897
1872 def stream_in(self, remote):
1898 def stream_in(self, remote):
1873 fp = remote.stream_out()
1899 fp = remote.stream_out()
1874 l = fp.readline()
1900 l = fp.readline()
1875 try:
1901 try:
1876 resp = int(l)
1902 resp = int(l)
1877 except ValueError:
1903 except ValueError:
1878 raise util.UnexpectedOutput(
1904 raise util.UnexpectedOutput(
1879 _('Unexpected response from remote server:'), l)
1905 _('Unexpected response from remote server:'), l)
1880 if resp == 1:
1906 if resp == 1:
1881 raise util.Abort(_('operation forbidden by server'))
1907 raise util.Abort(_('operation forbidden by server'))
1882 elif resp == 2:
1908 elif resp == 2:
1883 raise util.Abort(_('locking the remote repository failed'))
1909 raise util.Abort(_('locking the remote repository failed'))
1884 elif resp != 0:
1910 elif resp != 0:
1885 raise util.Abort(_('the server sent an unknown error code'))
1911 raise util.Abort(_('the server sent an unknown error code'))
1886 self.ui.status(_('streaming all changes\n'))
1912 self.ui.status(_('streaming all changes\n'))
1887 l = fp.readline()
1913 l = fp.readline()
1888 try:
1914 try:
1889 total_files, total_bytes = map(int, l.split(' ', 1))
1915 total_files, total_bytes = map(int, l.split(' ', 1))
1890 except ValueError, TypeError:
1916 except ValueError, TypeError:
1891 raise util.UnexpectedOutput(
1917 raise util.UnexpectedOutput(
1892 _('Unexpected response from remote server:'), l)
1918 _('Unexpected response from remote server:'), l)
1893 self.ui.status(_('%d files to transfer, %s of data\n') %
1919 self.ui.status(_('%d files to transfer, %s of data\n') %
1894 (total_files, util.bytecount(total_bytes)))
1920 (total_files, util.bytecount(total_bytes)))
1895 start = time.time()
1921 start = time.time()
1896 for i in xrange(total_files):
1922 for i in xrange(total_files):
1897 # XXX doesn't support '\n' or '\r' in filenames
1923 # XXX doesn't support '\n' or '\r' in filenames
1898 l = fp.readline()
1924 l = fp.readline()
1899 try:
1925 try:
1900 name, size = l.split('\0', 1)
1926 name, size = l.split('\0', 1)
1901 size = int(size)
1927 size = int(size)
1902 except ValueError, TypeError:
1928 except ValueError, TypeError:
1903 raise util.UnexpectedOutput(
1929 raise util.UnexpectedOutput(
1904 _('Unexpected response from remote server:'), l)
1930 _('Unexpected response from remote server:'), l)
1905 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1931 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1906 ofp = self.sopener(name, 'w')
1932 ofp = self.sopener(name, 'w')
1907 for chunk in util.filechunkiter(fp, limit=size):
1933 for chunk in util.filechunkiter(fp, limit=size):
1908 ofp.write(chunk)
1934 ofp.write(chunk)
1909 ofp.close()
1935 ofp.close()
1910 elapsed = time.time() - start
1936 elapsed = time.time() - start
1911 if elapsed <= 0:
1937 if elapsed <= 0:
1912 elapsed = 0.001
1938 elapsed = 0.001
1913 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1939 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1914 (util.bytecount(total_bytes), elapsed,
1940 (util.bytecount(total_bytes), elapsed,
1915 util.bytecount(total_bytes / elapsed)))
1941 util.bytecount(total_bytes / elapsed)))
1916 self.invalidate()
1942 self.invalidate()
1917 return len(self.heads()) + 1
1943 return len(self.heads()) + 1
1918
1944
1919 def clone(self, remote, heads=[], stream=False):
1945 def clone(self, remote, heads=[], stream=False):
1920 '''clone remote repository.
1946 '''clone remote repository.
1921
1947
1922 keyword arguments:
1948 keyword arguments:
1923 heads: list of revs to clone (forces use of pull)
1949 heads: list of revs to clone (forces use of pull)
1924 stream: use streaming clone if possible'''
1950 stream: use streaming clone if possible'''
1925
1951
1926 # now, all clients that can request uncompressed clones can
1952 # now, all clients that can request uncompressed clones can
1927 # read repo formats supported by all servers that can serve
1953 # read repo formats supported by all servers that can serve
1928 # them.
1954 # them.
1929
1955
1930 # if revlog format changes, client will have to check version
1956 # if revlog format changes, client will have to check version
1931 # and format flags on "stream" capability, and use
1957 # and format flags on "stream" capability, and use
1932 # uncompressed only if compatible.
1958 # uncompressed only if compatible.
1933
1959
1934 if stream and not heads and remote.capable('stream'):
1960 if stream and not heads and remote.capable('stream'):
1935 return self.stream_in(remote)
1961 return self.stream_in(remote)
1936 return self.pull(remote, heads)
1962 return self.pull(remote, heads)
1937
1963
1938 # used to avoid circular references so destructors work
1964 # used to avoid circular references so destructors work
1939 def aftertrans(files):
1965 def aftertrans(files):
1940 renamefiles = [tuple(t) for t in files]
1966 renamefiles = [tuple(t) for t in files]
1941 def a():
1967 def a():
1942 for src, dest in renamefiles:
1968 for src, dest in renamefiles:
1943 util.rename(src, dest)
1969 util.rename(src, dest)
1944 return a
1970 return a
1945
1971
1946 def instance(ui, path, create):
1972 def instance(ui, path, create):
1947 return localrepository(ui, util.drop_scheme('file', path), create)
1973 return localrepository(ui, util.drop_scheme('file', path), create)
1948
1974
1949 def islocal(path):
1975 def islocal(path):
1950 return True
1976 return True
@@ -1,570 +1,572 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import errno, util, os, tempfile, context
10 import errno, util, os, tempfile, context
11
11
12 def filemerge(repo, fw, fo, wctx, mctx):
12 def filemerge(repo, fw, fo, wctx, mctx):
13 """perform a 3-way merge in the working directory
13 """perform a 3-way merge in the working directory
14
14
15 fw = filename in the working directory
15 fw = filename in the working directory
16 fo = filename in other parent
16 fo = filename in other parent
17 wctx, mctx = working and merge changecontexts
17 wctx, mctx = working and merge changecontexts
18 """
18 """
19
19
20 def temp(prefix, ctx):
20 def temp(prefix, ctx):
21 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
21 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
22 (fd, name) = tempfile.mkstemp(prefix=pre)
22 (fd, name) = tempfile.mkstemp(prefix=pre)
23 data = repo.wwritedata(ctx.path(), ctx.data())
23 data = repo.wwritedata(ctx.path(), ctx.data())
24 f = os.fdopen(fd, "wb")
24 f = os.fdopen(fd, "wb")
25 f.write(data)
25 f.write(data)
26 f.close()
26 f.close()
27 return name
27 return name
28
28
29 fcm = wctx.filectx(fw)
29 fcm = wctx.filectx(fw)
30 fco = mctx.filectx(fo)
30 fco = mctx.filectx(fo)
31
31
32 if not fco.cmp(fcm.data()): # files identical?
32 if not fco.cmp(fcm.data()): # files identical?
33 return None
33 return None
34
34
35 fca = fcm.ancestor(fco)
35 fca = fcm.ancestor(fco)
36 if not fca:
36 if not fca:
37 fca = repo.filectx(fw, fileid=nullrev)
37 fca = repo.filectx(fw, fileid=nullrev)
38 a = repo.wjoin(fw)
38 a = repo.wjoin(fw)
39 b = temp("base", fca)
39 b = temp("base", fca)
40 c = temp("other", fco)
40 c = temp("other", fco)
41
41
42 if fw != fo:
42 if fw != fo:
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 else:
44 else:
45 repo.ui.status(_("merging %s\n") % fw)
45 repo.ui.status(_("merging %s\n") % fw)
46
46
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48
48
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 or "hgmerge")
50 or "hgmerge")
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 environ={'HG_FILE': fw,
52 environ={'HG_FILE': fw,
53 'HG_MY_NODE': str(wctx.parents()[0]),
53 'HG_MY_NODE': str(wctx.parents()[0]),
54 'HG_OTHER_NODE': str(mctx)})
54 'HG_OTHER_NODE': str(mctx)})
55 if r:
55 if r:
56 repo.ui.warn(_("merging %s failed!\n") % fw)
56 repo.ui.warn(_("merging %s failed!\n") % fw)
57
57
58 os.unlink(b)
58 os.unlink(b)
59 os.unlink(c)
59 os.unlink(c)
60 return r
60 return r
61
61
62 def checkunknown(wctx, mctx):
62 def checkunknown(wctx, mctx):
63 "check for collisions between unknown files and files in mctx"
63 "check for collisions between unknown files and files in mctx"
64 man = mctx.manifest()
64 man = mctx.manifest()
65 for f in wctx.unknown():
65 for f in wctx.unknown():
66 if f in man:
66 if f in man:
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 raise util.Abort(_("untracked local file '%s' differs"
68 raise util.Abort(_("untracked local file '%s' differs"
69 " from remote version") % f)
69 " from remote version") % f)
70
70
71 def checkcollision(mctx):
71 def checkcollision(mctx):
72 "check for case folding collisions in the destination context"
72 "check for case folding collisions in the destination context"
73 folded = {}
73 folded = {}
74 for fn in mctx.manifest():
74 for fn in mctx.manifest():
75 fold = fn.lower()
75 fold = fn.lower()
76 if fold in folded:
76 if fold in folded:
77 raise util.Abort(_("case-folding collision between %s and %s")
77 raise util.Abort(_("case-folding collision between %s and %s")
78 % (fn, folded[fold]))
78 % (fn, folded[fold]))
79 folded[fold] = fn
79 folded[fold] = fn
80
80
81 def forgetremoved(wctx, mctx):
81 def forgetremoved(wctx, mctx):
82 """
82 """
83 Forget removed files
83 Forget removed files
84
84
85 If we're jumping between revisions (as opposed to merging), and if
85 If we're jumping between revisions (as opposed to merging), and if
86 neither the working directory nor the target rev has the file,
86 neither the working directory nor the target rev has the file,
87 then we need to remove it from the dirstate, to prevent the
87 then we need to remove it from the dirstate, to prevent the
88 dirstate from listing the file when it is no longer in the
88 dirstate from listing the file when it is no longer in the
89 manifest.
89 manifest.
90 """
90 """
91
91
92 action = []
92 action = []
93 man = mctx.manifest()
93 man = mctx.manifest()
94 for f in wctx.deleted() + wctx.removed():
94 for f in wctx.deleted() + wctx.removed():
95 if f not in man:
95 if f not in man:
96 action.append((f, "f"))
96 action.append((f, "f"))
97
97
98 return action
98 return action
99
99
100 def findcopies(repo, m1, m2, ma, limit):
100 def findcopies(repo, m1, m2, ma, limit):
101 """
101 """
102 Find moves and copies between m1 and m2 back to limit linkrev
102 Find moves and copies between m1 and m2 back to limit linkrev
103 """
103 """
104
104
105 def nonoverlap(d1, d2, d3):
105 def nonoverlap(d1, d2, d3):
106 "Return list of elements in d1 not in d2 or d3"
106 "Return list of elements in d1 not in d2 or d3"
107 l = [d for d in d1 if d not in d3 and d not in d2]
107 l = [d for d in d1 if d not in d3 and d not in d2]
108 l.sort()
108 l.sort()
109 return l
109 return l
110
110
111 def dirname(f):
111 def dirname(f):
112 s = f.rfind("/")
112 s = f.rfind("/")
113 if s == -1:
113 if s == -1:
114 return ""
114 return ""
115 return f[:s]
115 return f[:s]
116
116
117 def dirs(files):
117 def dirs(files):
118 d = {}
118 d = {}
119 for f in files:
119 for f in files:
120 f = dirname(f)
120 f = dirname(f)
121 while f not in d:
121 while f not in d:
122 d[f] = True
122 d[f] = True
123 f = dirname(f)
123 f = dirname(f)
124 return d
124 return d
125
125
126 wctx = repo.workingctx()
126 wctx = repo.workingctx()
127
127
128 def makectx(f, n):
128 def makectx(f, n):
129 if len(n) == 20:
129 if len(n) == 20:
130 return repo.filectx(f, fileid=n)
130 return repo.filectx(f, fileid=n)
131 return wctx.filectx(f)
131 return wctx.filectx(f)
132 ctx = util.cachefunc(makectx)
132 ctx = util.cachefunc(makectx)
133
133
134 def findold(fctx):
134 def findold(fctx):
135 "find files that path was copied from, back to linkrev limit"
135 "find files that path was copied from, back to linkrev limit"
136 old = {}
136 old = {}
137 seen = {}
137 seen = {}
138 orig = fctx.path()
138 orig = fctx.path()
139 visit = [fctx]
139 visit = [fctx]
140 while visit:
140 while visit:
141 fc = visit.pop()
141 fc = visit.pop()
142 s = str(fc)
142 s = str(fc)
143 if s in seen:
143 if s in seen:
144 continue
144 continue
145 seen[s] = 1
145 seen[s] = 1
146 if fc.path() != orig and fc.path() not in old:
146 if fc.path() != orig and fc.path() not in old:
147 old[fc.path()] = 1
147 old[fc.path()] = 1
148 if fc.rev() < limit:
148 if fc.rev() < limit:
149 continue
149 continue
150 visit += fc.parents()
150 visit += fc.parents()
151
151
152 old = old.keys()
152 old = old.keys()
153 old.sort()
153 old.sort()
154 return old
154 return old
155
155
156 copy = {}
156 copy = {}
157 fullcopy = {}
157 fullcopy = {}
158 diverge = {}
158 diverge = {}
159
159
160 def checkcopies(c, man, aman):
160 def checkcopies(c, man, aman):
161 '''check possible copies for filectx c'''
161 '''check possible copies for filectx c'''
162 for of in findold(c):
162 for of in findold(c):
163 fullcopy[c.path()] = of # remember for dir rename detection
163 fullcopy[c.path()] = of # remember for dir rename detection
164 if of not in man: # original file not in other manifest?
164 if of not in man: # original file not in other manifest?
165 if of in ma:
165 if of in ma:
166 diverge.setdefault(of, []).append(c.path())
166 diverge.setdefault(of, []).append(c.path())
167 continue
167 continue
168 # if the original file is unchanged on the other branch,
168 # if the original file is unchanged on the other branch,
169 # no merge needed
169 # no merge needed
170 if man[of] == aman.get(of):
170 if man[of] == aman.get(of):
171 continue
171 continue
172 c2 = ctx(of, man[of])
172 c2 = ctx(of, man[of])
173 ca = c.ancestor(c2)
173 ca = c.ancestor(c2)
174 if not ca: # unrelated?
174 if not ca: # unrelated?
175 continue
175 continue
176 # named changed on only one side?
176 # named changed on only one side?
177 if ca.path() == c.path() or ca.path() == c2.path():
177 if ca.path() == c.path() or ca.path() == c2.path():
178 if c == ca or c2 == ca: # no merge needed, ignore copy
178 if c == ca or c2 == ca: # no merge needed, ignore copy
179 continue
179 continue
180 copy[c.path()] = of
180 copy[c.path()] = of
181
181
182 if not repo.ui.configbool("merge", "followcopies", True):
182 if not repo.ui.configbool("merge", "followcopies", True):
183 return {}, {}
183 return {}, {}
184
184
185 # avoid silly behavior for update from empty dir
185 # avoid silly behavior for update from empty dir
186 if not m1 or not m2 or not ma:
186 if not m1 or not m2 or not ma:
187 return {}, {}
187 return {}, {}
188
188
189 u1 = nonoverlap(m1, m2, ma)
189 u1 = nonoverlap(m1, m2, ma)
190 u2 = nonoverlap(m2, m1, ma)
190 u2 = nonoverlap(m2, m1, ma)
191
191
192 for f in u1:
192 for f in u1:
193 checkcopies(ctx(f, m1[f]), m2, ma)
193 checkcopies(ctx(f, m1[f]), m2, ma)
194
194
195 for f in u2:
195 for f in u2:
196 checkcopies(ctx(f, m2[f]), m1, ma)
196 checkcopies(ctx(f, m2[f]), m1, ma)
197
197
198 d2 = {}
198 d2 = {}
199 for of, fl in diverge.items():
199 for of, fl in diverge.items():
200 for f in fl:
200 for f in fl:
201 fo = list(fl)
201 fo = list(fl)
202 fo.remove(f)
202 fo.remove(f)
203 d2[f] = (of, fo)
203 d2[f] = (of, fo)
204
204
205 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
205 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
206 return copy, diverge
206 return copy, diverge
207
207
208 # generate a directory move map
208 # generate a directory move map
209 d1, d2 = dirs(m1), dirs(m2)
209 d1, d2 = dirs(m1), dirs(m2)
210 invalid = {}
210 invalid = {}
211 dirmove = {}
211 dirmove = {}
212
212
213 # examine each file copy for a potential directory move, which is
213 # examine each file copy for a potential directory move, which is
214 # when all the files in a directory are moved to a new directory
214 # when all the files in a directory are moved to a new directory
215 for dst, src in fullcopy.items():
215 for dst, src in fullcopy.items():
216 dsrc, ddst = dirname(src), dirname(dst)
216 dsrc, ddst = dirname(src), dirname(dst)
217 if dsrc in invalid:
217 if dsrc in invalid:
218 # already seen to be uninteresting
218 # already seen to be uninteresting
219 continue
219 continue
220 elif dsrc in d1 and ddst in d1:
220 elif dsrc in d1 and ddst in d1:
221 # directory wasn't entirely moved locally
221 # directory wasn't entirely moved locally
222 invalid[dsrc] = True
222 invalid[dsrc] = True
223 elif dsrc in d2 and ddst in d2:
223 elif dsrc in d2 and ddst in d2:
224 # directory wasn't entirely moved remotely
224 # directory wasn't entirely moved remotely
225 invalid[dsrc] = True
225 invalid[dsrc] = True
226 elif dsrc in dirmove and dirmove[dsrc] != ddst:
226 elif dsrc in dirmove and dirmove[dsrc] != ddst:
227 # files from the same directory moved to two different places
227 # files from the same directory moved to two different places
228 invalid[dsrc] = True
228 invalid[dsrc] = True
229 else:
229 else:
230 # looks good so far
230 # looks good so far
231 dirmove[dsrc + "/"] = ddst + "/"
231 dirmove[dsrc + "/"] = ddst + "/"
232
232
233 for i in invalid:
233 for i in invalid:
234 if i in dirmove:
234 if i in dirmove:
235 del dirmove[i]
235 del dirmove[i]
236
236
237 del d1, d2, invalid
237 del d1, d2, invalid
238
238
239 if not dirmove:
239 if not dirmove:
240 return copy, diverge
240 return copy, diverge
241
241
242 # check unaccounted nonoverlapping files against directory moves
242 # check unaccounted nonoverlapping files against directory moves
243 for f in u1 + u2:
243 for f in u1 + u2:
244 if f not in fullcopy:
244 if f not in fullcopy:
245 for d in dirmove:
245 for d in dirmove:
246 if f.startswith(d):
246 if f.startswith(d):
247 # new file added in a directory that was moved, move it
247 # new file added in a directory that was moved, move it
248 copy[f] = dirmove[d] + f[len(d):]
248 copy[f] = dirmove[d] + f[len(d):]
249 break
249 break
250
250
251 return copy, diverge
251 return copy, diverge
252
252
253 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
253 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
254 """
254 """
255 Merge p1 and p2 with ancestor ma and generate merge action list
255 Merge p1 and p2 with ancestor ma and generate merge action list
256
256
257 overwrite = whether we clobber working files
257 overwrite = whether we clobber working files
258 partial = function to filter file lists
258 partial = function to filter file lists
259 """
259 """
260
260
261 repo.ui.note(_("resolving manifests\n"))
261 repo.ui.note(_("resolving manifests\n"))
262 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
262 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
263 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
263 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
264
264
265 m1 = p1.manifest()
265 m1 = p1.manifest()
266 m2 = p2.manifest()
266 m2 = p2.manifest()
267 ma = pa.manifest()
267 ma = pa.manifest()
268 backwards = (pa == p2)
268 backwards = (pa == p2)
269 action = []
269 action = []
270 copy = {}
270 copy = {}
271 diverge = {}
271 diverge = {}
272
272
273 def fmerge(f, f2=None, fa=None):
273 def fmerge(f, f2=None, fa=None):
274 """merge flags"""
274 """merge flags"""
275 if not f2:
275 if not f2:
276 f2 = f
276 f2 = f
277 fa = f
277 fa = f
278 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
278 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
279 if ((a^b) | (a^c)) ^ a:
279 if ((a^b) | (a^c)) ^ a:
280 return 'x'
280 return 'x'
281 a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
281 a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
282 if ((a^b) | (a^c)) ^ a:
282 if ((a^b) | (a^c)) ^ a:
283 return 'l'
283 return 'l'
284 return ''
284 return ''
285
285
286 def act(msg, m, f, *args):
286 def act(msg, m, f, *args):
287 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
287 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
288 action.append((f, m) + args)
288 action.append((f, m) + args)
289
289
290 if not (backwards or overwrite):
290 if not (backwards or overwrite):
291 copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
291 copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
292
292
293 for of, fl in diverge.items():
293 for of, fl in diverge.items():
294 act("divergent renames", "dr", of, fl)
294 act("divergent renames", "dr", of, fl)
295
295
296 copied = dict.fromkeys(copy.values())
296 copied = dict.fromkeys(copy.values())
297
297
298 # Compare manifests
298 # Compare manifests
299 for f, n in m1.iteritems():
299 for f, n in m1.iteritems():
300 if partial and not partial(f):
300 if partial and not partial(f):
301 continue
301 continue
302 if f in m2:
302 if f in m2:
303 # are files different?
303 # are files different?
304 if n != m2[f]:
304 if n != m2[f]:
305 a = ma.get(f, nullid)
305 a = ma.get(f, nullid)
306 # are both different from the ancestor?
306 # are both different from the ancestor?
307 if not overwrite and n != a and m2[f] != a:
307 if not overwrite and n != a and m2[f] != a:
308 act("versions differ", "m", f, f, f, fmerge(f), False)
308 act("versions differ", "m", f, f, f, fmerge(f), False)
309 # are we clobbering?
309 # are we clobbering?
310 # is remote's version newer?
310 # is remote's version newer?
311 # or are we going back in time and clean?
311 # or are we going back in time and clean?
312 elif overwrite or m2[f] != a or (backwards and not n[20:]):
312 elif overwrite or m2[f] != a or (backwards and not n[20:]):
313 act("remote is newer", "g", f, m2.flags(f))
313 act("remote is newer", "g", f, m2.flags(f))
314 # local is newer, not overwrite, check mode bits
314 # local is newer, not overwrite, check mode bits
315 elif fmerge(f) != m1.flags(f):
315 elif fmerge(f) != m1.flags(f):
316 act("update permissions", "e", f, m2.flags(f))
316 act("update permissions", "e", f, m2.flags(f))
317 # contents same, check mode bits
317 # contents same, check mode bits
318 elif m1.flags(f) != m2.flags(f):
318 elif m1.flags(f) != m2.flags(f):
319 if overwrite or fmerge(f) != m1.flags(f):
319 if overwrite or fmerge(f) != m1.flags(f):
320 act("update permissions", "e", f, m2.flags(f))
320 act("update permissions", "e", f, m2.flags(f))
321 elif f in copied:
321 elif f in copied:
322 continue
322 continue
323 elif f in copy:
323 elif f in copy:
324 f2 = copy[f]
324 f2 = copy[f]
325 if f2 not in m2: # directory rename
325 if f2 not in m2: # directory rename
326 act("remote renamed directory to " + f2, "d",
326 act("remote renamed directory to " + f2, "d",
327 f, None, f2, m1.flags(f))
327 f, None, f2, m1.flags(f))
328 elif f2 in m1: # case 2 A,B/B/B
328 elif f2 in m1: # case 2 A,B/B/B
329 act("local copied to " + f2, "m",
329 act("local copied to " + f2, "m",
330 f, f2, f, fmerge(f, f2, f2), False)
330 f, f2, f, fmerge(f, f2, f2), False)
331 else: # case 4,21 A/B/B
331 else: # case 4,21 A/B/B
332 act("local moved to " + f2, "m",
332 act("local moved to " + f2, "m",
333 f, f2, f, fmerge(f, f2, f2), False)
333 f, f2, f, fmerge(f, f2, f2), False)
334 elif f in ma:
334 elif f in ma:
335 if n != ma[f] and not overwrite:
335 if n != ma[f] and not overwrite:
336 if repo.ui.prompt(
336 if repo.ui.prompt(
337 (_(" local changed %s which remote deleted\n") % f) +
337 (_(" local changed %s which remote deleted\n") % f) +
338 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
338 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
339 act("prompt delete", "r", f)
339 act("prompt delete", "r", f)
340 else:
340 else:
341 act("other deleted", "r", f)
341 act("other deleted", "r", f)
342 else:
342 else:
343 # file is created on branch or in working directory
343 # file is created on branch or in working directory
344 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
344 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
345 act("remote deleted", "r", f)
345 act("remote deleted", "r", f)
346
346
347 for f, n in m2.iteritems():
347 for f, n in m2.iteritems():
348 if partial and not partial(f):
348 if partial and not partial(f):
349 continue
349 continue
350 if f in m1:
350 if f in m1:
351 continue
351 continue
352 if f in copied:
352 if f in copied:
353 continue
353 continue
354 if f in copy:
354 if f in copy:
355 f2 = copy[f]
355 f2 = copy[f]
356 if f2 not in m1: # directory rename
356 if f2 not in m1: # directory rename
357 act("local renamed directory to " + f2, "d",
357 act("local renamed directory to " + f2, "d",
358 None, f, f2, m2.flags(f))
358 None, f, f2, m2.flags(f))
359 elif f2 in m2: # rename case 1, A/A,B/A
359 elif f2 in m2: # rename case 1, A/A,B/A
360 act("remote copied to " + f, "m",
360 act("remote copied to " + f, "m",
361 f2, f, f, fmerge(f2, f, f2), False)
361 f2, f, f, fmerge(f2, f, f2), False)
362 else: # case 3,20 A/B/A
362 else: # case 3,20 A/B/A
363 act("remote moved to " + f, "m",
363 act("remote moved to " + f, "m",
364 f2, f, f, fmerge(f2, f, f2), True)
364 f2, f, f, fmerge(f2, f, f2), True)
365 elif f in ma:
365 elif f in ma:
366 if overwrite or backwards:
366 if overwrite or backwards:
367 act("recreating", "g", f, m2.flags(f))
367 act("recreating", "g", f, m2.flags(f))
368 elif n != ma[f]:
368 elif n != ma[f]:
369 if repo.ui.prompt(
369 if repo.ui.prompt(
370 (_("remote changed %s which local deleted\n") % f) +
370 (_("remote changed %s which local deleted\n") % f) +
371 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
371 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
372 act("prompt recreating", "g", f, m2.flags(f))
372 act("prompt recreating", "g", f, m2.flags(f))
373 else:
373 else:
374 act("remote created", "g", f, m2.flags(f))
374 act("remote created", "g", f, m2.flags(f))
375
375
376 return action
376 return action
377
377
378 def applyupdates(repo, action, wctx, mctx):
378 def applyupdates(repo, action, wctx, mctx):
379 "apply the merge action list to the working directory"
379 "apply the merge action list to the working directory"
380
380
381 updated, merged, removed, unresolved = 0, 0, 0, 0
381 updated, merged, removed, unresolved = 0, 0, 0, 0
382 action.sort()
382 action.sort()
383 for a in action:
383 for a in action:
384 f, m = a[:2]
384 f, m = a[:2]
385 if f and f[0] == "/":
385 if f and f[0] == "/":
386 continue
386 continue
387 if m == "r": # remove
387 if m == "r": # remove
388 repo.ui.note(_("removing %s\n") % f)
388 repo.ui.note(_("removing %s\n") % f)
389 util.audit_path(f)
389 util.audit_path(f)
390 try:
390 try:
391 util.unlink(repo.wjoin(f))
391 util.unlink(repo.wjoin(f))
392 except OSError, inst:
392 except OSError, inst:
393 if inst.errno != errno.ENOENT:
393 if inst.errno != errno.ENOENT:
394 repo.ui.warn(_("update failed to remove %s: %s!\n") %
394 repo.ui.warn(_("update failed to remove %s: %s!\n") %
395 (f, inst.strerror))
395 (f, inst.strerror))
396 removed += 1
396 removed += 1
397 elif m == "m": # merge
397 elif m == "m": # merge
398 f2, fd, flags, move = a[2:]
398 f2, fd, flags, move = a[2:]
399 r = filemerge(repo, f, f2, wctx, mctx)
399 r = filemerge(repo, f, f2, wctx, mctx)
400 if r > 0:
400 if r > 0:
401 unresolved += 1
401 unresolved += 1
402 else:
402 else:
403 if r is None:
403 if r is None:
404 updated += 1
404 updated += 1
405 else:
405 else:
406 merged += 1
406 merged += 1
407 if f != fd:
407 if f != fd:
408 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
408 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
409 repo.wwrite(fd, repo.wread(f), flags)
409 repo.wwrite(fd, repo.wread(f), flags)
410 if move:
410 if move:
411 repo.ui.debug(_("removing %s\n") % f)
411 repo.ui.debug(_("removing %s\n") % f)
412 os.unlink(repo.wjoin(f))
412 os.unlink(repo.wjoin(f))
413 util.set_exec(repo.wjoin(fd), "x" in flags)
413 util.set_exec(repo.wjoin(fd), "x" in flags)
414 elif m == "g": # get
414 elif m == "g": # get
415 flags = a[2]
415 flags = a[2]
416 repo.ui.note(_("getting %s\n") % f)
416 repo.ui.note(_("getting %s\n") % f)
417 t = mctx.filectx(f).data()
417 t = mctx.filectx(f).data()
418 repo.wwrite(f, t, flags)
418 repo.wwrite(f, t, flags)
419 updated += 1
419 updated += 1
420 elif m == "d": # directory rename
420 elif m == "d": # directory rename
421 f2, fd, flags = a[2:]
421 f2, fd, flags = a[2:]
422 if f:
422 if f:
423 repo.ui.note(_("moving %s to %s\n") % (f, fd))
423 repo.ui.note(_("moving %s to %s\n") % (f, fd))
424 t = wctx.filectx(f).data()
424 t = wctx.filectx(f).data()
425 repo.wwrite(fd, t, flags)
425 repo.wwrite(fd, t, flags)
426 util.unlink(repo.wjoin(f))
426 util.unlink(repo.wjoin(f))
427 if f2:
427 if f2:
428 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
428 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
429 t = mctx.filectx(f2).data()
429 t = mctx.filectx(f2).data()
430 repo.wwrite(fd, t, flags)
430 repo.wwrite(fd, t, flags)
431 updated += 1
431 updated += 1
432 elif m == "dr": # divergent renames
432 elif m == "dr": # divergent renames
433 fl = a[2]
433 fl = a[2]
434 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
434 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
435 for nf in fl:
435 for nf in fl:
436 repo.ui.warn(" %s\n" % nf)
436 repo.ui.warn(" %s\n" % nf)
437 elif m == "e": # exec
437 elif m == "e": # exec
438 flags = a[2]
438 flags = a[2]
439 util.set_exec(repo.wjoin(f), flags)
439 util.set_exec(repo.wjoin(f), flags)
440
440
441 return updated, merged, removed, unresolved
441 return updated, merged, removed, unresolved
442
442
443 def recordupdates(repo, action, branchmerge):
443 def recordupdates(repo, action, branchmerge):
444 "record merge actions to the dirstate"
444 "record merge actions to the dirstate"
445
445
446 for a in action:
446 for a in action:
447 f, m = a[:2]
447 f, m = a[:2]
448 if m == "r": # remove
448 if m == "r": # remove
449 if branchmerge:
449 if branchmerge:
450 repo.dirstate.remove(f)
450 repo.dirstate.remove(f)
451 else:
451 else:
452 repo.dirstate.forget(f)
452 repo.dirstate.forget(f)
453 elif m == "f": # forget
453 elif m == "f": # forget
454 repo.dirstate.forget(f)
454 repo.dirstate.forget(f)
455 elif m == "g": # get
455 elif m == "g": # get
456 if branchmerge:
456 if branchmerge:
457 repo.dirstate.normaldirty(f)
457 repo.dirstate.normaldirty(f)
458 else:
458 else:
459 repo.dirstate.normal(f)
459 repo.dirstate.normal(f)
460 elif m == "m": # merge
460 elif m == "m": # merge
461 f2, fd, flag, move = a[2:]
461 f2, fd, flag, move = a[2:]
462 if branchmerge:
462 if branchmerge:
463 # We've done a branch merge, mark this file as merged
463 # We've done a branch merge, mark this file as merged
464 # so that we properly record the merger later
464 # so that we properly record the merger later
465 repo.dirstate.merge(fd)
465 repo.dirstate.merge(fd)
466 if f != f2: # copy/rename
466 if f != f2: # copy/rename
467 if move:
467 if move:
468 repo.dirstate.remove(f)
468 repo.dirstate.remove(f)
469 if f != fd:
469 if f != fd:
470 repo.dirstate.copy(f, fd)
470 repo.dirstate.copy(f, fd)
471 else:
471 else:
472 repo.dirstate.copy(f2, fd)
472 repo.dirstate.copy(f2, fd)
473 else:
473 else:
474 # We've update-merged a locally modified file, so
474 # We've update-merged a locally modified file, so
475 # we set the dirstate to emulate a normal checkout
475 # we set the dirstate to emulate a normal checkout
476 # of that file some time in the past. Thus our
476 # of that file some time in the past. Thus our
477 # merge will appear as a normal local file
477 # merge will appear as a normal local file
478 # modification.
478 # modification.
479 repo.dirstate.normaldirty(fd)
479 repo.dirstate.normaldirty(fd)
480 if move:
480 if move:
481 repo.dirstate.forget(f)
481 repo.dirstate.forget(f)
482 elif m == "d": # directory rename
482 elif m == "d": # directory rename
483 f2, fd, flag = a[2:]
483 f2, fd, flag = a[2:]
484 if not f2 and f not in repo.dirstate:
484 if not f2 and f not in repo.dirstate:
485 # untracked file moved
485 # untracked file moved
486 continue
486 continue
487 if branchmerge:
487 if branchmerge:
488 repo.dirstate.add(fd)
488 repo.dirstate.add(fd)
489 if f:
489 if f:
490 repo.dirstate.remove(f)
490 repo.dirstate.remove(f)
491 repo.dirstate.copy(f, fd)
491 repo.dirstate.copy(f, fd)
492 if f2:
492 if f2:
493 repo.dirstate.copy(f2, fd)
493 repo.dirstate.copy(f2, fd)
494 else:
494 else:
495 repo.dirstate.normal(fd)
495 repo.dirstate.normal(fd)
496 if f:
496 if f:
497 repo.dirstate.forget(f)
497 repo.dirstate.forget(f)
498
498
499 def update(repo, node, branchmerge, force, partial, wlock):
499 def update(repo, node, branchmerge, force, partial, wlock):
500 """
500 """
501 Perform a merge between the working directory and the given node
501 Perform a merge between the working directory and the given node
502
502
503 branchmerge = whether to merge between branches
503 branchmerge = whether to merge between branches
504 force = whether to force branch merging or file overwriting
504 force = whether to force branch merging or file overwriting
505 partial = a function to filter file lists (dirstate not updated)
505 partial = a function to filter file lists (dirstate not updated)
506 wlock = working dir lock, if already held
506 wlock = working dir lock, if already held
507 """
507 """
508
508
509 if not wlock:
509 try:
510 wlock = repo.wlock()
510 if not wlock:
511 wlock = repo.wlock()
511
512
512 wc = repo.workingctx()
513 wc = repo.workingctx()
513 if node is None:
514 if node is None:
514 # tip of current branch
515 # tip of current branch
515 try:
516 try:
516 node = repo.branchtags()[wc.branch()]
517 node = repo.branchtags()[wc.branch()]
517 except KeyError:
518 except KeyError:
518 raise util.Abort(_("branch %s not found") % wc.branch())
519 raise util.Abort(_("branch %s not found") % wc.branch())
519 overwrite = force and not branchmerge
520 overwrite = force and not branchmerge
520 forcemerge = force and branchmerge
521 forcemerge = force and branchmerge
521 pl = wc.parents()
522 pl = wc.parents()
522 p1, p2 = pl[0], repo.changectx(node)
523 p1, p2 = pl[0], repo.changectx(node)
523 pa = p1.ancestor(p2)
524 pa = p1.ancestor(p2)
524 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
525 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
525 fastforward = False
526 fastforward = False
526
527
527 ### check phase
528 ### check phase
528 if not overwrite and len(pl) > 1:
529 if not overwrite and len(pl) > 1:
529 raise util.Abort(_("outstanding uncommitted merges"))
530 raise util.Abort(_("outstanding uncommitted merges"))
530 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
531 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
531 if branchmerge:
532 if branchmerge:
532 if p1.branch() != p2.branch() and pa != p2:
533 if p1.branch() != p2.branch() and pa != p2:
533 fastforward = True
534 fastforward = True
534 else:
535 else:
535 raise util.Abort(_("there is nothing to merge, just use "
536 raise util.Abort(_("there is nothing to merge, just use "
536 "'hg update' or look at 'hg heads'"))
537 "'hg update' or look at 'hg heads'"))
537 elif not (overwrite or branchmerge):
538 elif not (overwrite or branchmerge):
538 raise util.Abort(_("update spans branches, use 'hg merge' "
539 raise util.Abort(_("update spans branches, use 'hg merge' "
539 "or 'hg update -C' to lose changes"))
540 "or 'hg update -C' to lose changes"))
540 if branchmerge and not forcemerge:
541 if branchmerge and not forcemerge:
541 if wc.files():
542 if wc.files():
542 raise util.Abort(_("outstanding uncommitted changes"))
543 raise util.Abort(_("outstanding uncommitted changes"))
543
544
544 ### calculate phase
545 ### calculate phase
545 action = []
546 action = []
546 if not force:
547 if not force:
547 checkunknown(wc, p2)
548 checkunknown(wc, p2)
548 if not util.checkfolding(repo.path):
549 if not util.checkfolding(repo.path):
549 checkcollision(p2)
550 checkcollision(p2)
550 if not branchmerge:
551 if not branchmerge:
551 action += forgetremoved(wc, p2)
552 action += forgetremoved(wc, p2)
552 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
553 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
553
554
554 ### apply phase
555 ### apply phase
555 if not branchmerge: # just jump to the new rev
556 if not branchmerge: # just jump to the new rev
556 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
557 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
557 if not partial:
558 if not partial:
558 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
559 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
559
560
560 stats = applyupdates(repo, action, wc, p2)
561 stats = applyupdates(repo, action, wc, p2)
561
562
562 if not partial:
563 if not partial:
563 recordupdates(repo, action, branchmerge)
564 recordupdates(repo, action, branchmerge)
564 repo.dirstate.setparents(fp1, fp2)
565 repo.dirstate.setparents(fp1, fp2)
565 if not branchmerge and not fastforward:
566 if not branchmerge and not fastforward:
566 repo.dirstate.setbranch(p2.branch())
567 repo.dirstate.setbranch(p2.branch())
567 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
568 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
568
569
569 return stats
570 return stats
570
571 finally:
572 del wlock
@@ -1,95 +1,98 b''
1 # streamclone.py - streaming clone server support for mercurial
1 # streamclone.py - streaming clone server support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import os, stat, util, lock
9 import os, stat, util, lock
10
10
11 # if server supports streaming clone, it advertises "stream"
11 # if server supports streaming clone, it advertises "stream"
12 # capability with value that is version+flags of repo it is serving.
12 # capability with value that is version+flags of repo it is serving.
13 # client only streams if it can read that repo format.
13 # client only streams if it can read that repo format.
14
14
15 def walkrepo(root):
15 def walkrepo(root):
16 '''iterate over metadata files in repository.
16 '''iterate over metadata files in repository.
17 walk in natural (sorted) order.
17 walk in natural (sorted) order.
18 yields 2-tuples: name of .d or .i file, size of file.'''
18 yields 2-tuples: name of .d or .i file, size of file.'''
19
19
20 strip_count = len(root) + len(os.sep)
20 strip_count = len(root) + len(os.sep)
21 def walk(path, recurse):
21 def walk(path, recurse):
22 ents = os.listdir(path)
22 ents = os.listdir(path)
23 ents.sort()
23 ents.sort()
24 for e in ents:
24 for e in ents:
25 pe = os.path.join(path, e)
25 pe = os.path.join(path, e)
26 st = os.lstat(pe)
26 st = os.lstat(pe)
27 if stat.S_ISDIR(st.st_mode):
27 if stat.S_ISDIR(st.st_mode):
28 if recurse:
28 if recurse:
29 for x in walk(pe, True):
29 for x in walk(pe, True):
30 yield x
30 yield x
31 else:
31 else:
32 if not stat.S_ISREG(st.st_mode) or len(e) < 2:
32 if not stat.S_ISREG(st.st_mode) or len(e) < 2:
33 continue
33 continue
34 sfx = e[-2:]
34 sfx = e[-2:]
35 if sfx in ('.d', '.i'):
35 if sfx in ('.d', '.i'):
36 yield pe[strip_count:], st.st_size
36 yield pe[strip_count:], st.st_size
37 # write file data first
37 # write file data first
38 for x in walk(os.path.join(root, 'data'), True):
38 for x in walk(os.path.join(root, 'data'), True):
39 yield x
39 yield x
40 # write manifest before changelog
40 # write manifest before changelog
41 meta = list(walk(root, False))
41 meta = list(walk(root, False))
42 meta.sort()
42 meta.sort()
43 meta.reverse()
43 meta.reverse()
44 for x in meta:
44 for x in meta:
45 yield x
45 yield x
46
46
47 # stream file format is simple.
47 # stream file format is simple.
48 #
48 #
49 # server writes out line that says how many files, how many total
49 # server writes out line that says how many files, how many total
50 # bytes. separator is ascii space, byte counts are strings.
50 # bytes. separator is ascii space, byte counts are strings.
51 #
51 #
52 # then for each file:
52 # then for each file:
53 #
53 #
54 # server writes out line that says file name, how many bytes in
54 # server writes out line that says file name, how many bytes in
55 # file. separator is ascii nul, byte count is string.
55 # file. separator is ascii nul, byte count is string.
56 #
56 #
57 # server writes out raw file data.
57 # server writes out raw file data.
58
58
59 def stream_out(repo, fileobj, untrusted=False):
59 def stream_out(repo, fileobj, untrusted=False):
60 '''stream out all metadata files in repository.
60 '''stream out all metadata files in repository.
61 writes to file-like object, must support write() and optional flush().'''
61 writes to file-like object, must support write() and optional flush().'''
62
62
63 if not repo.ui.configbool('server', 'uncompressed', untrusted=untrusted):
63 if not repo.ui.configbool('server', 'uncompressed', untrusted=untrusted):
64 fileobj.write('1\n')
64 fileobj.write('1\n')
65 return
65 return
66
66
67 # get consistent snapshot of repo. lock during scan so lock not
67 # get consistent snapshot of repo. lock during scan so lock not
68 # needed while we stream, and commits can happen.
68 # needed while we stream, and commits can happen.
69 lock = None
69 try:
70 try:
70 repolock = repo.lock()
71 try:
71 except (lock.LockHeld, lock.LockUnavailable), inst:
72 repolock = repo.lock()
72 repo.ui.warn('locking the repository failed: %s\n' % (inst,))
73 except (lock.LockHeld, lock.LockUnavailable), inst:
73 fileobj.write('2\n')
74 repo.ui.warn('locking the repository failed: %s\n' % (inst,))
74 return
75 fileobj.write('2\n')
76 return
75
77
76 fileobj.write('0\n')
78 fileobj.write('0\n')
77 repo.ui.debug('scanning\n')
79 repo.ui.debug('scanning\n')
78 entries = []
80 entries = []
79 total_bytes = 0
81 total_bytes = 0
80 for name, size in walkrepo(repo.spath):
82 for name, size in walkrepo(repo.spath):
81 name = repo.decodefn(util.pconvert(name))
83 name = repo.decodefn(util.pconvert(name))
82 entries.append((name, size))
84 entries.append((name, size))
83 total_bytes += size
85 total_bytes += size
84 repolock.release()
86 finally:
87 del repolock
85
88
86 repo.ui.debug('%d files, %d bytes to transfer\n' %
89 repo.ui.debug('%d files, %d bytes to transfer\n' %
87 (len(entries), total_bytes))
90 (len(entries), total_bytes))
88 fileobj.write('%d %d\n' % (len(entries), total_bytes))
91 fileobj.write('%d %d\n' % (len(entries), total_bytes))
89 for name, size in entries:
92 for name, size in entries:
90 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
93 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
91 fileobj.write('%s\0%d\n' % (name, size))
94 fileobj.write('%s\0%d\n' % (name, size))
92 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
95 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
93 fileobj.write(chunk)
96 fileobj.write(chunk)
94 flush = getattr(fileobj, 'flush', None)
97 flush = getattr(fileobj, 'flush', None)
95 if flush: flush()
98 if flush: flush()
@@ -1,208 +1,213 b''
1 # verify.py - repository integrity checking for Mercurial
1 # verify.py - repository integrity checking for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import revlog, mdiff
10 import revlog, mdiff
11
11
12 def verify(repo):
12 def verify(repo):
13 lock = repo.lock()
14 try:
15 return _verify(repo)
16 finally:
17 del lock
18
19 def _verify(repo):
13 filelinkrevs = {}
20 filelinkrevs = {}
14 filenodes = {}
21 filenodes = {}
15 changesets = revisions = files = 0
22 changesets = revisions = files = 0
16 errors = [0]
23 errors = [0]
17 warnings = [0]
24 warnings = [0]
18 neededmanifests = {}
25 neededmanifests = {}
19
26
20 lock = repo.lock()
21
22 def err(msg):
27 def err(msg):
23 repo.ui.warn(msg + "\n")
28 repo.ui.warn(msg + "\n")
24 errors[0] += 1
29 errors[0] += 1
25
30
26 def warn(msg):
31 def warn(msg):
27 repo.ui.warn(msg + "\n")
32 repo.ui.warn(msg + "\n")
28 warnings[0] += 1
33 warnings[0] += 1
29
34
30 def checksize(obj, name):
35 def checksize(obj, name):
31 d = obj.checksize()
36 d = obj.checksize()
32 if d[0]:
37 if d[0]:
33 err(_("%s data length off by %d bytes") % (name, d[0]))
38 err(_("%s data length off by %d bytes") % (name, d[0]))
34 if d[1]:
39 if d[1]:
35 err(_("%s index contains %d extra bytes") % (name, d[1]))
40 err(_("%s index contains %d extra bytes") % (name, d[1]))
36
41
37 def checkversion(obj, name):
42 def checkversion(obj, name):
38 if obj.version != revlog.REVLOGV0:
43 if obj.version != revlog.REVLOGV0:
39 if not revlogv1:
44 if not revlogv1:
40 warn(_("warning: `%s' uses revlog format 1") % name)
45 warn(_("warning: `%s' uses revlog format 1") % name)
41 elif revlogv1:
46 elif revlogv1:
42 warn(_("warning: `%s' uses revlog format 0") % name)
47 warn(_("warning: `%s' uses revlog format 0") % name)
43
48
44 revlogv1 = repo.changelog.version != revlog.REVLOGV0
49 revlogv1 = repo.changelog.version != revlog.REVLOGV0
45 if repo.ui.verbose or not revlogv1:
50 if repo.ui.verbose or not revlogv1:
46 repo.ui.status(_("repository uses revlog format %d\n") %
51 repo.ui.status(_("repository uses revlog format %d\n") %
47 (revlogv1 and 1 or 0))
52 (revlogv1 and 1 or 0))
48
53
49 seen = {}
54 seen = {}
50 repo.ui.status(_("checking changesets\n"))
55 repo.ui.status(_("checking changesets\n"))
51 checksize(repo.changelog, "changelog")
56 checksize(repo.changelog, "changelog")
52
57
53 for i in xrange(repo.changelog.count()):
58 for i in xrange(repo.changelog.count()):
54 changesets += 1
59 changesets += 1
55 n = repo.changelog.node(i)
60 n = repo.changelog.node(i)
56 l = repo.changelog.linkrev(n)
61 l = repo.changelog.linkrev(n)
57 if l != i:
62 if l != i:
58 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
63 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
59 if n in seen:
64 if n in seen:
60 err(_("duplicate changeset at revision %d") % i)
65 err(_("duplicate changeset at revision %d") % i)
61 seen[n] = 1
66 seen[n] = 1
62
67
63 for p in repo.changelog.parents(n):
68 for p in repo.changelog.parents(n):
64 if p not in repo.changelog.nodemap:
69 if p not in repo.changelog.nodemap:
65 err(_("changeset %s has unknown parent %s") %
70 err(_("changeset %s has unknown parent %s") %
66 (short(n), short(p)))
71 (short(n), short(p)))
67 try:
72 try:
68 changes = repo.changelog.read(n)
73 changes = repo.changelog.read(n)
69 except KeyboardInterrupt:
74 except KeyboardInterrupt:
70 repo.ui.warn(_("interrupted"))
75 repo.ui.warn(_("interrupted"))
71 raise
76 raise
72 except Exception, inst:
77 except Exception, inst:
73 err(_("unpacking changeset %s: %s") % (short(n), inst))
78 err(_("unpacking changeset %s: %s") % (short(n), inst))
74 continue
79 continue
75
80
76 neededmanifests[changes[0]] = n
81 neededmanifests[changes[0]] = n
77
82
78 for f in changes[3]:
83 for f in changes[3]:
79 filelinkrevs.setdefault(f, []).append(i)
84 filelinkrevs.setdefault(f, []).append(i)
80
85
81 seen = {}
86 seen = {}
82 repo.ui.status(_("checking manifests\n"))
87 repo.ui.status(_("checking manifests\n"))
83 checkversion(repo.manifest, "manifest")
88 checkversion(repo.manifest, "manifest")
84 checksize(repo.manifest, "manifest")
89 checksize(repo.manifest, "manifest")
85
90
86 for i in xrange(repo.manifest.count()):
91 for i in xrange(repo.manifest.count()):
87 n = repo.manifest.node(i)
92 n = repo.manifest.node(i)
88 l = repo.manifest.linkrev(n)
93 l = repo.manifest.linkrev(n)
89
94
90 if l < 0 or l >= repo.changelog.count():
95 if l < 0 or l >= repo.changelog.count():
91 err(_("bad manifest link (%d) at revision %d") % (l, i))
96 err(_("bad manifest link (%d) at revision %d") % (l, i))
92
97
93 if n in neededmanifests:
98 if n in neededmanifests:
94 del neededmanifests[n]
99 del neededmanifests[n]
95
100
96 if n in seen:
101 if n in seen:
97 err(_("duplicate manifest at revision %d") % i)
102 err(_("duplicate manifest at revision %d") % i)
98
103
99 seen[n] = 1
104 seen[n] = 1
100
105
101 for p in repo.manifest.parents(n):
106 for p in repo.manifest.parents(n):
102 if p not in repo.manifest.nodemap:
107 if p not in repo.manifest.nodemap:
103 err(_("manifest %s has unknown parent %s") %
108 err(_("manifest %s has unknown parent %s") %
104 (short(n), short(p)))
109 (short(n), short(p)))
105
110
106 try:
111 try:
107 for f, fn in repo.manifest.readdelta(n).iteritems():
112 for f, fn in repo.manifest.readdelta(n).iteritems():
108 filenodes.setdefault(f, {})[fn] = 1
113 filenodes.setdefault(f, {})[fn] = 1
109 except KeyboardInterrupt:
114 except KeyboardInterrupt:
110 repo.ui.warn(_("interrupted"))
115 repo.ui.warn(_("interrupted"))
111 raise
116 raise
112 except Exception, inst:
117 except Exception, inst:
113 err(_("reading delta for manifest %s: %s") % (short(n), inst))
118 err(_("reading delta for manifest %s: %s") % (short(n), inst))
114 continue
119 continue
115
120
116 repo.ui.status(_("crosschecking files in changesets and manifests\n"))
121 repo.ui.status(_("crosschecking files in changesets and manifests\n"))
117
122
118 for m, c in neededmanifests.items():
123 for m, c in neededmanifests.items():
119 err(_("Changeset %s refers to unknown manifest %s") %
124 err(_("Changeset %s refers to unknown manifest %s") %
120 (short(m), short(c)))
125 (short(m), short(c)))
121 del neededmanifests
126 del neededmanifests
122
127
123 for f in filenodes:
128 for f in filenodes:
124 if f not in filelinkrevs:
129 if f not in filelinkrevs:
125 err(_("file %s in manifest but not in changesets") % f)
130 err(_("file %s in manifest but not in changesets") % f)
126
131
127 for f in filelinkrevs:
132 for f in filelinkrevs:
128 if f not in filenodes:
133 if f not in filenodes:
129 err(_("file %s in changeset but not in manifest") % f)
134 err(_("file %s in changeset but not in manifest") % f)
130
135
131 repo.ui.status(_("checking files\n"))
136 repo.ui.status(_("checking files\n"))
132 ff = filenodes.keys()
137 ff = filenodes.keys()
133 ff.sort()
138 ff.sort()
134 for f in ff:
139 for f in ff:
135 if f == "/dev/null":
140 if f == "/dev/null":
136 continue
141 continue
137 files += 1
142 files += 1
138 if not f:
143 if not f:
139 err(_("file without name in manifest %s") % short(n))
144 err(_("file without name in manifest %s") % short(n))
140 continue
145 continue
141 fl = repo.file(f)
146 fl = repo.file(f)
142 checkversion(fl, f)
147 checkversion(fl, f)
143 checksize(fl, f)
148 checksize(fl, f)
144
149
145 nodes = {nullid: 1}
150 nodes = {nullid: 1}
146 seen = {}
151 seen = {}
147 for i in xrange(fl.count()):
152 for i in xrange(fl.count()):
148 revisions += 1
153 revisions += 1
149 n = fl.node(i)
154 n = fl.node(i)
150
155
151 if n in seen:
156 if n in seen:
152 err(_("%s: duplicate revision %d") % (f, i))
157 err(_("%s: duplicate revision %d") % (f, i))
153 if n not in filenodes[f]:
158 if n not in filenodes[f]:
154 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
159 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
155 else:
160 else:
156 del filenodes[f][n]
161 del filenodes[f][n]
157
162
158 flr = fl.linkrev(n)
163 flr = fl.linkrev(n)
159 if flr not in filelinkrevs.get(f, []):
164 if flr not in filelinkrevs.get(f, []):
160 err(_("%s:%s points to unexpected changeset %d")
165 err(_("%s:%s points to unexpected changeset %d")
161 % (f, short(n), flr))
166 % (f, short(n), flr))
162 else:
167 else:
163 filelinkrevs[f].remove(flr)
168 filelinkrevs[f].remove(flr)
164
169
165 # verify contents
170 # verify contents
166 try:
171 try:
167 t = fl.read(n)
172 t = fl.read(n)
168 except KeyboardInterrupt:
173 except KeyboardInterrupt:
169 repo.ui.warn(_("interrupted"))
174 repo.ui.warn(_("interrupted"))
170 raise
175 raise
171 except Exception, inst:
176 except Exception, inst:
172 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
177 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
173
178
174 # verify parents
179 # verify parents
175 (p1, p2) = fl.parents(n)
180 (p1, p2) = fl.parents(n)
176 if p1 not in nodes:
181 if p1 not in nodes:
177 err(_("file %s:%s unknown parent 1 %s") %
182 err(_("file %s:%s unknown parent 1 %s") %
178 (f, short(n), short(p1)))
183 (f, short(n), short(p1)))
179 if p2 not in nodes:
184 if p2 not in nodes:
180 err(_("file %s:%s unknown parent 2 %s") %
185 err(_("file %s:%s unknown parent 2 %s") %
181 (f, short(n), short(p1)))
186 (f, short(n), short(p1)))
182 nodes[n] = 1
187 nodes[n] = 1
183
188
184 # check renames
189 # check renames
185 try:
190 try:
186 rp = fl.renamed(n)
191 rp = fl.renamed(n)
187 if rp:
192 if rp:
188 fl2 = repo.file(rp[0])
193 fl2 = repo.file(rp[0])
189 rev = fl2.rev(rp[1])
194 rev = fl2.rev(rp[1])
190 except KeyboardInterrupt:
195 except KeyboardInterrupt:
191 repo.ui.warn(_("interrupted"))
196 repo.ui.warn(_("interrupted"))
192 raise
197 raise
193 except Exception, inst:
198 except Exception, inst:
194 err(_("checking rename on file %s %s: %s") % (f, short(n), inst))
199 err(_("checking rename on file %s %s: %s") % (f, short(n), inst))
195
200
196 # cross-check
201 # cross-check
197 for node in filenodes[f]:
202 for node in filenodes[f]:
198 err(_("node %s in manifests not in %s") % (hex(node), f))
203 err(_("node %s in manifests not in %s") % (hex(node), f))
199
204
200 repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
205 repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
201 (files, changesets, revisions))
206 (files, changesets, revisions))
202
207
203 if warnings[0]:
208 if warnings[0]:
204 repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
209 repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
205 if errors[0]:
210 if errors[0]:
206 repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
211 repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
207 return 1
212 return 1
208
213
General Comments 0
You need to be logged in to leave comments. Login now