Merge with crew
Matt Mackall - r2803:987c31e2 merge default
@@ -0,0 +1,93 @@
1 # fetch.py - pull and merge remote changes
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 from mercurial.demandload import *
9 from mercurial.i18n import gettext as _
10 from mercurial.node import *
11 demandload(globals(), 'mercurial:commands,hg,node,util')
12
13 def fetch(ui, repo, source='default', **opts):
14 '''Pull changes from a remote repository, merge new changes if needed.
15
16 This finds all changes from the repository at the specified path
17 or URL and adds them to the local repository.
18
19 If the pulled changes add a new head, the head is automatically
20 merged, and the result of the merge is committed. Otherwise, the
21 working directory is updated.'''
22
23 def postincoming(other, modheads):
24 if modheads == 0:
25 return 0
26 if modheads == 1:
27 return commands.doupdate(ui, repo)
28 newheads = repo.heads(parent)
29 newchildren = [n for n in repo.heads(parent) if n != parent]
30 newparent = parent
31 if newchildren:
32 commands.doupdate(ui, repo, node=hex(newchildren[0]))
33 newparent = newchildren[0]
34 newheads = [n for n in repo.heads() if n != newparent]
35 err = False
36 if newheads:
37 ui.status(_('merging with new head %d:%s\n') %
38 (repo.changelog.rev(newheads[0]), short(newheads[0])))
39 err = repo.update(newheads[0], allow=True, remind=False)
40 if not err and len(newheads) > 1:
41 ui.status(_('not merging with %d other new heads '
42 '(use "hg heads" and "hg merge" to merge them)') %
43 (len(newheads) - 1))
44 if not err:
45 mod, add, rem = repo.status()[:3]
46 message = (commands.logmessage(opts) or
47 (_('Automated merge with %s') % other.url()))
48 n = repo.commit(mod + add + rem, message,
49 opts['user'], opts['date'],
50 force_editor=opts.get('force_editor'))
51 ui.status(_('new changeset %d:%s merges remote changes '
52 'with local\n') % (repo.changelog.rev(n),
53 short(n)))
54 def pull():
55 commands.setremoteconfig(ui, opts)
56
57 other = hg.repository(ui, ui.expandpath(source))
58 ui.status(_('pulling from %s\n') % source)
59 revs = None
60 if opts['rev'] and not other.local():
61 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
62 elif opts['rev']:
63 revs = [other.lookup(rev) for rev in opts['rev']]
64 modheads = repo.pull(other, heads=revs)
65 return postincoming(other, modheads)
66
67 parent, p2 = repo.dirstate.parents()
68 if parent != repo.changelog.tip():
69 raise util.Abort(_('working dir not at tip '
70 '(use "hg update" to check out tip)'))
71 if p2 != nullid:
72 raise util.Abort(_('outstanding uncommitted merge'))
73 mod, add, rem = repo.status()[:3]
74 if mod or add or rem:
75 raise util.Abort(_('outstanding uncommitted changes'))
76 if len(repo.heads()) > 1:
77 raise util.Abort(_('multiple heads in this repository '
78 '(use "hg heads" and "hg merge" to merge them)'))
79 return pull()
80
81 cmdtable = {
82 'fetch':
83 (fetch,
84 [('e', 'ssh', '', _('specify ssh command to use')),
85 ('m', 'message', '', _('use <text> as commit message')),
86 ('l', 'logfile', '', _('read the commit message from <file>')),
87 ('d', 'date', '', _('record datecode as commit date')),
88 ('u', 'user', '', _('record user as committer')),
89 ('r', 'rev', [], _('a specific revision you would like to pull')),
90 ('f', 'force-editor', None, _('edit commit message')),
91 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
92 'hg fetch [SOURCE]'),
93 }
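
The interesting logic above is in postincoming: depending on how many heads the pull touched, fetch either does nothing, fast-forwards the working directory, or merges the single new remote head and commits the result. Below is a minimal stand-alone sketch of that decision in plain Python; do_update and do_merge_and_commit are hypothetical callbacks standing in for the real Mercurial calls, not actual APIs.

# Sketch only: mirrors the head-count decision in postincoming above.
# do_update / do_merge_and_commit are hypothetical placeholders.
def after_pull(modheads, new_heads, do_update, do_merge_and_commit):
    if modheads == 0:
        return 0                   # pull brought nothing new
    if modheads == 1:
        return do_update()         # history simply advanced
    # several heads changed: merge exactly one of the other new heads
    err = 0
    if new_heads:
        err = do_merge_and_commit(new_heads[0])
        if not err and len(new_heads) > 1:
            print('not merging with %d other new heads' % (len(new_heads) - 1))
    return err

# example call with dummy callbacks:
# after_pull(2, ['deadbeef'], lambda: 0, lambda head: 0)
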
@@ -1,1680 +1,1677 @@
1
1
2 # queue.py - patch queues for mercurial
2 # queue.py - patch queues for mercurial
3 #
3 #
4 # Copyright 2005 Chris Mason <mason@suse.com>
4 # Copyright 2005 Chris Mason <mason@suse.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 '''patch management and development
9 '''patch management and development
10
10
11 This extension lets you work with a stack of patches in a Mercurial
11 This extension lets you work with a stack of patches in a Mercurial
12 repository. It manages two stacks of patches - all known patches, and
12 repository. It manages two stacks of patches - all known patches, and
13 applied patches (subset of known patches).
13 applied patches (subset of known patches).
14
14
15 Known patches are represented as patch files in the .hg/patches
15 Known patches are represented as patch files in the .hg/patches
16 directory. Applied patches are both patch files and changesets.
16 directory. Applied patches are both patch files and changesets.
17
17
18 Common tasks (use "hg help command" for more details):
18 Common tasks (use "hg help command" for more details):
19
19
20 prepare repository to work with patches qinit
20 prepare repository to work with patches qinit
21 create new patch qnew
21 create new patch qnew
22 import existing patch qimport
22 import existing patch qimport
23
23
24 print patch series qseries
24 print patch series qseries
25 print applied patches qapplied
25 print applied patches qapplied
26 print name of top applied patch qtop
26 print name of top applied patch qtop
27
27
28 add known patch to applied stack qpush
28 add known patch to applied stack qpush
29 remove patch from applied stack qpop
29 remove patch from applied stack qpop
30 refresh contents of top applied patch qrefresh
30 refresh contents of top applied patch qrefresh
31 '''
31 '''
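
The two stacks this docstring describes are kept in two small text files under .hg/patches: series lists every known patch in order, and status lists the applied subset as one "rev:name" line per applied changeset. A tiny illustration of how the two relate, with made-up file contents rather than real repository data:

# Illustration only; patch names and the revision hash are made up.
series = ["fix-readme.patch", "add-tests.patch", "cleanup.patch"]
status = ["35fb62a3a673:fix-readme.patch"]    # "rev:name" lines

applied = [line.split(':', 1)[1] for line in status]
unapplied = [p for p in series if p not in applied]

print("applied:   " + ", ".join(applied))      # fix-readme.patch
print("unapplied: " + ", ".join(unapplied))    # add-tests.patch, cleanup.patch
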
32
32
33 from mercurial.demandload import *
33 from mercurial.demandload import *
34 demandload(globals(), "os sys re struct traceback errno bz2")
34 demandload(globals(), "os sys re struct traceback errno bz2")
35 from mercurial.i18n import gettext as _
35 from mercurial.i18n import gettext as _
36 from mercurial import ui, hg, revlog, commands, util
36 from mercurial import ui, hg, revlog, commands, util
37
37
38 versionstr = "0.45"
38 versionstr = "0.45"
39
39
40 commands.norepo += " qclone qversion"
40 commands.norepo += " qclone qversion"
41
41
42 class StatusEntry:
43 def __init__(self, rev, name=None):
44 if not name:
45 self.rev, self.name = rev.split(':')
46 else:
47 self.rev, self.name = rev, name
48
49 def __str__(self):
50 return self.rev + ':' + self.name
51
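
This new StatusEntry class is the core of the queue.py change: the rest of the diff replaces ad-hoc "hex:name" string splitting and joining with this small wrapper. A quick illustration of the round trip it provides, assuming the class definition above and a made-up changeset hash:

# Illustration only; uses the StatusEntry class shown above.
entry = StatusEntry('35fb62a3a673:fix-readme.patch')
assert entry.rev == '35fb62a3a673'
assert entry.name == 'fix-readme.patch'
assert str(entry) == '35fb62a3a673:fix-readme.patch'

# the two-argument form skips the parsing step
entry2 = StatusEntry('35fb62a3a673', 'fix-readme.patch')
assert str(entry2) == str(entry)
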
42 class queue:
52 class queue:
43 def __init__(self, ui, path, patchdir=None):
53 def __init__(self, ui, path, patchdir=None):
44 self.basepath = path
54 self.basepath = path
45 if patchdir:
55 if patchdir:
46 self.path = patchdir
56 self.path = patchdir
47 else:
57 else:
48 self.path = os.path.join(path, "patches")
58 self.path = os.path.join(path, "patches")
49 self.opener = util.opener(self.path)
59 self.opener = util.opener(self.path)
50 self.ui = ui
60 self.ui = ui
51 self.applied = []
61 self.applied = []
52 self.full_series = []
62 self.full_series = []
53 self.applied_dirty = 0
63 self.applied_dirty = 0
54 self.series_dirty = 0
64 self.series_dirty = 0
55 self.series_path = "series"
65 self.series_path = "series"
56 self.status_path = "status"
66 self.status_path = "status"
57
67
58 if os.path.exists(os.path.join(self.path, self.series_path)):
68 if os.path.exists(os.path.join(self.path, self.series_path)):
59 self.full_series = self.opener(self.series_path).read().splitlines()
69 self.full_series = self.opener(self.series_path).read().splitlines()
60 self.parse_series()
70 self.parse_series()
61
71
62 if os.path.exists(os.path.join(self.path, self.status_path)):
72 if os.path.exists(os.path.join(self.path, self.status_path)):
63 self.applied = self.opener(self.status_path).read().splitlines()
73 self.applied = [StatusEntry(l)
74 for l in self.opener(self.status_path).read().splitlines()]
64
75
65 def find_series(self, patch):
76 def find_series(self, patch):
66 pre = re.compile("(\s*)([^#]+)")
77 pre = re.compile("(\s*)([^#]+)")
67 index = 0
78 index = 0
68 for l in self.full_series:
79 for l in self.full_series:
69 m = pre.match(l)
80 m = pre.match(l)
70 if m:
81 if m:
71 s = m.group(2)
82 s = m.group(2)
72 s = s.rstrip()
83 s = s.rstrip()
73 if s == patch:
84 if s == patch:
74 return index
85 return index
75 index += 1
86 index += 1
76 return None
87 return None
77
88
78 def parse_series(self):
89 def parse_series(self):
79 self.series = []
90 self.series = []
80 for l in self.full_series:
91 for l in self.full_series:
81 s = l.split('#', 1)[0].strip()
92 s = l.split('#', 1)[0].strip()
82 if s:
93 if s:
83 self.series.append(s)
94 self.series.append(s)
84
95
85 def save_dirty(self):
96 def save_dirty(self):
86 def write_list(items, path):
97 def write_list(items, path):
87 fp = self.opener(path, 'w')
98 fp = self.opener(path, 'w')
88 for i in items:
99 for i in items:
89 print >> fp, i
100 print >> fp, i
90 fp.close()
101 fp.close()
91 if self.applied_dirty: write_list(self.applied, self.status_path)
102 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
92 if self.series_dirty: write_list(self.full_series, self.series_path)
103 if self.series_dirty: write_list(self.full_series, self.series_path)
93
104
94 def readheaders(self, patch):
105 def readheaders(self, patch):
95 def eatdiff(lines):
106 def eatdiff(lines):
96 while lines:
107 while lines:
97 l = lines[-1]
108 l = lines[-1]
98 if (l.startswith("diff -") or
109 if (l.startswith("diff -") or
99 l.startswith("Index:") or
110 l.startswith("Index:") or
100 l.startswith("===========")):
111 l.startswith("===========")):
101 del lines[-1]
112 del lines[-1]
102 else:
113 else:
103 break
114 break
104 def eatempty(lines):
115 def eatempty(lines):
105 while lines:
116 while lines:
106 l = lines[-1]
117 l = lines[-1]
107 if re.match('\s*$', l):
118 if re.match('\s*$', l):
108 del lines[-1]
119 del lines[-1]
109 else:
120 else:
110 break
121 break
111
122
112 pf = os.path.join(self.path, patch)
123 pf = os.path.join(self.path, patch)
113 message = []
124 message = []
114 comments = []
125 comments = []
115 user = None
126 user = None
116 date = None
127 date = None
117 format = None
128 format = None
118 subject = None
129 subject = None
119 diffstart = 0
130 diffstart = 0
120
131
121 for line in file(pf):
132 for line in file(pf):
122 line = line.rstrip()
133 line = line.rstrip()
123 if diffstart:
134 if diffstart:
124 if line.startswith('+++ '):
135 if line.startswith('+++ '):
125 diffstart = 2
136 diffstart = 2
126 break
137 break
127 if line.startswith("--- "):
138 if line.startswith("--- "):
128 diffstart = 1
139 diffstart = 1
129 continue
140 continue
130 elif format == "hgpatch":
141 elif format == "hgpatch":
131 # parse values when importing the result of an hg export
142 # parse values when importing the result of an hg export
132 if line.startswith("# User "):
143 if line.startswith("# User "):
133 user = line[7:]
144 user = line[7:]
134 elif line.startswith("# Date "):
145 elif line.startswith("# Date "):
135 date = line[7:]
146 date = line[7:]
136 elif not line.startswith("# ") and line:
147 elif not line.startswith("# ") and line:
137 message.append(line)
148 message.append(line)
138 format = None
149 format = None
139 elif line == '# HG changeset patch':
150 elif line == '# HG changeset patch':
140 format = "hgpatch"
151 format = "hgpatch"
141 elif (format != "tagdone" and (line.startswith("Subject: ") or
152 elif (format != "tagdone" and (line.startswith("Subject: ") or
142 line.startswith("subject: "))):
153 line.startswith("subject: "))):
143 subject = line[9:]
154 subject = line[9:]
144 format = "tag"
155 format = "tag"
145 elif (format != "tagdone" and (line.startswith("From: ") or
156 elif (format != "tagdone" and (line.startswith("From: ") or
146 line.startswith("from: "))):
157 line.startswith("from: "))):
147 user = line[6:]
158 user = line[6:]
148 format = "tag"
159 format = "tag"
149 elif format == "tag" and line == "":
160 elif format == "tag" and line == "":
150 # when looking for tags (subject: from: etc) they
161 # when looking for tags (subject: from: etc) they
151 # end once you find a blank line in the source
162 # end once you find a blank line in the source
152 format = "tagdone"
163 format = "tagdone"
153 elif message or line:
164 elif message or line:
154 message.append(line)
165 message.append(line)
155 comments.append(line)
166 comments.append(line)
156
167
157 eatdiff(message)
168 eatdiff(message)
158 eatdiff(comments)
169 eatdiff(comments)
159 eatempty(message)
170 eatempty(message)
160 eatempty(comments)
171 eatempty(comments)
161
172
162 # make sure message isn't empty
173 # make sure message isn't empty
163 if format and format.startswith("tag") and subject:
174 if format and format.startswith("tag") and subject:
164 message.insert(0, "")
175 message.insert(0, "")
165 message.insert(0, subject)
176 message.insert(0, subject)
166 return (message, comments, user, date, diffstart > 1)
177 return (message, comments, user, date, diffstart > 1)
167
178
168 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
179 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
169 # first try just applying the patch
180 # first try just applying the patch
170 (err, n) = self.apply(repo, [ patch ], update_status=False,
181 (err, n) = self.apply(repo, [ patch ], update_status=False,
171 strict=True, merge=rev, wlock=wlock)
182 strict=True, merge=rev, wlock=wlock)
172
183
173 if err == 0:
184 if err == 0:
174 return (err, n)
185 return (err, n)
175
186
176 if n is None:
187 if n is None:
177 raise util.Abort(_("apply failed for patch %s") % patch)
188 raise util.Abort(_("apply failed for patch %s") % patch)
178
189
179 self.ui.warn("patch didn't work out, merging %s\n" % patch)
190 self.ui.warn("patch didn't work out, merging %s\n" % patch)
180
191
181 # apply failed, strip away that rev and merge.
192 # apply failed, strip away that rev and merge.
182 hg.update(repo, head, allow=False, force=True, wlock=wlock)
193 hg.update(repo, head, allow=False, force=True, wlock=wlock)
183 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
194 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
184
195
185 c = repo.changelog.read(rev)
196 c = repo.changelog.read(rev)
186 ret = hg.update(repo, rev, allow=True, wlock=wlock)
197 ret = hg.update(repo, rev, allow=True, wlock=wlock)
187 if ret:
198 if ret:
188 raise util.Abort(_("update returned %d") % ret)
199 raise util.Abort(_("update returned %d") % ret)
189 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
200 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
190 if n == None:
201 if n == None:
191 raise util.Abort(_("repo commit failed"))
202 raise util.Abort(_("repo commit failed"))
192 try:
203 try:
193 message, comments, user, date, patchfound = mergeq.readheaders(patch)
204 message, comments, user, date, patchfound = mergeq.readheaders(patch)
194 except:
205 except:
195 raise util.Abort(_("unable to read %s") % patch)
206 raise util.Abort(_("unable to read %s") % patch)
196
207
197 patchf = self.opener(patch, "w")
208 patchf = self.opener(patch, "w")
198 if comments:
209 if comments:
199 comments = "\n".join(comments) + '\n\n'
210 comments = "\n".join(comments) + '\n\n'
200 patchf.write(comments)
211 patchf.write(comments)
201 commands.dodiff(patchf, self.ui, repo, head, n)
212 commands.dodiff(patchf, self.ui, repo, head, n)
202 patchf.close()
213 patchf.close()
203 return (0, n)
214 return (0, n)
204
215
205 def qparents(self, repo, rev=None):
216 def qparents(self, repo, rev=None):
206 if rev is None:
217 if rev is None:
207 (p1, p2) = repo.dirstate.parents()
218 (p1, p2) = repo.dirstate.parents()
208 if p2 == revlog.nullid:
219 if p2 == revlog.nullid:
209 return p1
220 return p1
210 if len(self.applied) == 0:
221 if len(self.applied) == 0:
211 return None
222 return None
212 (top, patch) = self.applied[-1].split(':')
223 return revlog.bin(self.applied[-1].rev)
213 top = revlog.bin(top)
214 return top
215 pp = repo.changelog.parents(rev)
224 pp = repo.changelog.parents(rev)
216 if pp[1] != revlog.nullid:
225 if pp[1] != revlog.nullid:
217 arevs = [ x.split(':')[0] for x in self.applied ]
226 arevs = [ x.rev for x in self.applied ]
218 p0 = revlog.hex(pp[0])
227 p0 = revlog.hex(pp[0])
219 p1 = revlog.hex(pp[1])
228 p1 = revlog.hex(pp[1])
220 if p0 in arevs:
229 if p0 in arevs:
221 return pp[0]
230 return pp[0]
222 if p1 in arevs:
231 if p1 in arevs:
223 return pp[1]
232 return pp[1]
224 return pp[0]
233 return pp[0]
225
234
226 def mergepatch(self, repo, mergeq, series, wlock):
235 def mergepatch(self, repo, mergeq, series, wlock):
227 if len(self.applied) == 0:
236 if len(self.applied) == 0:
228 # each of the patches merged in will have two parents. This
237 # each of the patches merged in will have two parents. This
229 # can confuse the qrefresh, qdiff, and strip code because it
238 # can confuse the qrefresh, qdiff, and strip code because it
230 # needs to know which parent is actually in the patch queue.
239 # needs to know which parent is actually in the patch queue.
231 # so, we insert a merge marker with only one parent. This way
240 # so, we insert a merge marker with only one parent. This way
232 # the first patch in the queue is never a merge patch
241 # the first patch in the queue is never a merge patch
233 #
242 #
234 pname = ".hg.patches.merge.marker"
243 pname = ".hg.patches.merge.marker"
235 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
244 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
236 wlock=wlock)
245 wlock=wlock)
237 self.applied.append(revlog.hex(n) + ":" + pname)
246 self.applied.append(StatusEntry(revlog.hex(n), pname))
238 self.applied_dirty = 1
247 self.applied_dirty = 1
239
248
240 head = self.qparents(repo)
249 head = self.qparents(repo)
241
250
242 for patch in series:
251 for patch in series:
243 patch = mergeq.lookup(patch, strict=True)
252 patch = mergeq.lookup(patch, strict=True)
244 if not patch:
253 if not patch:
245 self.ui.warn("patch %s does not exist\n" % patch)
254 self.ui.warn("patch %s does not exist\n" % patch)
246 return (1, None)
255 return (1, None)
247
256
248 info = mergeq.isapplied(patch)
257 info = mergeq.isapplied(patch)
249 if not info:
258 if not info:
250 self.ui.warn("patch %s is not applied\n" % patch)
259 self.ui.warn("patch %s is not applied\n" % patch)
251 return (1, None)
260 return (1, None)
252 rev = revlog.bin(info[1])
261 rev = revlog.bin(info[1])
253 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
262 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
254 if head:
263 if head:
255 self.applied.append(revlog.hex(head) + ":" + patch)
264 self.applied.append(StatusEntry(revlog.hex(head), patch))
256 self.applied_dirty = 1
265 self.applied_dirty = 1
257 if err:
266 if err:
258 return (err, head)
267 return (err, head)
259 return (0, head)
268 return (0, head)
260
269
261 def patch(self, repo, patchfile):
270 def patch(self, repo, patchfile):
262 '''Apply patchfile to the working directory.
271 '''Apply patchfile to the working directory.
263 patchfile: file name of patch'''
272 patchfile: file name of patch'''
264 try:
273 try:
265 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
274 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
266 f = os.popen("%s -d '%s' -p1 --no-backup-if-mismatch < '%s'" %
275 f = os.popen("%s -d %s -p1 --no-backup-if-mismatch < %s" %
267 (pp, repo.root, patchfile))
276 (pp, util.shellquote(repo.root), util.shellquote(patchfile)))
268 except:
277 except:
269 self.ui.warn("patch failed, unable to continue (try -v)\n")
278 self.ui.warn("patch failed, unable to continue (try -v)\n")
270 return (None, [], False)
279 return (None, [], False)
271 files = []
280 files = []
272 fuzz = False
281 fuzz = False
273 for l in f:
282 for l in f:
274 l = l.rstrip('\r\n');
283 l = l.rstrip('\r\n');
275 if self.ui.verbose:
284 if self.ui.verbose:
276 self.ui.warn(l + "\n")
285 self.ui.warn(l + "\n")
277 if l[:14] == 'patching file ':
286 if l[:14] == 'patching file ':
278 pf = os.path.normpath(l[14:])
287 pf = os.path.normpath(util.parse_patch_output(l))
279 # when patch finds a space in the file name, it puts
280 # single quotes around the filename. strip them off
281 if pf[0] == "'" and pf[-1] == "'":
282 pf = pf[1:-1]
283 if pf not in files:
288 if pf not in files:
284 files.append(pf)
289 files.append(pf)
285 printed_file = False
290 printed_file = False
286 file_str = l
291 file_str = l
287 elif l.find('with fuzz') >= 0:
292 elif l.find('with fuzz') >= 0:
288 if not printed_file:
293 if not printed_file:
289 self.ui.warn(file_str + '\n')
294 self.ui.warn(file_str + '\n')
290 printed_file = True
295 printed_file = True
291 self.ui.warn(l + '\n')
296 self.ui.warn(l + '\n')
292 fuzz = True
297 fuzz = True
293 elif l.find('saving rejects to file') >= 0:
298 elif l.find('saving rejects to file') >= 0:
294 self.ui.warn(l + '\n')
299 self.ui.warn(l + '\n')
295 elif l.find('FAILED') >= 0:
300 elif l.find('FAILED') >= 0:
296 if not printed_file:
301 if not printed_file:
297 self.ui.warn(file_str + '\n')
302 self.ui.warn(file_str + '\n')
298 printed_file = True
303 printed_file = True
299 self.ui.warn(l + '\n')
304 self.ui.warn(l + '\n')
300
305
301 return (not f.close(), files, fuzz)
306 return (not f.close(), files, fuzz)
302
307
303 def apply(self, repo, series, list=False, update_status=True,
308 def apply(self, repo, series, list=False, update_status=True,
304 strict=False, patchdir=None, merge=None, wlock=None):
309 strict=False, patchdir=None, merge=None, wlock=None):
305 # TODO unify with commands.py
310 # TODO unify with commands.py
306 if not patchdir:
311 if not patchdir:
307 patchdir = self.path
312 patchdir = self.path
308 err = 0
313 err = 0
309 if not wlock:
314 if not wlock:
310 wlock = repo.wlock()
315 wlock = repo.wlock()
311 lock = repo.lock()
316 lock = repo.lock()
312 tr = repo.transaction()
317 tr = repo.transaction()
313 n = None
318 n = None
314 for patch in series:
319 for patch in series:
315 self.ui.warn("applying %s\n" % patch)
320 self.ui.warn("applying %s\n" % patch)
316 pf = os.path.join(patchdir, patch)
321 pf = os.path.join(patchdir, patch)
317
322
318 try:
323 try:
319 message, comments, user, date, patchfound = self.readheaders(patch)
324 message, comments, user, date, patchfound = self.readheaders(patch)
320 except:
325 except:
321 self.ui.warn("Unable to read %s\n" % pf)
326 self.ui.warn("Unable to read %s\n" % pf)
322 err = 1
327 err = 1
323 break
328 break
324
329
325 if not message:
330 if not message:
326 message = "imported patch %s\n" % patch
331 message = "imported patch %s\n" % patch
327 else:
332 else:
328 if list:
333 if list:
329 message.append("\nimported patch %s" % patch)
334 message.append("\nimported patch %s" % patch)
330 message = '\n'.join(message)
335 message = '\n'.join(message)
331
336
332 (patcherr, files, fuzz) = self.patch(repo, pf)
337 (patcherr, files, fuzz) = self.patch(repo, pf)
333 patcherr = not patcherr
338 patcherr = not patcherr
334
339
335 if merge and len(files) > 0:
340 if merge and len(files) > 0:
336 # Mark as merged and update dirstate parent info
341 # Mark as merged and update dirstate parent info
337 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
342 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
338 p1, p2 = repo.dirstate.parents()
343 p1, p2 = repo.dirstate.parents()
339 repo.dirstate.setparents(p1, merge)
344 repo.dirstate.setparents(p1, merge)
340 if len(files) > 0:
345 if len(files) > 0:
341 cwd = repo.getcwd()
346 cwd = repo.getcwd()
342 cfiles = files
347 cfiles = files
343 if cwd:
348 if cwd:
344 cfiles = [util.pathto(cwd, f) for f in files]
349 cfiles = [util.pathto(cwd, f) for f in files]
345 commands.addremove_lock(self.ui, repo, cfiles,
350 commands.addremove_lock(self.ui, repo, cfiles,
346 opts={}, wlock=wlock)
351 opts={}, wlock=wlock)
347 n = repo.commit(files, message, user, date, force=1, lock=lock,
352 n = repo.commit(files, message, user, date, force=1, lock=lock,
348 wlock=wlock)
353 wlock=wlock)
349
354
350 if n == None:
355 if n == None:
351 raise util.Abort(_("repo commit failed"))
356 raise util.Abort(_("repo commit failed"))
352
357
353 if update_status:
358 if update_status:
354 self.applied.append(revlog.hex(n) + ":" + patch)
359 self.applied.append(StatusEntry(revlog.hex(n), patch))
355
360
356 if patcherr:
361 if patcherr:
357 if not patchfound:
362 if not patchfound:
358 self.ui.warn("patch %s is empty\n" % patch)
363 self.ui.warn("patch %s is empty\n" % patch)
359 err = 0
364 err = 0
360 else:
365 else:
361 self.ui.warn("patch failed, rejects left in working dir\n")
366 self.ui.warn("patch failed, rejects left in working dir\n")
362 err = 1
367 err = 1
363 break
368 break
364
369
365 if fuzz and strict:
370 if fuzz and strict:
366 self.ui.warn("fuzz found when applying patch, stopping\n")
371 self.ui.warn("fuzz found when applying patch, stopping\n")
367 err = 1
372 err = 1
368 break
373 break
369 tr.close()
374 tr.close()
370 return (err, n)
375 return (err, n)
371
376
372 def delete(self, repo, patch, force=False):
377 def delete(self, repo, patch, force=False):
373 patch = self.lookup(patch, strict=True)
378 patch = self.lookup(patch, strict=True)
374 info = self.isapplied(patch)
379 info = self.isapplied(patch)
375 if info:
380 if info:
376 raise util.Abort(_("cannot delete applied patch %s") % patch)
381 raise util.Abort(_("cannot delete applied patch %s") % patch)
377 if patch not in self.series:
382 if patch not in self.series:
378 raise util.Abort(_("patch %s not in series file") % patch)
383 raise util.Abort(_("patch %s not in series file") % patch)
379 if force:
384 if force:
380 r = self.qrepo()
385 r = self.qrepo()
381 if r:
386 if r:
382 r.remove([patch], True)
387 r.remove([patch], True)
383 else:
388 else:
384 os.unlink(os.path.join(self.path, patch))
389 os.unlink(os.path.join(self.path, patch))
385 i = self.find_series(patch)
390 i = self.find_series(patch)
386 del self.full_series[i]
391 del self.full_series[i]
387 self.parse_series()
392 self.parse_series()
388 self.series_dirty = 1
393 self.series_dirty = 1
389
394
390 def check_toppatch(self, repo):
395 def check_toppatch(self, repo):
391 if len(self.applied) > 0:
396 if len(self.applied) > 0:
392 (top, patch) = self.applied[-1].split(':')
397 top = revlog.bin(self.applied[-1].rev)
393 top = revlog.bin(top)
394 pp = repo.dirstate.parents()
398 pp = repo.dirstate.parents()
395 if top not in pp:
399 if top not in pp:
396 raise util.Abort(_("queue top not at same revision as working directory"))
400 raise util.Abort(_("queue top not at same revision as working directory"))
397 return top
401 return top
398 return None
402 return None
399 def check_localchanges(self, repo):
403 def check_localchanges(self, repo):
400 (c, a, r, d, u) = repo.changes(None, None)
404 (c, a, r, d, u) = repo.changes(None, None)
401 if c or a or d or r:
405 if c or a or d or r:
402 raise util.Abort(_("local changes found, refresh first"))
406 raise util.Abort(_("local changes found, refresh first"))
403 def new(self, repo, patch, msg=None, force=None):
407 def new(self, repo, patch, msg=None, force=None):
404 if os.path.exists(os.path.join(self.path, patch)):
408 if os.path.exists(os.path.join(self.path, patch)):
405 raise util.Abort(_('patch "%s" already exists') % patch)
409 raise util.Abort(_('patch "%s" already exists') % patch)
406 commitfiles = []
410 commitfiles = []
407 (c, a, r, d, u) = repo.changes(None, None)
411 (c, a, r, d, u) = repo.changes(None, None)
408 if c or a or d or r:
412 if c or a or d or r:
409 if not force:
413 if not force:
410 raise util.Abort(_("local changes found, refresh first"))
414 raise util.Abort(_("local changes found, refresh first"))
411 commitfiles = c + a + r
415 commitfiles = c + a + r
412 self.check_toppatch(repo)
416 self.check_toppatch(repo)
413 wlock = repo.wlock()
417 wlock = repo.wlock()
414 insert = self.full_series_end()
418 insert = self.full_series_end()
415 if msg:
419 if msg:
416 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
420 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
417 wlock=wlock)
421 wlock=wlock)
418 else:
422 else:
419 n = repo.commit(commitfiles,
423 n = repo.commit(commitfiles,
420 "New patch: %s" % patch, force=True, wlock=wlock)
424 "New patch: %s" % patch, force=True, wlock=wlock)
421 if n == None:
425 if n == None:
422 raise util.Abort(_("repo commit failed"))
426 raise util.Abort(_("repo commit failed"))
423 self.full_series[insert:insert] = [patch]
427 self.full_series[insert:insert] = [patch]
424 self.applied.append(revlog.hex(n) + ":" + patch)
428 self.applied.append(StatusEntry(revlog.hex(n), patch))
425 self.parse_series()
429 self.parse_series()
426 self.series_dirty = 1
430 self.series_dirty = 1
427 self.applied_dirty = 1
431 self.applied_dirty = 1
428 p = self.opener(patch, "w")
432 p = self.opener(patch, "w")
429 if msg:
433 if msg:
430 msg = msg + "\n"
434 msg = msg + "\n"
431 p.write(msg)
435 p.write(msg)
432 p.close()
436 p.close()
433 wlock = None
437 wlock = None
434 r = self.qrepo()
438 r = self.qrepo()
435 if r: r.add([patch])
439 if r: r.add([patch])
436 if commitfiles:
440 if commitfiles:
437 self.refresh(repo, msg=None, short=True)
441 self.refresh(repo, msg=None, short=True)
438
442
439 def strip(self, repo, rev, update=True, backup="all", wlock=None):
443 def strip(self, repo, rev, update=True, backup="all", wlock=None):
440 def limitheads(chlog, stop):
444 def limitheads(chlog, stop):
441 """return the list of all nodes that have no children"""
445 """return the list of all nodes that have no children"""
442 p = {}
446 p = {}
443 h = []
447 h = []
444 stoprev = 0
448 stoprev = 0
445 if stop in chlog.nodemap:
449 if stop in chlog.nodemap:
446 stoprev = chlog.rev(stop)
450 stoprev = chlog.rev(stop)
447
451
448 for r in range(chlog.count() - 1, -1, -1):
452 for r in range(chlog.count() - 1, -1, -1):
449 n = chlog.node(r)
453 n = chlog.node(r)
450 if n not in p:
454 if n not in p:
451 h.append(n)
455 h.append(n)
452 if n == stop:
456 if n == stop:
453 break
457 break
454 if r < stoprev:
458 if r < stoprev:
455 break
459 break
456 for pn in chlog.parents(n):
460 for pn in chlog.parents(n):
457 p[pn] = 1
461 p[pn] = 1
458 return h
462 return h
459
463
460 def bundle(cg):
464 def bundle(cg):
461 backupdir = repo.join("strip-backup")
465 backupdir = repo.join("strip-backup")
462 if not os.path.isdir(backupdir):
466 if not os.path.isdir(backupdir):
463 os.mkdir(backupdir)
467 os.mkdir(backupdir)
464 name = os.path.join(backupdir, "%s" % revlog.short(rev))
468 name = os.path.join(backupdir, "%s" % revlog.short(rev))
465 name = savename(name)
469 name = savename(name)
466 self.ui.warn("saving bundle to %s\n" % name)
470 self.ui.warn("saving bundle to %s\n" % name)
467 # TODO, exclusive open
471 # TODO, exclusive open
468 f = open(name, "wb")
472 f = open(name, "wb")
469 try:
473 try:
470 f.write("HG10")
474 f.write("HG10")
471 z = bz2.BZ2Compressor(9)
475 z = bz2.BZ2Compressor(9)
472 while 1:
476 while 1:
473 chunk = cg.read(4096)
477 chunk = cg.read(4096)
474 if not chunk:
478 if not chunk:
475 break
479 break
476 f.write(z.compress(chunk))
480 f.write(z.compress(chunk))
477 f.write(z.flush())
481 f.write(z.flush())
478 except:
482 except:
479 os.unlink(name)
483 os.unlink(name)
480 raise
484 raise
481 f.close()
485 f.close()
482 return name
486 return name
483
487
484 def stripall(rev, revnum):
488 def stripall(rev, revnum):
485 cl = repo.changelog
489 cl = repo.changelog
486 c = cl.read(rev)
490 c = cl.read(rev)
487 mm = repo.manifest.read(c[0])
491 mm = repo.manifest.read(c[0])
488 seen = {}
492 seen = {}
489
493
490 for x in xrange(revnum, cl.count()):
494 for x in xrange(revnum, cl.count()):
491 c = cl.read(cl.node(x))
495 c = cl.read(cl.node(x))
492 for f in c[3]:
496 for f in c[3]:
493 if f in seen:
497 if f in seen:
494 continue
498 continue
495 seen[f] = 1
499 seen[f] = 1
496 if f in mm:
500 if f in mm:
497 filerev = mm[f]
501 filerev = mm[f]
498 else:
502 else:
499 filerev = 0
503 filerev = 0
500 seen[f] = filerev
504 seen[f] = filerev
501 # we go in two steps here so the strip loop happens in a
505 # we go in two steps here so the strip loop happens in a
502 # sensible order. When stripping many files, this helps keep
506 # sensible order. When stripping many files, this helps keep
503 # our disk access patterns under control.
507 # our disk access patterns under control.
504 list = seen.keys()
508 seen_list = seen.keys()
505 list.sort()
509 seen_list.sort()
506 for f in list:
510 for f in seen_list:
507 ff = repo.file(f)
511 ff = repo.file(f)
508 filerev = seen[f]
512 filerev = seen[f]
509 if filerev != 0:
513 if filerev != 0:
510 if filerev in ff.nodemap:
514 if filerev in ff.nodemap:
511 filerev = ff.rev(filerev)
515 filerev = ff.rev(filerev)
512 else:
516 else:
513 filerev = 0
517 filerev = 0
514 ff.strip(filerev, revnum)
518 ff.strip(filerev, revnum)
515
519
516 if not wlock:
520 if not wlock:
517 wlock = repo.wlock()
521 wlock = repo.wlock()
518 lock = repo.lock()
522 lock = repo.lock()
519 chlog = repo.changelog
523 chlog = repo.changelog
520 # TODO delete the undo files, and handle undo of merge sets
524 # TODO delete the undo files, and handle undo of merge sets
521 pp = chlog.parents(rev)
525 pp = chlog.parents(rev)
522 revnum = chlog.rev(rev)
526 revnum = chlog.rev(rev)
523
527
524 if update:
528 if update:
525 (c, a, r, d, u) = repo.changes(None, None)
529 (c, a, r, d, u) = repo.changes(None, None)
526 if c or a or d or r:
530 if c or a or d or r:
527 raise util.Abort(_("local changes found"))
531 raise util.Abort(_("local changes found"))
528 urev = self.qparents(repo, rev)
532 urev = self.qparents(repo, rev)
529 hg.update(repo, urev, allow=False, force=True, wlock=wlock)
533 hg.update(repo, urev, allow=False, force=True, wlock=wlock)
530 repo.dirstate.write()
534 repo.dirstate.write()
531
535
532 # save is a list of all the branches we are truncating away
536 # save is a list of all the branches we are truncating away
533 # that we actually want to keep. changegroup will be used
537 # that we actually want to keep. changegroup will be used
534 # to preserve them and add them back after the truncate
538 # to preserve them and add them back after the truncate
535 saveheads = []
539 saveheads = []
536 savebases = {}
540 savebases = {}
537
541
538 tip = chlog.tip()
539 heads = limitheads(chlog, rev)
542 heads = limitheads(chlog, rev)
540 seen = {}
543 seen = {}
541
544
542 # search through all the heads, finding those where the revision
545 # search through all the heads, finding those where the revision
543 # we want to strip away is an ancestor. Also look for merges
546 # we want to strip away is an ancestor. Also look for merges
544 # that might be turned into new heads by the strip.
547 # that might be turned into new heads by the strip.
545 while heads:
548 while heads:
546 h = heads.pop()
549 h = heads.pop()
547 n = h
550 n = h
548 while True:
551 while True:
549 seen[n] = 1
552 seen[n] = 1
550 pp = chlog.parents(n)
553 pp = chlog.parents(n)
551 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
554 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
552 if pp[1] not in seen:
555 if pp[1] not in seen:
553 heads.append(pp[1])
556 heads.append(pp[1])
554 if pp[0] == revlog.nullid:
557 if pp[0] == revlog.nullid:
555 break
558 break
556 if chlog.rev(pp[0]) < revnum:
559 if chlog.rev(pp[0]) < revnum:
557 break
560 break
558 n = pp[0]
561 n = pp[0]
559 if n == rev:
562 if n == rev:
560 break
563 break
561 r = chlog.reachable(h, rev)
564 r = chlog.reachable(h, rev)
562 if rev not in r:
565 if rev not in r:
563 saveheads.append(h)
566 saveheads.append(h)
564 for x in r:
567 for x in r:
565 if chlog.rev(x) > revnum:
568 if chlog.rev(x) > revnum:
566 savebases[x] = 1
569 savebases[x] = 1
567
570
568 # create a changegroup for all the branches we need to keep
571 # create a changegroup for all the branches we need to keep
569 if backup is "all":
572 if backup == "all":
570 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
573 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
571 bundle(backupch)
574 bundle(backupch)
572 if saveheads:
575 if saveheads:
573 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
576 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
574 chgrpfile = bundle(backupch)
577 chgrpfile = bundle(backupch)
575
578
576 stripall(rev, revnum)
579 stripall(rev, revnum)
577
580
578 change = chlog.read(rev)
581 change = chlog.read(rev)
579 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
582 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
580 chlog.strip(revnum, revnum)
583 chlog.strip(revnum, revnum)
581 if saveheads:
584 if saveheads:
582 self.ui.status("adding branch\n")
585 self.ui.status("adding branch\n")
583 commands.unbundle(self.ui, repo, chgrpfile, update=False)
586 commands.unbundle(self.ui, repo, chgrpfile, update=False)
584 if backup is not "strip":
587 if backup != "strip":
585 os.unlink(chgrpfile)
588 os.unlink(chgrpfile)
586
589
587 def isapplied(self, patch):
590 def isapplied(self, patch):
588 """returns (index, rev, patch)"""
591 """returns (index, rev, patch)"""
589 for i in xrange(len(self.applied)):
592 for i in xrange(len(self.applied)):
590 p = self.applied[i]
593 a = self.applied[i]
591 a = p.split(':')
594 if a.name == patch:
592 if a[1] == patch:
595 return (i, a.rev, a.name)
593 return (i, a[0], a[1])
594 return None
596 return None
595
597
596 # if the exact patch name does not exist, we try a few
598 # if the exact patch name does not exist, we try a few
597 # variations. If strict is passed, we try only #1
599 # variations. If strict is passed, we try only #1
598 #
600 #
599 # 1) a number to indicate an offset in the series file
601 # 1) a number to indicate an offset in the series file
600 # 2) a unique substring of the patch name was given
602 # 2) a unique substring of the patch name was given
601 # 3) patchname[-+]num to indicate an offset in the series file
603 # 3) patchname[-+]num to indicate an offset in the series file
602 def lookup(self, patch, strict=False):
604 def lookup(self, patch, strict=False):
603 def partial_name(s):
605 def partial_name(s):
604 if s in self.series:
606 if s in self.series:
605 return s
607 return s
606 matches = [x for x in self.series if s in x]
608 matches = [x for x in self.series if s in x]
607 if len(matches) > 1:
609 if len(matches) > 1:
608 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
610 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
609 for m in matches:
611 for m in matches:
610 self.ui.warn(' %s\n' % m)
612 self.ui.warn(' %s\n' % m)
611 return None
613 return None
612 if matches:
614 if matches:
613 return matches[0]
615 return matches[0]
614 if len(self.series) > 0 and len(self.applied) > 0:
616 if len(self.series) > 0 and len(self.applied) > 0:
615 if s == 'qtip':
617 if s == 'qtip':
616 return self.series[self.series_end()-1]
618 return self.series[self.series_end()-1]
617 if s == 'qbase':
619 if s == 'qbase':
618 return self.series[0]
620 return self.series[0]
619 return None
621 return None
620 if patch == None:
622 if patch == None:
621 return None
623 return None
622
624
623 # we don't want to return a partial match until we make
625 # we don't want to return a partial match until we make
624 # sure the file name passed in does not exist (checked below)
626 # sure the file name passed in does not exist (checked below)
625 res = partial_name(patch)
627 res = partial_name(patch)
626 if res and res == patch:
628 if res and res == patch:
627 return res
629 return res
628
630
629 if not os.path.isfile(os.path.join(self.path, patch)):
631 if not os.path.isfile(os.path.join(self.path, patch)):
630 try:
632 try:
631 sno = int(patch)
633 sno = int(patch)
632 except(ValueError, OverflowError):
634 except(ValueError, OverflowError):
633 pass
635 pass
634 else:
636 else:
635 if sno < len(self.series):
637 if sno < len(self.series):
636 patch = self.series[sno]
638 patch = self.series[sno]
637 return patch
639 return patch
638 if not strict:
640 if not strict:
639 # return any partial match made above
641 # return any partial match made above
640 if res:
642 if res:
641 return res
643 return res
642 minus = patch.rsplit('-', 1)
644 minus = patch.rsplit('-', 1)
643 if len(minus) > 1:
645 if len(minus) > 1:
644 res = partial_name(minus[0])
646 res = partial_name(minus[0])
645 if res:
647 if res:
646 i = self.series.index(res)
648 i = self.series.index(res)
647 try:
649 try:
648 off = int(minus[1] or 1)
650 off = int(minus[1] or 1)
649 except(ValueError, OverflowError):
651 except(ValueError, OverflowError):
650 pass
652 pass
651 else:
653 else:
652 if i - off >= 0:
654 if i - off >= 0:
653 return self.series[i - off]
655 return self.series[i - off]
654 plus = patch.rsplit('+', 1)
656 plus = patch.rsplit('+', 1)
655 if len(plus) > 1:
657 if len(plus) > 1:
656 res = partial_name(plus[0])
658 res = partial_name(plus[0])
657 if res:
659 if res:
658 i = self.series.index(res)
660 i = self.series.index(res)
659 try:
661 try:
660 off = int(plus[1] or 1)
662 off = int(plus[1] or 1)
661 except(ValueError, OverflowError):
663 except(ValueError, OverflowError):
662 pass
664 pass
663 else:
665 else:
664 if i + off < len(self.series):
666 if i + off < len(self.series):
665 return self.series[i + off]
667 return self.series[i + off]
666 raise util.Abort(_("patch %s not in series") % patch)
668 raise util.Abort(_("patch %s not in series") % patch)
667
669
668 def push(self, repo, patch=None, force=False, list=False,
670 def push(self, repo, patch=None, force=False, list=False,
669 mergeq=None, wlock=None):
671 mergeq=None, wlock=None):
670 if not wlock:
672 if not wlock:
671 wlock = repo.wlock()
673 wlock = repo.wlock()
672 patch = self.lookup(patch)
674 patch = self.lookup(patch)
673 if patch and self.isapplied(patch):
675 if patch and self.isapplied(patch):
674 self.ui.warn(_("patch %s is already applied\n") % patch)
676 self.ui.warn(_("patch %s is already applied\n") % patch)
675 sys.exit(1)
677 sys.exit(1)
676 if self.series_end() == len(self.series):
678 if self.series_end() == len(self.series):
677 self.ui.warn(_("patch series fully applied\n"))
679 self.ui.warn(_("patch series fully applied\n"))
678 sys.exit(1)
680 sys.exit(1)
679 if not force:
681 if not force:
680 self.check_localchanges(repo)
682 self.check_localchanges(repo)
681
683
682 self.applied_dirty = 1;
684 self.applied_dirty = 1;
683 start = self.series_end()
685 start = self.series_end()
684 if start > 0:
686 if start > 0:
685 self.check_toppatch(repo)
687 self.check_toppatch(repo)
686 if not patch:
688 if not patch:
687 patch = self.series[start]
689 patch = self.series[start]
688 end = start + 1
690 end = start + 1
689 else:
691 else:
690 end = self.series.index(patch, start) + 1
692 end = self.series.index(patch, start) + 1
691 s = self.series[start:end]
693 s = self.series[start:end]
692 if mergeq:
694 if mergeq:
693 ret = self.mergepatch(repo, mergeq, s, wlock)
695 ret = self.mergepatch(repo, mergeq, s, wlock)
694 else:
696 else:
695 ret = self.apply(repo, s, list, wlock=wlock)
697 ret = self.apply(repo, s, list, wlock=wlock)
696 top = self.applied[-1].split(':')[1]
698 top = self.applied[-1].name
697 if ret[0]:
699 if ret[0]:
698 self.ui.write("Errors during apply, please fix and refresh %s\n" %
700 self.ui.write("Errors during apply, please fix and refresh %s\n" %
699 top)
701 top)
700 else:
702 else:
701 self.ui.write("Now at: %s\n" % top)
703 self.ui.write("Now at: %s\n" % top)
702 return ret[0]
704 return ret[0]
703
705
704 def pop(self, repo, patch=None, force=False, update=True, all=False,
706 def pop(self, repo, patch=None, force=False, update=True, all=False,
705 wlock=None):
707 wlock=None):
706 def getfile(f, rev):
708 def getfile(f, rev):
707 t = repo.file(f).read(rev)
709 t = repo.file(f).read(rev)
708 try:
710 try:
709 repo.wfile(f, "w").write(t)
711 repo.wfile(f, "w").write(t)
710 except IOError:
712 except IOError:
711 try:
713 try:
712 os.makedirs(os.path.dirname(repo.wjoin(f)))
714 os.makedirs(os.path.dirname(repo.wjoin(f)))
713 except OSError, err:
715 except OSError, err:
714 if err.errno != errno.EEXIST: raise
716 if err.errno != errno.EEXIST: raise
715 repo.wfile(f, "w").write(t)
717 repo.wfile(f, "w").write(t)
716
718
717 if not wlock:
719 if not wlock:
718 wlock = repo.wlock()
720 wlock = repo.wlock()
719 if patch:
721 if patch:
720 # index, rev, patch
722 # index, rev, patch
721 info = self.isapplied(patch)
723 info = self.isapplied(patch)
722 if not info:
724 if not info:
723 patch = self.lookup(patch)
725 patch = self.lookup(patch)
724 info = self.isapplied(patch)
726 info = self.isapplied(patch)
725 if not info:
727 if not info:
726 raise util.Abort(_("patch %s is not applied") % patch)
728 raise util.Abort(_("patch %s is not applied") % patch)
727 if len(self.applied) == 0:
729 if len(self.applied) == 0:
728 self.ui.warn(_("no patches applied\n"))
730 self.ui.warn(_("no patches applied\n"))
729 sys.exit(1)
731 sys.exit(1)
730
732
731 if not update:
733 if not update:
732 parents = repo.dirstate.parents()
734 parents = repo.dirstate.parents()
733 rr = [ revlog.bin(x.split(':')[0]) for x in self.applied ]
735 rr = [ revlog.bin(x.rev) for x in self.applied ]
734 for p in parents:
736 for p in parents:
735 if p in rr:
737 if p in rr:
736 self.ui.warn("qpop: forcing dirstate update\n")
738 self.ui.warn("qpop: forcing dirstate update\n")
737 update = True
739 update = True
738
740
739 if not force and update:
741 if not force and update:
740 self.check_localchanges(repo)
742 self.check_localchanges(repo)
741
743
742 self.applied_dirty = 1;
744 self.applied_dirty = 1;
743 end = len(self.applied)
745 end = len(self.applied)
744 if not patch:
746 if not patch:
745 if all:
747 if all:
746 popi = 0
748 popi = 0
747 else:
749 else:
748 popi = len(self.applied) - 1
750 popi = len(self.applied) - 1
749 else:
751 else:
750 popi = info[0] + 1
752 popi = info[0] + 1
751 if popi >= end:
753 if popi >= end:
752 self.ui.warn("qpop: %s is already at the top\n" % patch)
754 self.ui.warn("qpop: %s is already at the top\n" % patch)
753 return
755 return
754 info = [ popi ] + self.applied[popi].split(':')
756 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
755
757
756 start = info[0]
758 start = info[0]
757 rev = revlog.bin(info[1])
759 rev = revlog.bin(info[1])
758
760
759 # we know there are no local changes, so we can make a simplified
761 # we know there are no local changes, so we can make a simplified
760 # form of hg.update.
762 # form of hg.update.
761 if update:
763 if update:
762 top = self.check_toppatch(repo)
764 top = self.check_toppatch(repo)
763 qp = self.qparents(repo, rev)
765 qp = self.qparents(repo, rev)
764 changes = repo.changelog.read(qp)
766 changes = repo.changelog.read(qp)
765 mf1 = repo.manifest.readflags(changes[0])
767 mf1 = repo.manifest.readflags(changes[0])
766 mmap = repo.manifest.read(changes[0])
768 mmap = repo.manifest.read(changes[0])
767 (c, a, r, d, u) = repo.changes(qp, top)
769 (c, a, r, d, u) = repo.changes(qp, top)
768 if d:
770 if d:
769 raise util.Abort("deletions found between repo revs")
771 raise util.Abort("deletions found between repo revs")
770 for f in c:
772 for f in c:
771 getfile(f, mmap[f])
773 getfile(f, mmap[f])
772 for f in r:
774 for f in r:
773 getfile(f, mmap[f])
775 getfile(f, mmap[f])
774 util.set_exec(repo.wjoin(f), mf1[f])
776 util.set_exec(repo.wjoin(f), mf1[f])
775 repo.dirstate.update(c + r, 'n')
777 repo.dirstate.update(c + r, 'n')
776 for f in a:
778 for f in a:
777 try: os.unlink(repo.wjoin(f))
779 try: os.unlink(repo.wjoin(f))
778 except: raise
780 except: raise
779 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
781 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
780 except: pass
782 except: pass
781 if a:
783 if a:
782 repo.dirstate.forget(a)
784 repo.dirstate.forget(a)
783 repo.dirstate.setparents(qp, revlog.nullid)
785 repo.dirstate.setparents(qp, revlog.nullid)
784 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
786 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
785 del self.applied[start:end]
787 del self.applied[start:end]
786 if len(self.applied):
788 if len(self.applied):
787 self.ui.write("Now at: %s\n" % self.applied[-1].split(':')[1])
789 self.ui.write("Now at: %s\n" % self.applied[-1].name)
788 else:
790 else:
789 self.ui.write("Patch queue now empty\n")
791 self.ui.write("Patch queue now empty\n")
790
792
791 def diff(self, repo, files):
793 def diff(self, repo, files):
792 top = self.check_toppatch(repo)
794 top = self.check_toppatch(repo)
793 if not top:
795 if not top:
794 self.ui.write("No patches applied\n")
796 self.ui.write("No patches applied\n")
795 return
797 return
796 qp = self.qparents(repo, top)
798 qp = self.qparents(repo, top)
797 commands.dodiff(sys.stdout, self.ui, repo, qp, None, files)
799 commands.dodiff(sys.stdout, self.ui, repo, qp, None, files)
798
800
799 def refresh(self, repo, msg=None, short=False):
801 def refresh(self, repo, msg=None, short=False):
800 if len(self.applied) == 0:
802 if len(self.applied) == 0:
801 self.ui.write("No patches applied\n")
803 self.ui.write("No patches applied\n")
802 return
804 return
803 wlock = repo.wlock()
805 wlock = repo.wlock()
804 self.check_toppatch(repo)
806 self.check_toppatch(repo)
805 qp = self.qparents(repo)
807 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
806 (top, patch) = self.applied[-1].split(':')
807 top = revlog.bin(top)
808 top = revlog.bin(top)
808 cparents = repo.changelog.parents(top)
809 cparents = repo.changelog.parents(top)
809 patchparent = self.qparents(repo, top)
810 patchparent = self.qparents(repo, top)
810 message, comments, user, date, patchfound = self.readheaders(patch)
811 message, comments, user, date, patchfound = self.readheaders(patch)
811
812
812 patchf = self.opener(patch, "w")
813 patchf = self.opener(patch, "w")
813 msg = msg.rstrip()
814 msg = msg.rstrip()
814 if msg:
815 if msg:
815 if comments:
816 if comments:
816 # Remove existing message.
817 # Remove existing message.
817 ci = 0
818 ci = 0
818 for mi in range(len(message)):
819 for mi in range(len(message)):
819 while message[mi] != comments[ci]:
820 while message[mi] != comments[ci]:
820 ci += 1
821 ci += 1
821 del comments[ci]
822 del comments[ci]
822 comments.append(msg)
823 comments.append(msg)
823 if comments:
824 if comments:
824 comments = "\n".join(comments) + '\n\n'
825 comments = "\n".join(comments) + '\n\n'
825 patchf.write(comments)
826 patchf.write(comments)
826
827
827 tip = repo.changelog.tip()
828 tip = repo.changelog.tip()
828 if top == tip:
829 if top == tip:
829 # if the top of our patch queue is also the tip, there is an
830 # if the top of our patch queue is also the tip, there is an
830 # optimization here. We update the dirstate in place and strip
831 # optimization here. We update the dirstate in place and strip
831 # off the tip commit. Then just commit the current directory
832 # off the tip commit. Then just commit the current directory
832 # tree. We can also send repo.commit the list of files
833 # tree. We can also send repo.commit the list of files
833 # changed to speed up the diff
834 # changed to speed up the diff
834 #
835 #
835 # in short mode, we only diff the files included in the
836 # in short mode, we only diff the files included in the
836 # patch already
837 # patch already
837 #
838 #
838 # this should really read:
839 # this should really read:
839 #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent)
840 #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent)
840 # but we do it backwards to take advantage of manifest/chlog
841 # but we do it backwards to take advantage of manifest/chlog
841 # caching against the next repo.changes call
842 # caching against the next repo.changes call
842 #
843 #
843 (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip)
844 (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip)
844 if short:
845 if short:
845 filelist = cc + aa + dd
846 filelist = cc + aa + dd
846 else:
847 else:
847 filelist = None
848 filelist = None
848 (c, a, r, d, u) = repo.changes(None, None, filelist)
849 (c, a, r, d, u) = repo.changes(None, None, filelist)
849
850
850 # we might end up with files that were added between tip and
851 # we might end up with files that were added between tip and
851 # the dirstate parent, but then changed in the local dirstate.
852 # the dirstate parent, but then changed in the local dirstate.
852 # in this case, we want them to only show up in the added section
853 # in this case, we want them to only show up in the added section
853 for x in c:
854 for x in c:
854 if x not in aa:
855 if x not in aa:
855 cc.append(x)
856 cc.append(x)
856 # we might end up with files added by the local dirstate that
857 # we might end up with files added by the local dirstate that
857 # were deleted by the patch. In this case, they should only
858 # were deleted by the patch. In this case, they should only
858 # show up in the changed section.
859 # show up in the changed section.
859 for x in a:
860 for x in a:
860 if x in dd:
861 if x in dd:
861 del dd[dd.index(x)]
862 del dd[dd.index(x)]
862 cc.append(x)
863 cc.append(x)
863 else:
864 else:
864 aa.append(x)
865 aa.append(x)
865 # make sure any files deleted in the local dirstate
866 # make sure any files deleted in the local dirstate
866 # are not in the add or change column of the patch
867 # are not in the add or change column of the patch
867 forget = []
868 forget = []
868 for x in d + r:
869 for x in d + r:
869 if x in aa:
870 if x in aa:
870 del aa[aa.index(x)]
871 del aa[aa.index(x)]
871 forget.append(x)
872 forget.append(x)
872 continue
873 continue
873 elif x in cc:
874 elif x in cc:
874 del cc[cc.index(x)]
875 del cc[cc.index(x)]
875 dd.append(x)
876 dd.append(x)
876
877
877 c = list(util.unique(cc))
878 c = list(util.unique(cc))
878 r = list(util.unique(dd))
879 r = list(util.unique(dd))
879 a = list(util.unique(aa))
880 a = list(util.unique(aa))
880 filelist = list(util.unique(c + r + a ))
881 filelist = list(util.unique(c + r + a ))
881 commands.dodiff(patchf, self.ui, repo, patchparent, None,
882 commands.dodiff(patchf, self.ui, repo, patchparent, None,
882 filelist, changes=(c, a, r, [], u))
883 filelist, changes=(c, a, r, [], u))
883 patchf.close()
884 patchf.close()
884
885
885 changes = repo.changelog.read(tip)
886 changes = repo.changelog.read(tip)
886 repo.dirstate.setparents(*cparents)
887 repo.dirstate.setparents(*cparents)
887 repo.dirstate.update(a, 'a')
888 repo.dirstate.update(a, 'a')
888 repo.dirstate.update(r, 'r')
889 repo.dirstate.update(r, 'r')
889 repo.dirstate.update(c, 'n')
890 repo.dirstate.update(c, 'n')
890 repo.dirstate.forget(forget)
891 repo.dirstate.forget(forget)
891
892
892 if not msg:
893 if not msg:
893 if not message:
894 if not message:
894 message = "patch queue: %s\n" % patch
895 message = "patch queue: %s\n" % patch
895 else:
896 else:
896 message = "\n".join(message)
897 message = "\n".join(message)
897 else:
898 else:
898 message = msg
899 message = msg
899
900
900 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
901 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
901 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
902 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
902 self.applied[-1] = revlog.hex(n) + ':' + patch
903 self.applied[-1] = StatusEntry(revlog.hex(n), patch)
903 self.applied_dirty = 1
904 self.applied_dirty = 1
904 else:
905 else:
905 commands.dodiff(patchf, self.ui, repo, patchparent, None)
906 commands.dodiff(patchf, self.ui, repo, patchparent, None)
906 patchf.close()
907 patchf.close()
907 self.pop(repo, force=True, wlock=wlock)
908 self.pop(repo, force=True, wlock=wlock)
908 self.push(repo, force=True, wlock=wlock)
909 self.push(repo, force=True, wlock=wlock)
909
910
910 def init(self, repo, create=False):
911 def init(self, repo, create=False):
911 if os.path.isdir(self.path):
912 if os.path.isdir(self.path):
912 raise util.Abort(_("patch queue directory already exists"))
913 raise util.Abort(_("patch queue directory already exists"))
913 os.mkdir(self.path)
914 os.mkdir(self.path)
914 if create:
915 if create:
915 return self.qrepo(create=True)
916 return self.qrepo(create=True)
916
917
917 def unapplied(self, repo, patch=None):
918 def unapplied(self, repo, patch=None):
918 if patch and patch not in self.series:
919 if patch and patch not in self.series:
919 raise util.Abort(_("patch %s is not in series file") % patch)
920 raise util.Abort(_("patch %s is not in series file") % patch)
920 if not patch:
921 if not patch:
921 start = self.series_end()
922 start = self.series_end()
922 else:
923 else:
923 start = self.series.index(patch) + 1
924 start = self.series.index(patch) + 1
924 for p in self.series[start:]:
925 return [(i, self.series[i]) for i in xrange(start, len(self.series))]
925 if self.ui.verbose:
926 self.ui.write("%d " % self.series.index(p))
927 self.ui.write("%s\n" % p)
928
926
929 def qseries(self, repo, missing=None, summary=False):
927 def qseries(self, repo, missing=None, summary=False):
930 start = self.series_end()
928 start = self.series_end()
931 if not missing:
929 if not missing:
932 for i in range(len(self.series)):
930 for i in range(len(self.series)):
933 patch = self.series[i]
931 patch = self.series[i]
934 if self.ui.verbose:
932 if self.ui.verbose:
935 if i < start:
933 if i < start:
936 status = 'A'
934 status = 'A'
937 else:
935 else:
938 status = 'U'
936 status = 'U'
939 self.ui.write('%d %s ' % (i, status))
937 self.ui.write('%d %s ' % (i, status))
940 if summary:
938 if summary:
941 msg = self.readheaders(patch)[0]
939 msg = self.readheaders(patch)[0]
942 msg = msg and ': ' + msg[0] or ': '
940 msg = msg and ': ' + msg[0] or ': '
943 else:
941 else:
944 msg = ''
942 msg = ''
945 self.ui.write('%s%s\n' % (patch, msg))
943 self.ui.write('%s%s\n' % (patch, msg))
946 else:
944 else:
947 list = []
945 msng_list = []
948 for root, dirs, files in os.walk(self.path):
946 for root, dirs, files in os.walk(self.path):
949 d = root[len(self.path) + 1:]
947 d = root[len(self.path) + 1:]
950 for f in files:
948 for f in files:
951 fl = os.path.join(d, f)
949 fl = os.path.join(d, f)
952 if (fl not in self.series and
950 if (fl not in self.series and
953 fl not in (self.status_path, self.series_path)
951 fl not in (self.status_path, self.series_path)
954 and not fl.startswith('.')):
952 and not fl.startswith('.')):
955 list.append(fl)
953 msng_list.append(fl)
956 list.sort()
954 msng_list.sort()
957 if list:
955 for x in msng_list:
958 for x in list:
956 if self.ui.verbose:
959 if self.ui.verbose:
957 self.ui.write("D ")
960 self.ui.write("D ")
958 self.ui.write("%s\n" % x)
961 self.ui.write("%s\n" % x)
962
959
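For orientation, qseries prints one line per series entry; with -v it prefixes the series index and a state letter ('A' applied, 'U' unapplied), with -s it appends the first header line, and with -m it lists files sitting in the patch directory but missing from the series, flagged 'D' under -v. Hypothetical output (patch names and summaries are made up):

    $ hg qseries -v -s
    0 A first.patch: fix the frobnicator
    1 U second.patch: add regression test

    $ hg qseries -v -m
    D stray-notes.txt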
963 def issaveline(self, l):
960 def issaveline(self, l):
964 name = l.split(':')[1]
961 name = l.split(':')[1]
965 if name == '.hg.patches.save.line':
962 if name == '.hg.patches.save.line':
966 return True
963 return True
967
964
968 def qrepo(self, create=False):
965 def qrepo(self, create=False):
969 if create or os.path.isdir(os.path.join(self.path, ".hg")):
966 if create or os.path.isdir(os.path.join(self.path, ".hg")):
970 return hg.repository(self.ui, path=self.path, create=create)
967 return hg.repository(self.ui, path=self.path, create=create)
971
968
972 def restore(self, repo, rev, delete=None, qupdate=None):
969 def restore(self, repo, rev, delete=None, qupdate=None):
973 c = repo.changelog.read(rev)
970 c = repo.changelog.read(rev)
974 desc = c[4].strip()
971 desc = c[4].strip()
975 lines = desc.splitlines()
972 lines = desc.splitlines()
976 i = 0
973 i = 0
977 datastart = None
974 datastart = None
978 series = []
975 series = []
979 applied = []
976 applied = []
980 qpp = None
977 qpp = None
981 for i in xrange(0, len(lines)):
978 for i in xrange(0, len(lines)):
982 if lines[i] == 'Patch Data:':
979 if lines[i] == 'Patch Data:':
983 datastart = i + 1
980 datastart = i + 1
984 elif lines[i].startswith('Dirstate:'):
981 elif lines[i].startswith('Dirstate:'):
985 l = lines[i].rstrip()
982 l = lines[i].rstrip()
986 l = l[10:].split(' ')
983 l = l[10:].split(' ')
987 qpp = [ hg.bin(x) for x in l ]
984 qpp = [ hg.bin(x) for x in l ]
988 elif datastart != None:
985 elif datastart != None:
989 l = lines[i].rstrip()
986 l = lines[i].rstrip()
990 index = l.index(':')
987 se = StatusEntry(l)
991 id = l[:index]
988 file_ = se.name
992 file = l[index + 1:]
989 if se.rev:
993 if id:
990 applied.append(se)
994 applied.append(l)
991 series.append(file_)
995 series.append(file)
996 if datastart == None:
992 if datastart == None:
997 self.ui.warn("No saved patch data found\n")
993 self.ui.warn("No saved patch data found\n")
998 return 1
994 return 1
999 self.ui.warn("restoring status: %s\n" % lines[0])
995 self.ui.warn("restoring status: %s\n" % lines[0])
1000 self.full_series = series
996 self.full_series = series
1001 self.applied = applied
997 self.applied = applied
1002 self.parse_series()
998 self.parse_series()
1003 self.series_dirty = 1
999 self.series_dirty = 1
1004 self.applied_dirty = 1
1000 self.applied_dirty = 1
1005 heads = repo.changelog.heads()
1001 heads = repo.changelog.heads()
1006 if delete:
1002 if delete:
1007 if rev not in heads:
1003 if rev not in heads:
1008 self.ui.warn("save entry has children, leaving it alone\n")
1004 self.ui.warn("save entry has children, leaving it alone\n")
1009 else:
1005 else:
1010 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1006 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1011 pp = repo.dirstate.parents()
1007 pp = repo.dirstate.parents()
1012 if rev in pp:
1008 if rev in pp:
1013 update = True
1009 update = True
1014 else:
1010 else:
1015 update = False
1011 update = False
1016 self.strip(repo, rev, update=update, backup='strip')
1012 self.strip(repo, rev, update=update, backup='strip')
1017 if qpp:
1013 if qpp:
1018 self.ui.warn("saved queue repository parents: %s %s\n" %
1014 self.ui.warn("saved queue repository parents: %s %s\n" %
1019 (hg.short(qpp[0]), hg.short(qpp[1])))
1015 (hg.short(qpp[0]), hg.short(qpp[1])))
1020 if qupdate:
1016 if qupdate:
1021 print "queue directory updating"
1017 print "queue directory updating"
1022 r = self.qrepo()
1018 r = self.qrepo()
1023 if not r:
1019 if not r:
1024 self.ui.warn("Unable to load queue repository\n")
1020 self.ui.warn("Unable to load queue repository\n")
1025 return 1
1021 return 1
1026 hg.update(r, qpp[0], allow=False, force=True)
1022 hg.update(r, qpp[0], allow=False, force=True)
1027
1023
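restore() re-parses the description text that save() (just below) attaches to its save changeset, so the two have to agree on the layout. A sketch of that description for a queue with one applied and one unapplied patch (node hashes are placeholders):

    hg patches saved state
    Dirstate: 6a3f09c6a9a1... 000000000000...

    Patch Data:
    4b2cd8f1a77e...:first.patch
    :first.patch
    :second.patch

Every line after 'Patch Data:' becomes a series entry again; only lines with a non-empty revision before the colon are rebuilt as applied StatusEntry objects, and the 'Dirstate:' line (written only when a queue repository exists) supplies the parents used for -u/--update.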
1028 def save(self, repo, msg=None):
1024 def save(self, repo, msg=None):
1029 if len(self.applied) == 0:
1025 if len(self.applied) == 0:
1030 self.ui.warn("save: no patches applied, exiting\n")
1026 self.ui.warn("save: no patches applied, exiting\n")
1031 return 1
1027 return 1
1032 if self.issaveline(self.applied[-1]):
1028 if self.issaveline(self.applied[-1]):
1033 self.ui.warn("status is already saved\n")
1029 self.ui.warn("status is already saved\n")
1034 return 1
1030 return 1
1035
1031
1036 ar = [ ':' + x for x in self.full_series ]
1032 ar = [ ':' + x for x in self.full_series ]
1037 if not msg:
1033 if not msg:
1038 msg = "hg patches saved state"
1034 msg = "hg patches saved state"
1039 else:
1035 else:
1040 msg = "hg patches: " + msg.rstrip('\r\n')
1036 msg = "hg patches: " + msg.rstrip('\r\n')
1041 r = self.qrepo()
1037 r = self.qrepo()
1042 if r:
1038 if r:
1043 pp = r.dirstate.parents()
1039 pp = r.dirstate.parents()
1044 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1040 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1045 msg += "\n\nPatch Data:\n"
1041 msg += "\n\nPatch Data:\n"
1046 text = msg + "\n".join(self.applied) + '\n' + (ar and "\n".join(ar)
1042 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and "\n".join(ar)
1047 + '\n' or "")
1043 + '\n' or "")
1048 n = repo.commit(None, text, user=None, force=1)
1044 n = repo.commit(None, text, user=None, force=1)
1049 if not n:
1045 if not n:
1050 self.ui.warn("repo commit failed\n")
1046 self.ui.warn("repo commit failed\n")
1051 return 1
1047 return 1
1052 self.applied.append(revlog.hex(n) + ":" + '.hg.patches.save.line')
1048 self.applied.append(StatusEntry(revlog.hex(n),'.hg.patches.save.line'))
1053 self.applied_dirty = 1
1049 self.applied_dirty = 1
1054
1050
1055 def full_series_end(self):
1051 def full_series_end(self):
1056 if len(self.applied) > 0:
1052 if len(self.applied) > 0:
1057 (top, p) = self.applied[-1].split(':')
1053 p = self.applied[-1].name
1058 end = self.find_series(p)
1054 end = self.find_series(p)
1059 if end == None:
1055 if end == None:
1060 return len(self.full_series)
1056 return len(self.full_series)
1061 return end + 1
1057 return end + 1
1062 return 0
1058 return 0
1063
1059
1064 def series_end(self):
1060 def series_end(self):
1065 end = 0
1061 end = 0
1066 if len(self.applied) > 0:
1062 if len(self.applied) > 0:
1067 (top, p) = self.applied[-1].split(':')
1063 p = self.applied[-1].name
1068 try:
1064 try:
1069 end = self.series.index(p)
1065 end = self.series.index(p)
1070 except ValueError:
1066 except ValueError:
1071 return 0
1067 return 0
1072 return end + 1
1068 return end + 1
1073 return end
1069 return end
1074
1070
1075 def qapplied(self, repo, patch=None):
1071 def qapplied(self, repo, patch=None):
1076 if patch and patch not in self.series:
1072 if patch and patch not in self.series:
1077 raise util.Abort(_("patch %s is not in series file") % patch)
1073 raise util.Abort(_("patch %s is not in series file") % patch)
1078 if not patch:
1074 if not patch:
1079 end = len(self.applied)
1075 end = len(self.applied)
1080 else:
1076 else:
1081 end = self.series.index(patch) + 1
1077 end = self.series.index(patch) + 1
1082 for x in xrange(end):
1078 for x in xrange(end):
1083 p = self.appliedname(x)
1079 p = self.appliedname(x)
1084 self.ui.write("%s\n" % p)
1080 self.ui.write("%s\n" % p)
1085
1081
1086 def appliedname(self, index):
1082 def appliedname(self, index):
1087 p = self.applied[index]
1083 pname = self.applied[index].name
1088 pname = p.split(':')[1]
1089 if not self.ui.verbose:
1084 if not self.ui.verbose:
1090 p = pname
1085 p = pname
1091 else:
1086 else:
1092 p = str(self.series.index(pname)) + " " + p
1087 p = str(self.series.index(pname)) + " " + pname
1093 return p
1088 return p
1094
1089
1095 def top(self, repo):
1090 def top(self, repo):
1096 if len(self.applied):
1091 if len(self.applied):
1097 p = self.appliedname(-1)
1092 p = self.appliedname(-1)
1098 self.ui.write(p + '\n')
1093 self.ui.write(p + '\n')
1099 else:
1094 else:
1100 self.ui.write("No patches applied\n")
1095 self.ui.write("No patches applied\n")
1101
1096
1102 def next(self, repo):
1097 def next(self, repo):
1103 end = self.series_end()
1098 end = self.series_end()
1104 if end == len(self.series):
1099 if end == len(self.series):
1105 self.ui.write("All patches applied\n")
1100 self.ui.write("All patches applied\n")
1106 else:
1101 else:
1107 p = self.series[end]
1102 p = self.series[end]
1108 if self.ui.verbose:
1103 if self.ui.verbose:
1109 self.ui.write("%d " % self.series.index(p))
1104 self.ui.write("%d " % self.series.index(p))
1110 self.ui.write(p + '\n')
1105 self.ui.write(p + '\n')
1111
1106
1112 def prev(self, repo):
1107 def prev(self, repo):
1113 if len(self.applied) > 1:
1108 if len(self.applied) > 1:
1114 p = self.appliedname(-2)
1109 p = self.appliedname(-2)
1115 self.ui.write(p + '\n')
1110 self.ui.write(p + '\n')
1116 elif len(self.applied) == 1:
1111 elif len(self.applied) == 1:
1117 self.ui.write("Only one patch applied\n")
1112 self.ui.write("Only one patch applied\n")
1118 else:
1113 else:
1119 self.ui.write("No patches applied\n")
1114 self.ui.write("No patches applied\n")
1120
1115
1121 def qimport(self, repo, files, patch=None, existing=None, force=None):
1116 def qimport(self, repo, files, patch=None, existing=None, force=None):
1122 if len(files) > 1 and patch:
1117 if len(files) > 1 and patch:
1123 raise util.Abort(_('option "-n" not valid when importing multiple '
1118 raise util.Abort(_('option "-n" not valid when importing multiple '
1124 'files'))
1119 'files'))
1125 i = 0
1120 i = 0
1126 added = []
1121 added = []
1127 for filename in files:
1122 for filename in files:
1128 if existing:
1123 if existing:
1129 if not patch:
1124 if not patch:
1130 patch = filename
1125 patch = filename
1131 if not os.path.isfile(os.path.join(self.path, patch)):
1126 if not os.path.isfile(os.path.join(self.path, patch)):
1132 raise util.Abort(_("patch %s does not exist") % patch)
1127 raise util.Abort(_("patch %s does not exist") % patch)
1133 else:
1128 else:
1134 try:
1129 try:
1135 text = file(filename).read()
1130 text = file(filename).read()
1136 except IOError:
1131 except IOError:
1137 raise util.Abort(_("unable to read %s") % patch)
1132 raise util.Abort(_("unable to read %s") % patch)
1138 if not patch:
1133 if not patch:
1139 patch = os.path.split(filename)[1]
1134 patch = os.path.split(filename)[1]
1140 if not force and os.path.exists(os.path.join(self.path, patch)):
1135 if not force and os.path.exists(os.path.join(self.path, patch)):
1141 raise util.Abort(_('patch "%s" already exists') % patch)
1136 raise util.Abort(_('patch "%s" already exists') % patch)
1142 patchf = self.opener(patch, "w")
1137 patchf = self.opener(patch, "w")
1143 patchf.write(text)
1138 patchf.write(text)
1144 if patch in self.series:
1139 if patch in self.series:
1145 raise util.Abort(_('patch %s is already in the series file')
1140 raise util.Abort(_('patch %s is already in the series file')
1146 % patch)
1141 % patch)
1147 index = self.full_series_end() + i
1142 index = self.full_series_end() + i
1148 self.full_series[index:index] = [patch]
1143 self.full_series[index:index] = [patch]
1149 self.parse_series()
1144 self.parse_series()
1150 self.ui.warn("adding %s to series file\n" % patch)
1145 self.ui.warn("adding %s to series file\n" % patch)
1151 i += 1
1146 i += 1
1152 added.append(patch)
1147 added.append(patch)
1153 patch = None
1148 patch = None
1154 self.series_dirty = 1
1149 self.series_dirty = 1
1155 qrepo = self.qrepo()
1150 qrepo = self.qrepo()
1156 if qrepo:
1151 if qrepo:
1157 qrepo.add(added)
1152 qrepo.add(added)
1158
1153
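The command table at the bottom of the file exposes this as hg qimport with -e/--existing, -n/--name and -f/--force; two hypothetical invocations (file names are made up):

    $ hg qimport ../fix-null-deref.diff -n fix-null-deref.patch
    $ hg qimport -e an-existing-file-in-the-patch-dir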
1159 def delete(ui, repo, patch, **opts):
1154 def delete(ui, repo, patch, **opts):
1160 """remove a patch from the series file
1155 """remove a patch from the series file
1161
1156
1162 The patch must not be applied.
1157 The patch must not be applied.
1163 With -f, deletes the patch file as well as the series entry."""
1158 With -f, deletes the patch file as well as the series entry."""
1164 q = repo.mq
1159 q = repo.mq
1165 q.delete(repo, patch, force=opts.get('force'))
1160 q.delete(repo, patch, force=opts.get('force'))
1166 q.save_dirty()
1161 q.save_dirty()
1167 return 0
1162 return 0
1168
1163
1169 def applied(ui, repo, patch=None, **opts):
1164 def applied(ui, repo, patch=None, **opts):
1170 """print the patches already applied"""
1165 """print the patches already applied"""
1171 repo.mq.qapplied(repo, patch)
1166 repo.mq.qapplied(repo, patch)
1172 return 0
1167 return 0
1173
1168
1174 def unapplied(ui, repo, patch=None, **opts):
1169 def unapplied(ui, repo, patch=None, **opts):
1175 """print the patches not yet applied"""
1170 """print the patches not yet applied"""
1176 repo.mq.unapplied(repo, patch)
1171 for i, p in repo.mq.unapplied(repo, patch):
1177 return 0
1172 if ui.verbose:
1173 ui.write("%d " % i)
1174 ui.write("%s\n" % p)
1178
1175
1179 def qimport(ui, repo, *filename, **opts):
1176 def qimport(ui, repo, *filename, **opts):
1180 """import a patch"""
1177 """import a patch"""
1181 q = repo.mq
1178 q = repo.mq
1182 q.qimport(repo, filename, patch=opts['name'],
1179 q.qimport(repo, filename, patch=opts['name'],
1183 existing=opts['existing'], force=opts['force'])
1180 existing=opts['existing'], force=opts['force'])
1184 q.save_dirty()
1181 q.save_dirty()
1185 return 0
1182 return 0
1186
1183
1187 def init(ui, repo, **opts):
1184 def init(ui, repo, **opts):
1188 """init a new queue repository
1185 """init a new queue repository
1189
1186
1190 The queue repository is unversioned by default. If -c is
1187 The queue repository is unversioned by default. If -c is
1191 specified, qinit will create a separate nested repository
1188 specified, qinit will create a separate nested repository
1192 for patches. Use qcommit to commit changes to this queue
1189 for patches. Use qcommit to commit changes to this queue
1193 repository."""
1190 repository."""
1194 q = repo.mq
1191 q = repo.mq
1195 r = q.init(repo, create=opts['create_repo'])
1192 r = q.init(repo, create=opts['create_repo'])
1196 q.save_dirty()
1193 q.save_dirty()
1197 if r:
1194 if r:
1198 fp = r.wopener('.hgignore', 'w')
1195 fp = r.wopener('.hgignore', 'w')
1199 print >> fp, 'syntax: glob'
1196 print >> fp, 'syntax: glob'
1200 print >> fp, 'status'
1197 print >> fp, 'status'
1201 fp.close()
1198 fp.close()
1202 r.wopener('series', 'w').close()
1199 r.wopener('series', 'w').close()
1203 r.add(['.hgignore', 'series'])
1200 r.add(['.hgignore', 'series'])
1204 return 0
1201 return 0
1205
1202
1206 def clone(ui, source, dest=None, **opts):
1203 def clone(ui, source, dest=None, **opts):
1207 '''clone main and patch repository at same time
1204 '''clone main and patch repository at same time
1208
1205
1209 If source is local, destination will have no patches applied. If
1206 If source is local, destination will have no patches applied. If
1210 source is remote, this command cannot check whether patches are
1207 source is remote, this command cannot check whether patches are
1211 applied in source, so it cannot guarantee that patches are not
1208 applied in source, so it cannot guarantee that patches are not
1212 applied in destination. If you clone a remote repository, make
1209 applied in destination. If you clone a remote repository, make
1213 sure it has no patches applied before cloning.
1210 sure it has no patches applied before cloning.
1214
1211
1215 Source patch repository is looked for in <src>/.hg/patches by
1212 Source patch repository is looked for in <src>/.hg/patches by
1216 default. Use -p <url> to change.
1213 default. Use -p <url> to change.
1217 '''
1214 '''
1218 commands.setremoteconfig(ui, opts)
1215 commands.setremoteconfig(ui, opts)
1219 if dest is None:
1216 if dest is None:
1220 dest = hg.defaultdest(source)
1217 dest = hg.defaultdest(source)
1221 sr = hg.repository(ui, ui.expandpath(source))
1218 sr = hg.repository(ui, ui.expandpath(source))
1222 qbase, destrev = None, None
1219 qbase, destrev = None, None
1223 if sr.local():
1220 if sr.local():
1224 reposetup(ui, sr)
1221 reposetup(ui, sr)
1225 if sr.mq.applied:
1222 if sr.mq.applied:
1226 qbase = revlog.bin(sr.mq.applied[0].split(':')[0])
1223 qbase = revlog.bin(sr.mq.applied[0].rev)
1227 if not hg.islocal(dest):
1224 if not hg.islocal(dest):
1228 destrev = sr.parents(qbase)[0]
1225 destrev = sr.parents(qbase)[0]
1229 ui.note(_('cloning main repo\n'))
1226 ui.note(_('cloning main repo\n'))
1230 sr, dr = hg.clone(ui, sr, dest,
1227 sr, dr = hg.clone(ui, sr, dest,
1231 pull=opts['pull'],
1228 pull=opts['pull'],
1232 rev=destrev,
1229 rev=destrev,
1233 update=False,
1230 update=False,
1234 stream=opts['uncompressed'])
1231 stream=opts['uncompressed'])
1235 ui.note(_('cloning patch repo\n'))
1232 ui.note(_('cloning patch repo\n'))
1236 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1233 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1237 dr.url() + '/.hg/patches',
1234 dr.url() + '/.hg/patches',
1238 pull=opts['pull'],
1235 pull=opts['pull'],
1239 update=not opts['noupdate'],
1236 update=not opts['noupdate'],
1240 stream=opts['uncompressed'])
1237 stream=opts['uncompressed'])
1241 if dr.local():
1238 if dr.local():
1242 if qbase:
1239 if qbase:
1243 ui.note(_('stripping applied patches from destination repo\n'))
1240 ui.note(_('stripping applied patches from destination repo\n'))
1244 reposetup(ui, dr)
1241 reposetup(ui, dr)
1245 dr.mq.strip(dr, qbase, update=False, backup=None)
1242 dr.mq.strip(dr, qbase, update=False, backup=None)
1246 if not opts['noupdate']:
1243 if not opts['noupdate']:
1247 ui.note(_('updating destination repo\n'))
1244 ui.note(_('updating destination repo\n'))
1248 hg.update(dr, dr.changelog.tip())
1245 hg.update(dr, dr.changelog.tip())
1249
1246
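Two hypothetical invocations of the qclone command above (URLs and paths are placeholders); the second uses -p to point at a non-default patch repository, as the docstring describes:

    $ hg qclone http://hg.example.com/project project
    $ hg qclone -p http://hg.example.com/project-patches http://hg.example.com/project project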
1250 def commit(ui, repo, *pats, **opts):
1247 def commit(ui, repo, *pats, **opts):
1251 """commit changes in the queue repository"""
1248 """commit changes in the queue repository"""
1252 q = repo.mq
1249 q = repo.mq
1253 r = q.qrepo()
1250 r = q.qrepo()
1254 if not r: raise util.Abort('no queue repository')
1251 if not r: raise util.Abort('no queue repository')
1255 commands.commit(r.ui, r, *pats, **opts)
1252 commands.commit(r.ui, r, *pats, **opts)
1256
1253
1257 def series(ui, repo, **opts):
1254 def series(ui, repo, **opts):
1258 """print the entire series file"""
1255 """print the entire series file"""
1259 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1256 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1260 return 0
1257 return 0
1261
1258
1262 def top(ui, repo, **opts):
1259 def top(ui, repo, **opts):
1263 """print the name of the current patch"""
1260 """print the name of the current patch"""
1264 repo.mq.top(repo)
1261 repo.mq.top(repo)
1265 return 0
1262 return 0
1266
1263
1267 def next(ui, repo, **opts):
1264 def next(ui, repo, **opts):
1268 """print the name of the next patch"""
1265 """print the name of the next patch"""
1269 repo.mq.next(repo)
1266 repo.mq.next(repo)
1270 return 0
1267 return 0
1271
1268
1272 def prev(ui, repo, **opts):
1269 def prev(ui, repo, **opts):
1273 """print the name of the previous patch"""
1270 """print the name of the previous patch"""
1274 repo.mq.prev(repo)
1271 repo.mq.prev(repo)
1275 return 0
1272 return 0
1276
1273
1277 def new(ui, repo, patch, **opts):
1274 def new(ui, repo, patch, **opts):
1278 """create a new patch
1275 """create a new patch
1279
1276
1280 qnew creates a new patch on top of the currently-applied patch
1277 qnew creates a new patch on top of the currently-applied patch
1281 (if any). It will refuse to run if there are any outstanding
1278 (if any). It will refuse to run if there are any outstanding
1282 changes unless -f is specified, in which case the patch will
1279 changes unless -f is specified, in which case the patch will
1283 be initialised with them.
1280 be initialised with them.
1284
1281
1285 -m or -l set the patch header as well as the commit message.
1282 -m or -l set the patch header as well as the commit message.
1286 If neither is specified, the patch header is empty and the
1283 If neither is specified, the patch header is empty and the
1287 commit message is 'New patch: PATCH'"""
1284 commit message is 'New patch: PATCH'"""
1288 q = repo.mq
1285 q = repo.mq
1289 message=commands.logmessage(**opts)
1286 message = commands.logmessage(**opts)
1290 q.new(repo, patch, msg=message, force=opts['force'])
1287 q.new(repo, patch, msg=message, force=opts['force'])
1291 q.save_dirty()
1288 q.save_dirty()
1292 return 0
1289 return 0
1293
1290
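Two hypothetical qnew invocations matching the docstring (patch names are made up): the first starts an empty patch with a commit message, the second uses -f to fold outstanding working-directory changes into the new patch:

    $ hg qnew -m "teach widget about frobs" widget-frobs.patch
    $ hg qnew -f wip.patch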
1294 def refresh(ui, repo, **opts):
1291 def refresh(ui, repo, **opts):
1295 """update the current patch"""
1292 """update the current patch"""
1296 q = repo.mq
1293 q = repo.mq
1297 message=commands.logmessage(**opts)
1294 message = commands.logmessage(**opts)
1298 if opts['edit']:
1295 if opts['edit']:
1299 if message:
1296 if message:
1300 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1297 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1301 patch = q.applied[-1].split(':')[1]
1298 patch = q.applied[-1].name
1302 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1299 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1303 message = ui.edit('\n'.join(message), user or ui.username())
1300 message = ui.edit('\n'.join(message), user or ui.username())
1304 q.refresh(repo, msg=message, short=opts['short'])
1301 q.refresh(repo, msg=message, short=opts['short'])
1305 q.save_dirty()
1302 q.save_dirty()
1306 return 0
1303 return 0
1307
1304
1308 def diff(ui, repo, *files, **opts):
1305 def diff(ui, repo, *files, **opts):
1309 """diff of the current patch"""
1306 """diff of the current patch"""
1310 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1307 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1311 repo.mq.diff(repo, list(files))
1308 repo.mq.diff(repo, list(files))
1312 return 0
1309 return 0
1313
1310
1314 def fold(ui, repo, *files, **opts):
1311 def fold(ui, repo, *files, **opts):
1315 """fold the named patches into the current patch
1312 """fold the named patches into the current patch
1316
1313
1317 Patches must not yet be applied. Each patch will be successively
1314 Patches must not yet be applied. Each patch will be successively
1318 applied to the current patch in the order given. If all the
1315 applied to the current patch in the order given. If all the
1319 patches apply successfully, the current patch will be refreshed
1316 patches apply successfully, the current patch will be refreshed
1320 with the new cumulative patch, and the folded patches will
1317 with the new cumulative patch, and the folded patches will
1321 be deleted. With -f/--force, the folded patch files will
1318 be deleted. With -f/--force, the folded patch files will
1322 be removed afterwards.
1319 be removed afterwards.
1323
1320
1324 The header for each folded patch will be concatenated with
1321 The header for each folded patch will be concatenated with
1325 the current patch header, separated by a line of '* * *'."""
1322 the current patch header, separated by a line of '* * *'."""
1326
1323
1327 q = repo.mq
1324 q = repo.mq
1328
1325
1329 if not files:
1326 if not files:
1330 raise util.Abort(_('qfold requires at least one patch name'))
1327 raise util.Abort(_('qfold requires at least one patch name'))
1331 if not q.check_toppatch(repo):
1328 if not q.check_toppatch(repo):
1332 raise util.Abort(_('No patches applied\n'))
1329 raise util.Abort(_('No patches applied\n'))
1333
1330
1334 message=commands.logmessage(**opts)
1331 message = commands.logmessage(**opts)
1335 if opts['edit']:
1332 if opts['edit']:
1336 if message:
1333 if message:
1337 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1334 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1338
1335
1339 parent = q.lookup('qtip')
1336 parent = q.lookup('qtip')
1340 patches = []
1337 patches = []
1341 messages = []
1338 messages = []
1342 for f in files:
1339 for f in files:
1343 patch = q.lookup(f)
1340 patch = q.lookup(f)
1344 if patch in patches or patch == parent:
1341 if patch in patches or patch == parent:
1345 self.ui.warn(_('Skipping already folded patch %s') % patch)
1342 ui.warn(_('Skipping already folded patch %s') % patch)
1346 if q.isapplied(patch):
1343 if q.isapplied(patch):
1347 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1344 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1348 patches.append(patch)
1345 patches.append(patch)
1349
1346
1350 for patch in patches:
1347 for patch in patches:
1351 if not message:
1348 if not message:
1352 messages.append(q.readheaders(patch)[0])
1349 messages.append(q.readheaders(patch)[0])
1353 pf = os.path.join(q.path, patch)
1350 pf = os.path.join(q.path, patch)
1354 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1351 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1355 if not patchsuccess:
1352 if not patchsuccess:
1356 raise util.Abort(_('Error folding patch %s') % patch)
1353 raise util.Abort(_('Error folding patch %s') % patch)
1357
1354
1358 if not message:
1355 if not message:
1359 message, comments, user = q.readheaders(parent)[0:3]
1356 message, comments, user = q.readheaders(parent)[0:3]
1360 for msg in messages:
1357 for msg in messages:
1361 message.append('* * *')
1358 message.append('* * *')
1362 message.extend(msg)
1359 message.extend(msg)
1363 message = '\n'.join(message)
1360 message = '\n'.join(message)
1364
1361
1365 if opts['edit']:
1362 if opts['edit']:
1366 message = ui.edit(message, user or ui.username())
1363 message = ui.edit(message, user or ui.username())
1367
1364
1368 q.refresh(repo, msg=message)
1365 q.refresh(repo, msg=message)
1369
1366
1370 for patch in patches:
1367 for patch in patches:
1371 q.delete(repo, patch, force=opts['force'])
1368 q.delete(repo, patch, force=opts['force'])
1372
1369
1373 q.save_dirty()
1370 q.save_dirty()
1374
1371
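A hypothetical qfold run for the function above: the named patches must still be unapplied, and after a successful fold they are removed from the series (add -f to delete the patch files as well):

    $ hg qfold -m "combined widget fix" part2.patch part3.patch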
1375 def header(ui, repo, patch=None):
1372 def header(ui, repo, patch=None):
1376 """Print the header of the topmost or specified patch"""
1373 """Print the header of the topmost or specified patch"""
1377 q = repo.mq
1374 q = repo.mq
1378
1375
1379 if patch:
1376 if patch:
1380 patch = q.lookup(patch)
1377 patch = q.lookup(patch)
1381 else:
1378 else:
1382 if not q.applied:
1379 if not q.applied:
1383 ui.write('No patches applied\n')
1380 ui.write('No patches applied\n')
1384 return
1381 return
1385 patch = q.lookup('qtip')
1382 patch = q.lookup('qtip')
1386 message = repo.mq.readheaders(patch)[0]
1383 message = repo.mq.readheaders(patch)[0]
1387
1384
1388 ui.write('\n'.join(message) + '\n')
1385 ui.write('\n'.join(message) + '\n')
1389
1386
1390 def lastsavename(path):
1387 def lastsavename(path):
1391 (dir, base) = os.path.split(path)
1388 (directory, base) = os.path.split(path)
1392 names = os.listdir(dir)
1389 names = os.listdir(directory)
1393 namere = re.compile("%s.([0-9]+)" % base)
1390 namere = re.compile("%s.([0-9]+)" % base)
1394 max = None
1391 maxindex = None
1395 maxname = None
1392 maxname = None
1396 for f in names:
1393 for f in names:
1397 m = namere.match(f)
1394 m = namere.match(f)
1398 if m:
1395 if m:
1399 index = int(m.group(1))
1396 index = int(m.group(1))
1400 if max == None or index > max:
1397 if maxindex == None or index > maxindex:
1401 max = index
1398 maxindex = index
1402 maxname = f
1399 maxname = f
1403 if maxname:
1400 if maxname:
1404 return (os.path.join(dir, maxname), max)
1401 return (os.path.join(directory, maxname), maxindex)
1405 return (None, None)
1402 return (None, None)
1406
1403
1407 def savename(path):
1404 def savename(path):
1408 (last, index) = lastsavename(path)
1405 (last, index) = lastsavename(path)
1409 if last is None:
1406 if last is None:
1410 index = 0
1407 index = 0
1411 newpath = path + ".%d" % (index + 1)
1408 newpath = path + ".%d" % (index + 1)
1412 return newpath
1409 return newpath
1413
1410
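A worked example of the two helpers above, assuming saved copies .hg/patches.1 and .hg/patches.2 already sit next to the patch directory:

    lastsavename('.hg/patches')  ->  ('.hg/patches.2', 2)
    savename('.hg/patches')      ->  '.hg/patches.3'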
1414 def push(ui, repo, patch=None, **opts):
1411 def push(ui, repo, patch=None, **opts):
1415 """push the next patch onto the stack"""
1412 """push the next patch onto the stack"""
1416 q = repo.mq
1413 q = repo.mq
1417 mergeq = None
1414 mergeq = None
1418
1415
1419 if opts['all']:
1416 if opts['all']:
1420 patch = q.series[-1]
1417 patch = q.series[-1]
1421 if opts['merge']:
1418 if opts['merge']:
1422 if opts['name']:
1419 if opts['name']:
1423 newpath = opts['name']
1420 newpath = opts['name']
1424 else:
1421 else:
1425 newpath, i = lastsavename(q.path)
1422 newpath, i = lastsavename(q.path)
1426 if not newpath:
1423 if not newpath:
1427 ui.warn("no saved queues found, please use -n\n")
1424 ui.warn("no saved queues found, please use -n\n")
1428 return 1
1425 return 1
1429 mergeq = queue(ui, repo.join(""), newpath)
1426 mergeq = queue(ui, repo.join(""), newpath)
1430 ui.warn("merging with queue at: %s\n" % mergeq.path)
1427 ui.warn("merging with queue at: %s\n" % mergeq.path)
1431 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1428 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1432 mergeq=mergeq)
1429 mergeq=mergeq)
1433 q.save_dirty()
1430 q.save_dirty()
1434 return ret
1431 return ret
1435
1432
1436 def pop(ui, repo, patch=None, **opts):
1433 def pop(ui, repo, patch=None, **opts):
1437 """pop the current patch off the stack"""
1434 """pop the current patch off the stack"""
1438 localupdate = True
1435 localupdate = True
1439 if opts['name']:
1436 if opts['name']:
1440 q = queue(ui, repo.join(""), repo.join(opts['name']))
1437 q = queue(ui, repo.join(""), repo.join(opts['name']))
1441 ui.warn('using patch queue: %s\n' % q.path)
1438 ui.warn('using patch queue: %s\n' % q.path)
1442 localupdate = False
1439 localupdate = False
1443 else:
1440 else:
1444 q = repo.mq
1441 q = repo.mq
1445 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1442 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1446 q.save_dirty()
1443 q.save_dirty()
1447 return 0
1444 return 0
1448
1445
1449 def rename(ui, repo, patch, name=None, **opts):
1446 def rename(ui, repo, patch, name=None, **opts):
1450 """rename a patch
1447 """rename a patch
1451
1448
1452 With one argument, renames the current patch to PATCH1.
1449 With one argument, renames the current patch to PATCH1.
1453 With two arguments, renames PATCH1 to PATCH2."""
1450 With two arguments, renames PATCH1 to PATCH2."""
1454
1451
1455 q = repo.mq
1452 q = repo.mq
1456
1453
1457 if not name:
1454 if not name:
1458 name = patch
1455 name = patch
1459 patch = None
1456 patch = None
1460
1457
1461 if name in q.series:
1458 if name in q.series:
1462 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1459 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1463
1460
1464 absdest = os.path.join(q.path, name)
1461 absdest = os.path.join(q.path, name)
1465 if os.path.exists(absdest):
1462 if os.path.exists(absdest):
1466 raise util.Abort(_('%s already exists') % absdest)
1463 raise util.Abort(_('%s already exists') % absdest)
1467
1464
1468 if patch:
1465 if patch:
1469 patch = q.lookup(patch)
1466 patch = q.lookup(patch)
1470 else:
1467 else:
1471 if not q.applied:
1468 if not q.applied:
1472 ui.write(_('No patches applied\n'))
1469 ui.write(_('No patches applied\n'))
1473 return
1470 return
1474 patch = q.lookup('qtip')
1471 patch = q.lookup('qtip')
1475
1472
1476 if ui.verbose:
1473 if ui.verbose:
1477 ui.write('Renaming %s to %s\n' % (patch, name))
1474 ui.write('Renaming %s to %s\n' % (patch, name))
1478 i = q.find_series(patch)
1475 i = q.find_series(patch)
1479 q.full_series[i] = name
1476 q.full_series[i] = name
1480 q.parse_series()
1477 q.parse_series()
1481 q.series_dirty = 1
1478 q.series_dirty = 1
1482
1479
1483 info = q.isapplied(patch)
1480 info = q.isapplied(patch)
1484 if info:
1481 if info:
1485 q.applied[info[0]] = info[1] + ':' + name
1482 q.applied[info[0]] = StatusEntry(info[1], name)
1486 q.applied_dirty = 1
1483 q.applied_dirty = 1
1487
1484
1488 util.rename(os.path.join(q.path, patch), absdest)
1485 util.rename(os.path.join(q.path, patch), absdest)
1489 r = q.qrepo()
1486 r = q.qrepo()
1490 if r:
1487 if r:
1491 wlock = r.wlock()
1488 wlock = r.wlock()
1492 if r.dirstate.state(name) == 'r':
1489 if r.dirstate.state(name) == 'r':
1493 r.undelete([name], wlock)
1490 r.undelete([name], wlock)
1494 r.copy(patch, name, wlock)
1491 r.copy(patch, name, wlock)
1495 r.remove([patch], False, wlock)
1492 r.remove([patch], False, wlock)
1496
1493
1497 q.save_dirty()
1494 q.save_dirty()
1498
1495
1499 def restore(ui, repo, rev, **opts):
1496 def restore(ui, repo, rev, **opts):
1500 """restore the queue state saved by a rev"""
1497 """restore the queue state saved by a rev"""
1501 rev = repo.lookup(rev)
1498 rev = repo.lookup(rev)
1502 q = repo.mq
1499 q = repo.mq
1503 q.restore(repo, rev, delete=opts['delete'],
1500 q.restore(repo, rev, delete=opts['delete'],
1504 qupdate=opts['update'])
1501 qupdate=opts['update'])
1505 q.save_dirty()
1502 q.save_dirty()
1506 return 0
1503 return 0
1507
1504
1508 def save(ui, repo, **opts):
1505 def save(ui, repo, **opts):
1509 """save current queue state"""
1506 """save current queue state"""
1510 q = repo.mq
1507 q = repo.mq
1511 message=commands.logmessage(**opts)
1508 message = commands.logmessage(**opts)
1512 ret = q.save(repo, msg=message)
1509 ret = q.save(repo, msg=message)
1513 if ret:
1510 if ret:
1514 return ret
1511 return ret
1515 q.save_dirty()
1512 q.save_dirty()
1516 if opts['copy']:
1513 if opts['copy']:
1517 path = q.path
1514 path = q.path
1518 if opts['name']:
1515 if opts['name']:
1519 newpath = os.path.join(q.basepath, opts['name'])
1516 newpath = os.path.join(q.basepath, opts['name'])
1520 if os.path.exists(newpath):
1517 if os.path.exists(newpath):
1521 if not os.path.isdir(newpath):
1518 if not os.path.isdir(newpath):
1522 raise util.Abort(_('destination %s exists and is not '
1519 raise util.Abort(_('destination %s exists and is not '
1523 'a directory') % newpath)
1520 'a directory') % newpath)
1524 if not opts['force']:
1521 if not opts['force']:
1525 raise util.Abort(_('destination %s exists, '
1522 raise util.Abort(_('destination %s exists, '
1526 'use -f to force') % newpath)
1523 'use -f to force') % newpath)
1527 else:
1524 else:
1528 newpath = savename(path)
1525 newpath = savename(path)
1529 ui.warn("copy %s to %s\n" % (path, newpath))
1526 ui.warn("copy %s to %s\n" % (path, newpath))
1530 util.copyfiles(path, newpath)
1527 util.copyfiles(path, newpath)
1531 if opts['empty']:
1528 if opts['empty']:
1532 try:
1529 try:
1533 os.unlink(os.path.join(q.path, q.status_path))
1530 os.unlink(os.path.join(q.path, q.status_path))
1534 except:
1531 except:
1535 pass
1532 pass
1536 return 0
1533 return 0
1537
1534
1538 def strip(ui, repo, rev, **opts):
1535 def strip(ui, repo, rev, **opts):
1539 """strip a revision and all later revs on the same branch"""
1536 """strip a revision and all later revs on the same branch"""
1540 rev = repo.lookup(rev)
1537 rev = repo.lookup(rev)
1541 backup = 'all'
1538 backup = 'all'
1542 if opts['backup']:
1539 if opts['backup']:
1543 backup = 'strip'
1540 backup = 'strip'
1544 elif opts['nobackup']:
1541 elif opts['nobackup']:
1545 backup = 'none'
1542 backup = 'none'
1546 repo.mq.strip(repo, rev, backup=backup)
1543 repo.mq.strip(repo, rev, backup=backup)
1547 return 0
1544 return 0
1548
1545
1549 def version(ui, q=None):
1546 def version(ui, q=None):
1550 """print the version number of the mq extension"""
1547 """print the version number of the mq extension"""
1551 ui.write("mq version %s\n" % versionstr)
1548 ui.write("mq version %s\n" % versionstr)
1552 return 0
1549 return 0
1553
1550
1554 def reposetup(ui, repo):
1551 def reposetup(ui, repo):
1555 class MqRepo(repo.__class__):
1552 class MqRepo(repo.__class__):
1556 def tags(self):
1553 def tags(self):
1557 if self.tagscache:
1554 if self.tagscache:
1558 return self.tagscache
1555 return self.tagscache
1559
1556
1560 tagscache = super(MqRepo, self).tags()
1557 tagscache = super(MqRepo, self).tags()
1561
1558
1562 q = self.mq
1559 q = self.mq
1563 if not q.applied:
1560 if not q.applied:
1564 return tagscache
1561 return tagscache
1565
1562
1566 mqtags = [patch.split(':') for patch in q.applied]
1563 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1567 mqtags.append((mqtags[-1][0], 'qtip'))
1564 mqtags.append((mqtags[-1][0], 'qtip'))
1568 mqtags.append((mqtags[0][0], 'qbase'))
1565 mqtags.append((mqtags[0][0], 'qbase'))
1569 for patch in mqtags:
1566 for patch in mqtags:
1570 if patch[1] in tagscache:
1567 if patch[1] in tagscache:
1571 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1568 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1572 else:
1569 else:
1573 tagscache[patch[1]] = revlog.bin(patch[0])
1570 tagscache[patch[1]] = revlog.bin(patch[0])
1574
1571
1575 return tagscache
1572 return tagscache
1576
1573
1577 repo.__class__ = MqRepo
1574 repo.__class__ = MqRepo
1578 repo.mq = queue(ui, repo.join(""))
1575 repo.mq = queue(ui, repo.join(""))
1579
1576
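Because the wrapped tags() above registers every applied patch name plus qbase and qtip as tags, ordinary revision arguments can address the patch stack once patches are pushed; for example (illustrative commands only):

    $ hg log -r qtip
    $ hg diff -r qbase -r qtip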
1580 cmdtable = {
1577 cmdtable = {
1581 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1578 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1582 "qclone": (clone,
1579 "qclone": (clone,
1583 [('', 'pull', None, _('use pull protocol to copy metadata')),
1580 [('', 'pull', None, _('use pull protocol to copy metadata')),
1584 ('U', 'noupdate', None, _('do not update the new working directories')),
1581 ('U', 'noupdate', None, _('do not update the new working directories')),
1585 ('', 'uncompressed', None,
1582 ('', 'uncompressed', None,
1586 _('use uncompressed transfer (fast over LAN)')),
1583 _('use uncompressed transfer (fast over LAN)')),
1587 ('e', 'ssh', '', _('specify ssh command to use')),
1584 ('e', 'ssh', '', _('specify ssh command to use')),
1588 ('p', 'patches', '', _('location of source patch repo')),
1585 ('p', 'patches', '', _('location of source patch repo')),
1589 ('', 'remotecmd', '',
1586 ('', 'remotecmd', '',
1590 _('specify hg command to run on the remote side'))],
1587 _('specify hg command to run on the remote side'))],
1591 'hg qclone [OPTION]... SOURCE [DEST]'),
1588 'hg qclone [OPTION]... SOURCE [DEST]'),
1592 "qcommit|qci":
1589 "qcommit|qci":
1593 (commit,
1590 (commit,
1594 commands.table["^commit|ci"][1],
1591 commands.table["^commit|ci"][1],
1595 'hg qcommit [OPTION]... [FILE]...'),
1592 'hg qcommit [OPTION]... [FILE]...'),
1596 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1593 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1597 "qdelete":
1594 "qdelete":
1598 (delete,
1595 (delete,
1599 [('f', 'force', None, _('delete patch file'))],
1596 [('f', 'force', None, _('delete patch file'))],
1600 'hg qdelete [-f] PATCH'),
1597 'hg qdelete [-f] PATCH'),
1601 'qfold':
1598 'qfold':
1602 (fold,
1599 (fold,
1603 [('e', 'edit', None, _('edit patch header')),
1600 [('e', 'edit', None, _('edit patch header')),
1604 ('f', 'force', None, _('delete folded patch files')),
1601 ('f', 'force', None, _('delete folded patch files')),
1605 ('m', 'message', '', _('set patch header to <text>')),
1602 ('m', 'message', '', _('set patch header to <text>')),
1606 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1603 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1607 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1604 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1608 'qheader': (header, [],
1605 'qheader': (header, [],
1609 _('hg qheader [PATCH]')),
1606 _('hg qheader [PATCH]')),
1610 "^qimport":
1607 "^qimport":
1611 (qimport,
1608 (qimport,
1612 [('e', 'existing', None, 'import file in patch dir'),
1609 [('e', 'existing', None, 'import file in patch dir'),
1613 ('n', 'name', '', 'patch file name'),
1610 ('n', 'name', '', 'patch file name'),
1614 ('f', 'force', None, 'overwrite existing files')],
1611 ('f', 'force', None, 'overwrite existing files')],
1615 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1612 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1616 "^qinit":
1613 "^qinit":
1617 (init,
1614 (init,
1618 [('c', 'create-repo', None, 'create queue repository')],
1615 [('c', 'create-repo', None, 'create queue repository')],
1619 'hg qinit [-c]'),
1616 'hg qinit [-c]'),
1620 "qnew":
1617 "qnew":
1621 (new,
1618 (new,
1622 [('m', 'message', '', _('use <text> as commit message')),
1619 [('m', 'message', '', _('use <text> as commit message')),
1623 ('l', 'logfile', '', _('read the commit message from <file>')),
1620 ('l', 'logfile', '', _('read the commit message from <file>')),
1624 ('f', 'force', None, _('import uncommitted changes into patch'))],
1621 ('f', 'force', None, _('import uncommitted changes into patch'))],
1625 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1622 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1626 "qnext": (next, [], 'hg qnext'),
1623 "qnext": (next, [], 'hg qnext'),
1627 "qprev": (prev, [], 'hg qprev'),
1624 "qprev": (prev, [], 'hg qprev'),
1628 "^qpop":
1625 "^qpop":
1629 (pop,
1626 (pop,
1630 [('a', 'all', None, 'pop all patches'),
1627 [('a', 'all', None, 'pop all patches'),
1631 ('n', 'name', '', 'queue name to pop'),
1628 ('n', 'name', '', 'queue name to pop'),
1632 ('f', 'force', None, 'forget any local changes')],
1629 ('f', 'force', None, 'forget any local changes')],
1633 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1630 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1634 "^qpush":
1631 "^qpush":
1635 (push,
1632 (push,
1636 [('f', 'force', None, 'apply if the patch has rejects'),
1633 [('f', 'force', None, 'apply if the patch has rejects'),
1637 ('l', 'list', None, 'list patch name in commit text'),
1634 ('l', 'list', None, 'list patch name in commit text'),
1638 ('a', 'all', None, 'apply all patches'),
1635 ('a', 'all', None, 'apply all patches'),
1639 ('m', 'merge', None, 'merge from another queue'),
1636 ('m', 'merge', None, 'merge from another queue'),
1640 ('n', 'name', '', 'merge queue name')],
1637 ('n', 'name', '', 'merge queue name')],
1641 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1638 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1642 "^qrefresh":
1639 "^qrefresh":
1643 (refresh,
1640 (refresh,
1644 [('e', 'edit', None, _('edit commit message')),
1641 [('e', 'edit', None, _('edit commit message')),
1645 ('m', 'message', '', _('change commit message with <text>')),
1642 ('m', 'message', '', _('change commit message with <text>')),
1646 ('l', 'logfile', '', _('change commit message with <file> content')),
1643 ('l', 'logfile', '', _('change commit message with <file> content')),
1647 ('s', 'short', None, 'short refresh')],
1644 ('s', 'short', None, 'short refresh')],
1648 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1645 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1649 'qrename|qmv':
1646 'qrename|qmv':
1650 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1647 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1651 "qrestore":
1648 "qrestore":
1652 (restore,
1649 (restore,
1653 [('d', 'delete', None, 'delete save entry'),
1650 [('d', 'delete', None, 'delete save entry'),
1654 ('u', 'update', None, 'update queue working dir')],
1651 ('u', 'update', None, 'update queue working dir')],
1655 'hg qrestore [-d] [-u] REV'),
1652 'hg qrestore [-d] [-u] REV'),
1656 "qsave":
1653 "qsave":
1657 (save,
1654 (save,
1658 [('m', 'message', '', _('use <text> as commit message')),
1655 [('m', 'message', '', _('use <text> as commit message')),
1659 ('l', 'logfile', '', _('read the commit message from <file>')),
1656 ('l', 'logfile', '', _('read the commit message from <file>')),
1660 ('c', 'copy', None, 'copy patch directory'),
1657 ('c', 'copy', None, 'copy patch directory'),
1661 ('n', 'name', '', 'copy directory name'),
1658 ('n', 'name', '', 'copy directory name'),
1662 ('e', 'empty', None, 'clear queue status file'),
1659 ('e', 'empty', None, 'clear queue status file'),
1663 ('f', 'force', None, 'force copy')],
1660 ('f', 'force', None, 'force copy')],
1664 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1661 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1665 "qseries":
1662 "qseries":
1666 (series,
1663 (series,
1667 [('m', 'missing', None, 'print patches not in series'),
1664 [('m', 'missing', None, 'print patches not in series'),
1668 ('s', 'summary', None, _('print first line of patch header'))],
1665 ('s', 'summary', None, _('print first line of patch header'))],
1669 'hg qseries [-m] [-s]'),
1666 'hg qseries [-m] [-s]'),
1670 "^strip":
1667 "^strip":
1671 (strip,
1668 (strip,
1672 [('f', 'force', None, 'force multi-head removal'),
1669 [('f', 'force', None, 'force multi-head removal'),
1673 ('b', 'backup', None, 'bundle unrelated changesets'),
1670 ('b', 'backup', None, 'bundle unrelated changesets'),
1674 ('n', 'nobackup', None, 'no backups')],
1671 ('n', 'nobackup', None, 'no backups')],
1675 'hg strip [-f] [-b] [-n] REV'),
1672 'hg strip [-f] [-b] [-n] REV'),
1676 "qtop": (top, [], 'hg qtop'),
1673 "qtop": (top, [], 'hg qtop'),
1677 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1674 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1678 "qversion": (version, [], 'hg qversion')
1675 "qversion": (version, [], 'hg qversion')
1679 }
1676 }
1680
1677
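The change running through this whole file replaces the old 'rev:name' strings kept in self.applied with StatusEntry objects. The class itself is introduced elsewhere in this changeset and is not part of the hunks shown here; purely as a sketch of the interface the code above relies on (.rev and .name attributes plus a string form matching the old format), it could look roughly like this:

    class StatusEntry(object):
        """one line of the mq status file: '<node hex>:<patch name>'"""
        def __init__(self, rev, name=None):
            if name is None:
                # parse a combined "rev:name" status line
                self.rev, self.name = rev.split(':', 1)
            else:
                self.rev, self.name = rev, name
        def __str__(self):
            return self.rev + ':' + self.name

With that in place, appliedname, series_end, restore and the qclone/reposetup code above read .name and .rev instead of splitting strings by hand.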
@@ -1,276 +1,276
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to email notifications to people when changesets are
8 # hook extension to email notifications to people when changesets are
9 # committed to a repo they subscribe to.
9 # committed to a repo they subscribe to.
10 #
10 #
11 # default mode is to print messages to stdout, for testing and
11 # default mode is to print messages to stdout, for testing and
12 # configuring.
12 # configuring.
13 #
13 #
14 # to use, configure notify extension and enable in hgrc like this:
14 # to use, configure notify extension and enable in hgrc like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.notify =
17 # hgext.notify =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # one email for each incoming changeset
20 # # one email for each incoming changeset
21 # incoming.notify = python:hgext.notify.hook
21 # incoming.notify = python:hgext.notify.hook
22 # # batch emails when many changesets incoming at one time
22 # # batch emails when many changesets incoming at one time
23 # changegroup.notify = python:hgext.notify.hook
23 # changegroup.notify = python:hgext.notify.hook
24 #
24 #
25 # [notify]
25 # [notify]
26 # # config items go in here
26 # # config items go in here
27 #
27 #
28 # config items:
28 # config items:
29 #
29 #
30 # REQUIRED:
30 # REQUIRED:
31 # config = /path/to/file # file containing subscriptions
31 # config = /path/to/file # file containing subscriptions
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # test = True # print messages to stdout for testing
34 # test = True # print messages to stdout for testing
35 # strip = 3 # number of slashes to strip for url paths
35 # strip = 3 # number of slashes to strip for url paths
36 # domain = example.com # domain to use if committer missing domain
36 # domain = example.com # domain to use if committer missing domain
37 # style = ... # style file to use when formatting email
37 # style = ... # style file to use when formatting email
38 # template = ... # template to use when formatting email
38 # template = ... # template to use when formatting email
39 # incoming = ... # template to use when run as incoming hook
39 # incoming = ... # template to use when run as incoming hook
40 # changegroup = ... # template when run as changegroup hook
40 # changegroup = ... # template when run as changegroup hook
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 # maxsubject = 67 # truncate subject line longer than this
42 # maxsubject = 67 # truncate subject line longer than this
43 # sources = serve # notify if source of incoming changes in this list
43 # sources = serve # notify if source of incoming changes in this list
44 # # (serve == ssh or http, push, pull, bundle)
44 # # (serve == ssh or http, push, pull, bundle)
45 # [email]
45 # [email]
46 # from = user@host.com # email address to send as if none given
46 # from = user@host.com # email address to send as if none given
47 # [web]
47 # [web]
48 # baseurl = http://hgserver/... # root of hg web site for browsing commits
48 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 #
49 #
50 # notify config file has same format as regular hgrc. it has two
50 # notify config file has same format as regular hgrc. it has two
51 # sections so you can express subscriptions in whatever way is handier
51 # sections so you can express subscriptions in whatever way is handier
52 # for you.
52 # for you.
53 #
53 #
54 # [usersubs]
54 # [usersubs]
55 # # key is subscriber email, value is ","-separated list of glob patterns
55 # # key is subscriber email, value is ","-separated list of glob patterns
56 # user@host = pattern
56 # user@host = pattern
57 #
57 #
58 # [reposubs]
58 # [reposubs]
59 # # key is glob pattern, value is ","-separated list of subscriber emails
59 # # key is glob pattern, value is ","-separated list of subscriber emails
60 # pattern = user@host
60 # pattern = user@host
61 #
61 #
62 # glob patterns are matched against path to repo root.
62 # glob patterns are matched against path to repo root.
63 #
63 #
64 # if you like, you can put the notify config file in a repo that users
64 # if you like, you can put the notify config file in a repo that users
65 # can push changes to, so they can manage their own subscriptions.
65 # can push changes to, so they can manage their own subscriptions.
66
66
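Pulling the header comments together, a hypothetical setup (paths, patterns and addresses are placeholders) consists of an hgrc fragment plus the separate subscription file it points at:

    # in the repository's hgrc
    [extensions]
    hgext.notify =

    [hooks]
    changegroup.notify = python:hgext.notify.hook

    [notify]
    config = /path/to/subscriptions
    test = True            # keep printing to stdout while trying it out
    domain = example.com
    maxdiff = 300

    # in /path/to/subscriptions
    [usersubs]
    alice@example.com = */widgets/*

    [reposubs]
    */docs/* = bob@example.com, carol@example.com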
67 from mercurial.demandload import *
67 from mercurial.demandload import *
68 from mercurial.i18n import gettext as _
68 from mercurial.i18n import gettext as _
69 from mercurial.node import *
69 from mercurial.node import *
70 demandload(globals(), 'email.Parser mercurial:commands,templater,util')
70 demandload(globals(), 'email.Parser mercurial:commands,templater,util')
71 demandload(globals(), 'fnmatch socket time')
71 demandload(globals(), 'fnmatch socket time')
72
72
73 # template for single changeset can include email headers.
73 # template for single changeset can include email headers.
74 single_template = '''
74 single_template = '''
75 Subject: changeset in {webroot}: {desc|firstline|strip}
75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 From: {author}
76 From: {author}
77
77
78 changeset {node|short} in {root}
78 changeset {node|short} in {root}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 description:
80 description:
81 \t{desc|tabindent|strip}
81 \t{desc|tabindent|strip}
82 '''.lstrip()
82 '''.lstrip()
83
83
84 # template for multiple changesets should not contain email headers,
84 # template for multiple changesets should not contain email headers,
85 # because only first set of headers will be used and result will look
85 # because only first set of headers will be used and result will look
86 # strange.
86 # strange.
87 multiple_template = '''
87 multiple_template = '''
88 changeset {node|short} in {root}
88 changeset {node|short} in {root}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 summary: {desc|firstline}
90 summary: {desc|firstline}
91 '''
91 '''
92
92
93 deftemplates = {
93 deftemplates = {
94 'changegroup': multiple_template,
94 'changegroup': multiple_template,
95 }
95 }
96
96
97 class notifier(object):
97 class notifier(object):
98 '''email notification class.'''
98 '''email notification class.'''
99
99
100 def __init__(self, ui, repo, hooktype):
100 def __init__(self, ui, repo, hooktype):
101 self.ui = ui
101 self.ui = ui
102 cfg = self.ui.config('notify', 'config')
102 cfg = self.ui.config('notify', 'config')
103 if cfg:
103 if cfg:
104 self.ui.readconfig(cfg)
104 self.ui.readconfig(cfg)
105 self.repo = repo
105 self.repo = repo
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 self.root = self.strip(self.repo.root)
107 self.root = self.strip(self.repo.root)
108 self.domain = self.ui.config('notify', 'domain')
108 self.domain = self.ui.config('notify', 'domain')
109 self.sio = templater.stringio()
109 self.sio = templater.stringio()
110 self.subs = self.subscribers()
110 self.subs = self.subscribers()
111
111
112 mapfile = self.ui.config('notify', 'style')
112 mapfile = self.ui.config('notify', 'style')
113 template = (self.ui.config('notify', hooktype) or
113 template = (self.ui.config('notify', hooktype) or
114 self.ui.config('notify', 'template'))
114 self.ui.config('notify', 'template'))
115 self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
115 self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
116 self.sio)
116 self.sio)
117 if not mapfile and not template:
117 if not mapfile and not template:
118 template = deftemplates.get(hooktype) or single_template
118 template = deftemplates.get(hooktype) or single_template
119 if template:
119 if template:
120 template = templater.parsestring(template, quoted=False)
120 template = templater.parsestring(template, quoted=False)
121 self.t.use_template(template)
121 self.t.use_template(template)
122
122
123 def strip(self, path):
123 def strip(self, path):
124 '''strip leading slashes from local path, turn into web-safe path.'''
124 '''strip leading slashes from local path, turn into web-safe path.'''
125
125
126 path = util.pconvert(path)
126 path = util.pconvert(path)
127 count = self.stripcount
127 count = self.stripcount
128 while count > 0:
128 while count > 0:
129 c = path.find('/')
129 c = path.find('/')
130 if c == -1:
130 if c == -1:
131 break
131 break
132 path = path[c+1:]
132 path = path[c+1:]
133 count -= 1
133 count -= 1
134 return path
134 return path
135
135
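A worked example of the stripping above, with strip = 3 and a hypothetical repository root: each loop pass drops everything up to and including the next '/', and the leading '/' counts as the first pass, so

    '/home/hg/repos/project'  ->  'repos/project'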
136 def fixmail(self, addr):
136 def fixmail(self, addr):
137 '''try to clean up email addresses.'''
137 '''try to clean up email addresses.'''
138
138
139 addr = templater.email(addr.strip())
139 addr = templater.email(addr.strip())
140 a = addr.find('@localhost')
140 a = addr.find('@localhost')
141 if a != -1:
141 if a != -1:
142 addr = addr[:a]
142 addr = addr[:a]
143 if '@' not in addr:
143 if '@' not in addr:
144 return addr + '@' + self.domain
144 return addr + '@' + self.domain
145 return addr
145 return addr
146
146
147 def subscribers(self):
147 def subscribers(self):
148 '''return list of email addresses of subscribers to this repo.'''
148 '''return list of email addresses of subscribers to this repo.'''
149
149
150 subs = {}
150 subs = {}
151 for user, pats in self.ui.configitems('usersubs'):
151 for user, pats in self.ui.configitems('usersubs'):
152 for pat in pats.split(','):
152 for pat in pats.split(','):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 subs[self.fixmail(user)] = 1
154 subs[self.fixmail(user)] = 1
155 for pat, users in self.ui.configitems('reposubs'):
155 for pat, users in self.ui.configitems('reposubs'):
156 if fnmatch.fnmatch(self.repo.root, pat):
156 if fnmatch.fnmatch(self.repo.root, pat):
157 for user in users.split(','):
157 for user in users.split(','):
158 subs[self.fixmail(user)] = 1
158 subs[self.fixmail(user)] = 1
159 subs = subs.keys()
159 subs = subs.keys()
160 subs.sort()
160 subs.sort()
161 return subs
161 return subs
162
162
163 def url(self, path=None):
163 def url(self, path=None):
164 return self.ui.config('web', 'baseurl') + (path or self.root)
164 return self.ui.config('web', 'baseurl') + (path or self.root)
165
165
166 def node(self, node):
166 def node(self, node):
167 '''format one changeset.'''
167 '''format one changeset.'''
168
168
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 baseurl=self.ui.config('web', 'baseurl'),
170 baseurl=self.ui.config('web', 'baseurl'),
171 root=self.repo.root,
171 root=self.repo.root,
172 webroot=self.root)
172 webroot=self.root)
173
173
174 def skipsource(self, source):
174 def skipsource(self, source):
175 '''true if incoming changes from this source should be skipped.'''
175 '''true if incoming changes from this source should be skipped.'''
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 return source not in ok_sources
177 return source not in ok_sources
178
178
179 def send(self, node, count):
179 def send(self, node, count):
180 '''send message.'''
180 '''send message.'''
181
181
182 p = email.Parser.Parser()
182 p = email.Parser.Parser()
183 self.sio.seek(0)
183 self.sio.seek(0)
184 msg = p.parse(self.sio)
184 msg = p.parse(self.sio)
185
185
186 def fix_subject():
186 def fix_subject():
187 '''try to make subject line exist and be useful.'''
187 '''try to make subject line exist and be useful.'''
188
188
189 subject = msg['Subject']
189 subject = msg['Subject']
190 if not subject:
190 if not subject:
191 if count > 1:
191 if count > 1:
192 subject = _('%s: %d new changesets') % (self.root, count)
192 subject = _('%s: %d new changesets') % (self.root, count)
193 else:
193 else:
194 changes = self.repo.changelog.read(node)
194 changes = self.repo.changelog.read(node)
195 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
196 subject = '%s: %s' % (self.root, s)
196 subject = '%s: %s' % (self.root, s)
197 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
198 if maxsubject and len(subject) > maxsubject:
198 if maxsubject and len(subject) > maxsubject:
199 subject = subject[:maxsubject-3] + '...'
199 subject = subject[:maxsubject-3] + '...'
200 del msg['Subject']
200 del msg['Subject']
201 msg['Subject'] = subject
201 msg['Subject'] = subject
202
202
203 def fix_sender():
203 def fix_sender():
204 '''try to make message have proper sender.'''
204 '''try to make message have proper sender.'''
205
205
206 sender = msg['From']
206 sender = msg['From']
207 if not sender:
207 if not sender:
208 sender = self.ui.config('email', 'from') or self.ui.username()
208 sender = self.ui.config('email', 'from') or self.ui.username()
209 if '@' not in sender or '@localhost' in sender:
209 if '@' not in sender or '@localhost' in sender:
210 sender = self.fixmail(sender)
210 sender = self.fixmail(sender)
211 del msg['From']
211 del msg['From']
212 msg['From'] = sender
212 msg['From'] = sender
213
213
214 fix_subject()
214 fix_subject()
215 fix_sender()
215 fix_sender()
216
216
217 msg['X-Hg-Notification'] = 'changeset ' + short(node)
217 msg['X-Hg-Notification'] = 'changeset ' + short(node)
218 if not msg['Message-Id']:
218 if not msg['Message-Id']:
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
220 (short(node), int(time.time()),
220 (short(node), int(time.time()),
221 hash(self.repo.root), socket.getfqdn()))
221 hash(self.repo.root), socket.getfqdn()))
222 msg['To'] = ', '.join(self.subs)
222 msg['To'] = ', '.join(self.subs)
223
223
224 msgtext = msg.as_string(0)
224 msgtext = msg.as_string(0)
225 if self.ui.configbool('notify', 'test', True):
225 if self.ui.configbool('notify', 'test', True):
226 self.ui.write(msgtext)
226 self.ui.write(msgtext)
227 if not msgtext.endswith('\n'):
227 if not msgtext.endswith('\n'):
228 self.ui.write('\n')
228 self.ui.write('\n')
229 else:
229 else:
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
231 (len(self.subs), count))
231 (len(self.subs), count))
232 mail = self.ui.sendmail()
232 mail = self.ui.sendmail()
233 mail.sendmail(templater.email(msg['From']), self.subs, msgtext)
233 mail.sendmail(templater.email(msg['From']), self.subs, msgtext)
234
234
235 def diff(self, node, ref):
235 def diff(self, node, ref):
236 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
236 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
237 if maxdiff == 0:
237 if maxdiff == 0:
238 return
238 return
239 fp = templater.stringio()
239 fp = templater.stringio()
240 prev = self.repo.changelog.parents(node)[0]
240 prev = self.repo.changelog.parents(node)[0]
241 commands.dodiff(fp, self.ui, self.repo, prev, ref)
241 commands.dodiff(fp, self.ui, self.repo, prev, ref)
242 difflines = fp.getvalue().splitlines(1)
242 difflines = fp.getvalue().splitlines(1)
243 if maxdiff > 0 and len(difflines) > maxdiff:
243 if maxdiff > 0 and len(difflines) > maxdiff:
244 self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
244 self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
245 (len(difflines), maxdiff))
245 (len(difflines), maxdiff))
246 difflines = difflines[:maxdiff]
246 difflines = difflines[:maxdiff]
247 elif difflines:
247 elif difflines:
248 self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
248 self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
249 self.sio.write(*difflines)
249 self.sio.write(*difflines)
250
250
251 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
251 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
252 '''send email notifications to interested subscribers.
252 '''send email notifications to interested subscribers.
253
253
254 if used as changegroup hook, send one email for all changesets in
254 if used as changegroup hook, send one email for all changesets in
255 changegroup. else send one email per changeset.'''
255 changegroup. else send one email per changeset.'''
256 n = notifier(ui, repo, hooktype)
256 n = notifier(ui, repo, hooktype)
257 if not n.subs:
257 if not n.subs:
258 ui.debug(_('notify: no subscribers to this repo\n'))
258 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
259 return
259 return
260 if n.skipsource(source):
260 if n.skipsource(source):
261 ui.debug(_('notify: changes have source "%s" - skipping\n') %
261 ui.debug(_('notify: changes have source "%s" - skipping\n') %
262 source)
262 source)
263 return
263 return
264 node = bin(node)
264 node = bin(node)
265 if hooktype == 'changegroup':
265 if hooktype == 'changegroup':
266 start = repo.changelog.rev(node)
266 start = repo.changelog.rev(node)
267 end = repo.changelog.count()
267 end = repo.changelog.count()
268 count = end - start
268 count = end - start
269 for rev in xrange(start, end):
269 for rev in xrange(start, end):
270 n.node(repo.changelog.node(rev))
270 n.node(repo.changelog.node(rev))
271 n.diff(node, repo.changelog.tip())
271 n.diff(node, repo.changelog.tip())
272 else:
272 else:
273 count = 1
273 count = 1
274 n.node(node)
274 n.node(node)
275 n.diff(node, node)
275 n.diff(node, node)
276 n.send(node, count)
276 n.send(node, count)
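
The notifier class and hook above are driven entirely by hgrc settings read through ui.config and ui.configitems ('notify', 'usersubs', 'reposubs'). The following is a minimal sketch of the wiring they expect, not a definitive recipe: the repository paths and addresses are hypothetical, and the values shown for sources, strip, maxdiff and test mirror the defaults the code above falls back to (note that test defaults to True, so messages are only printed until it is switched off).

  [extensions]
  hgext.notify =

  [hooks]
  # one message per incoming changegroup (picks multiple_template)
  changegroup.notify = python:hgext.notify.hook
  # or one message per changeset (picks single_template):
  # incoming.notify = python:hgext.notify.hook

  [notify]
  # skipsource() drops any source not listed here (default: serve)
  sources = serve push
  # configbool default is True, which only prints the message
  test = False
  # leading path components stripped from repo.root for display
  strip = 3
  # maximum diff lines appended per message; 0 disables diffs
  maxdiff = 300

  [usersubs]
  # subscriber address = comma-separated glob patterns of repository roots
  alice@example.com = /srv/hg/project-*

  [reposubs]
  # glob pattern of repository root = comma-separated subscriber addresses
  /srv/hg/project-main = alice@example.com, bob@example.com
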
@@ -1,308 +1,309
1 # Command for sending a collection of Mercurial changesets as a series
1 # Command for sending a collection of Mercurial changesets as a series
2 # of patch emails.
2 # of patch emails.
3 #
3 #
4 # The series is started off with a "[PATCH 0 of N]" introduction,
4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 # which describes the series as a whole.
5 # which describes the series as a whole.
6 #
6 #
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 # the first line of the changeset description as the subject text.
8 # the first line of the changeset description as the subject text.
9 # The message contains two or three body parts:
9 # The message contains two or three body parts:
10 #
10 #
11 # The remainder of the changeset description.
11 # The remainder of the changeset description.
12 #
12 #
13 # [Optional] If the diffstat program is installed, the result of
13 # [Optional] If the diffstat program is installed, the result of
14 # running diffstat on the patch.
14 # running diffstat on the patch.
15 #
15 #
16 # The patch itself, as generated by "hg export".
16 # The patch itself, as generated by "hg export".
17 #
17 #
18 # Each message refers to all of its predecessors using the In-Reply-To
18 # Each message refers to all of its predecessors using the In-Reply-To
19 # and References headers, so they will show up as a sequence in
19 # and References headers, so they will show up as a sequence in
20 # threaded mail and news readers, and in mail archives.
20 # threaded mail and news readers, and in mail archives.
21 #
21 #
22 # For each changeset, you will be prompted with a diffstat summary and
22 # For each changeset, you will be prompted with a diffstat summary and
23 # the changeset summary, so you can be sure you are sending the right
23 # the changeset summary, so you can be sure you are sending the right
24 # changes.
24 # changes.
25 #
25 #
26 # It is best to run this script with the "-n" (test only) flag before
26 # It is best to run this script with the "-n" (test only) flag before
27 # firing it up "for real", in which case it will use your pager to
27 # firing it up "for real", in which case it will use your pager to
28 # display each of the messages that it would send.
28 # display each of the messages that it would send.
29 #
29 #
30 # The "-m" (mbox) option will create an mbox file instead of sending
30 # The "-m" (mbox) option will create an mbox file instead of sending
31 # the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
31 # the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
32 # and finally sent with "formail -s sendmail -bm -t < mbox".
32 # and finally sent with "formail -s sendmail -bm -t < mbox".
33 #
33 #
34 # To configure other defaults, add a section like this to your hgrc
34 # To configure other defaults, add a section like this to your hgrc
35 # file:
35 # file:
36 #
36 #
37 # [email]
37 # [email]
38 # from = My Name <my@email>
38 # from = My Name <my@email>
39 # to = recipient1, recipient2, ...
39 # to = recipient1, recipient2, ...
40 # cc = cc1, cc2, ...
40 # cc = cc1, cc2, ...
41 # bcc = bcc1, bcc2, ...
41 # bcc = bcc1, bcc2, ...
42
42
43 from mercurial.demandload import *
43 from mercurial.demandload import *
44 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
44 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
45 mercurial:commands,hg,ui
45 mercurial:commands,hg,ui
46 os errno popen2 socket sys tempfile time''')
46 os errno popen2 socket sys tempfile time''')
47 from mercurial.i18n import gettext as _
47 from mercurial.i18n import gettext as _
48 from mercurial.node import *
48 from mercurial.node import *
49
49
50 try:
50 try:
51 # readline gives raw_input editing capabilities, but is not
51 # readline gives raw_input editing capabilities, but is not
52 # present on windows
52 # present on windows
53 import readline
53 import readline
54 except ImportError: pass
54 except ImportError: pass
55
55
56 def diffstat(patch):
56 def diffstat(patch):
57 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
57 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
58 try:
58 try:
59 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
59 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
60 try:
60 try:
61 for line in patch: print >> p.tochild, line
61 for line in patch: print >> p.tochild, line
62 p.tochild.close()
62 p.tochild.close()
63 if p.wait(): return
63 if p.wait(): return
64 fp = os.fdopen(fd, 'r')
64 fp = os.fdopen(fd, 'r')
65 stat = []
65 stat = []
66 for line in fp: stat.append(line.lstrip())
66 for line in fp: stat.append(line.lstrip())
67 last = stat.pop()
67 last = stat.pop()
68 stat.insert(0, last)
68 stat.insert(0, last)
69 stat = ''.join(stat)
69 stat = ''.join(stat)
70 if stat.startswith('0 files'): raise ValueError
70 if stat.startswith('0 files'): raise ValueError
71 return stat
71 return stat
72 except: raise
72 except: raise
73 finally:
73 finally:
74 try: os.unlink(name)
74 try: os.unlink(name)
75 except: pass
75 except: pass
76
76
77 def patchbomb(ui, repo, *revs, **opts):
77 def patchbomb(ui, repo, *revs, **opts):
78 '''send changesets as a series of patch emails
78 '''send changesets as a series of patch emails
79
79
80 The series starts with a "[PATCH 0 of N]" introduction, which
80 The series starts with a "[PATCH 0 of N]" introduction, which
81 describes the series as a whole.
81 describes the series as a whole.
82
82
83 Each patch email has a Subject line of "[PATCH M of N] ...", using
83 Each patch email has a Subject line of "[PATCH M of N] ...", using
84 the first line of the changeset description as the subject text.
84 the first line of the changeset description as the subject text.
85 The message contains two or three body parts. First, the rest of
85 The message contains two or three body parts. First, the rest of
86 the changeset description. Next, (optionally) if the diffstat
86 the changeset description. Next, (optionally) if the diffstat
87 program is installed, the result of running diffstat on the patch.
87 program is installed, the result of running diffstat on the patch.
88 Finally, the patch itself, as generated by "hg export".'''
88 Finally, the patch itself, as generated by "hg export".'''
89 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
89 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
90 if default: prompt += ' [%s]' % default
90 if default: prompt += ' [%s]' % default
91 prompt += rest
91 prompt += rest
92 while True:
92 while True:
93 r = raw_input(prompt)
93 r = raw_input(prompt)
94 if r: return r
94 if r: return r
95 if default is not None: return default
95 if default is not None: return default
96 if empty_ok: return r
96 if empty_ok: return r
97 ui.warn(_('Please enter a valid value.\n'))
97 ui.warn(_('Please enter a valid value.\n'))
98
98
99 def confirm(s):
99 def confirm(s):
100 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
100 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
101 raise ValueError
101 raise ValueError
102
102
103 def cdiffstat(summary, patch):
103 def cdiffstat(summary, patch):
104 s = diffstat(patch)
104 s = diffstat(patch)
105 if s:
105 if s:
106 if summary:
106 if summary:
107 ui.write(summary, '\n')
107 ui.write(summary, '\n')
108 ui.write(s, '\n')
108 ui.write(s, '\n')
109 confirm(_('Does the diffstat above look okay'))
109 confirm(_('Does the diffstat above look okay'))
110 return s
110 return s
111
111
112 def makepatch(patch, idx, total):
112 def makepatch(patch, idx, total):
113 desc = []
113 desc = []
114 node = None
114 node = None
115 body = ''
115 body = ''
116 for line in patch:
116 for line in patch:
117 if line.startswith('#'):
117 if line.startswith('#'):
118 if line.startswith('# Node ID'): node = line.split()[-1]
118 if line.startswith('# Node ID'): node = line.split()[-1]
119 continue
119 continue
120 if line.startswith('diff -r'): break
120 if line.startswith('diff -r'): break
121 desc.append(line)
121 desc.append(line)
122 if not node: raise ValueError
122 if not node: raise ValueError
123
123
124 #body = ('\n'.join(desc[1:]).strip() or
124 #body = ('\n'.join(desc[1:]).strip() or
125 # 'Patch subject is complete summary.')
125 # 'Patch subject is complete summary.')
126 #body += '\n\n\n'
126 #body += '\n\n\n'
127
127
128 if opts['plain']:
128 if opts['plain']:
129 while patch and patch[0].startswith('# '): patch.pop(0)
129 while patch and patch[0].startswith('# '): patch.pop(0)
130 if patch: patch.pop(0)
130 if patch: patch.pop(0)
131 while patch and not patch[0].strip(): patch.pop(0)
131 while patch and not patch[0].strip(): patch.pop(0)
132 if opts['diffstat']:
132 if opts['diffstat']:
133 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
133 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
134 if opts['attach']:
134 if opts['attach']:
135 msg = email.MIMEMultipart.MIMEMultipart()
135 msg = email.MIMEMultipart.MIMEMultipart()
136 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
136 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
137 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
137 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
138 binnode = bin(node)
138 binnode = bin(node)
139 # if node is mq patch, it will have patch file name as tag
139 # if node is mq patch, it will have patch file name as tag
140 patchname = [t for t in repo.nodetags(binnode)
140 patchname = [t for t in repo.nodetags(binnode)
141 if t.endswith('.patch') or t.endswith('.diff')]
141 if t.endswith('.patch') or t.endswith('.diff')]
142 if patchname:
142 if patchname:
143 patchname = patchname[0]
143 patchname = patchname[0]
144 elif total > 1:
144 elif total > 1:
145 patchname = commands.make_filename(repo, '%b-%n.patch',
145 patchname = commands.make_filename(repo, '%b-%n.patch',
146 binnode, idx, total)
146 binnode, idx, total)
147 else:
147 else:
148 patchname = commands.make_filename(repo, '%b.patch', binnode)
148 patchname = commands.make_filename(repo, '%b.patch', binnode)
149 p['Content-Disposition'] = 'inline; filename=' + patchname
149 p['Content-Disposition'] = 'inline; filename=' + patchname
150 msg.attach(p)
150 msg.attach(p)
151 else:
151 else:
152 body += '\n'.join(patch)
152 body += '\n'.join(patch)
153 msg = email.MIMEText.MIMEText(body)
153 msg = email.MIMEText.MIMEText(body)
154 if total == 1:
154 if total == 1:
155 subj = '[PATCH] ' + desc[0].strip()
155 subj = '[PATCH] ' + desc[0].strip()
156 else:
156 else:
157 subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
157 subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
158 if subj.endswith('.'): subj = subj[:-1]
158 if subj.endswith('.'): subj = subj[:-1]
159 msg['Subject'] = subj
159 msg['Subject'] = subj
160 msg['X-Mercurial-Node'] = node
160 msg['X-Mercurial-Node'] = node
161 return msg
161 return msg
162
162
163 start_time = int(time.time())
163 start_time = int(time.time())
164
164
165 def genmsgid(id):
165 def genmsgid(id):
166 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
166 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
167
167
168 patches = []
168 patches = []
169
169
170 class exportee:
170 class exportee:
171 def __init__(self, container):
171 def __init__(self, container):
172 self.lines = []
172 self.lines = []
173 self.container = container
173 self.container = container
174 self.name = 'email'
174 self.name = 'email'
175
175
176 def write(self, data):
176 def write(self, data):
177 self.lines.append(data)
177 self.lines.append(data)
178
178
179 def close(self):
179 def close(self):
180 self.container.append(''.join(self.lines).split('\n'))
180 self.container.append(''.join(self.lines).split('\n'))
181 self.lines = []
181 self.lines = []
182
182
183 commands.export(ui, repo, *revs, **{'output': exportee(patches),
183 commands.export(ui, repo, *revs, **{'output': exportee(patches),
184 'switch_parent': False,
184 'switch_parent': False,
185 'text': None})
185 'text': None})
186
186
187 jumbo = []
187 jumbo = []
188 msgs = []
188 msgs = []
189
189
190 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
190 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
191
191
192 for p, i in zip(patches, range(len(patches))):
192 for p, i in zip(patches, range(len(patches))):
193 jumbo.extend(p)
193 jumbo.extend(p)
194 msgs.append(makepatch(p, i + 1, len(patches)))
194 msgs.append(makepatch(p, i + 1, len(patches)))
195
195
196 sender = (opts['from'] or ui.config('email', 'from') or
196 sender = (opts['from'] or ui.config('email', 'from') or
197 ui.config('patchbomb', 'from') or
197 ui.config('patchbomb', 'from') or
198 prompt('From', ui.username()))
198 prompt('From', ui.username()))
199
199
200 def getaddrs(opt, prpt, default = None):
200 def getaddrs(opt, prpt, default = None):
201 addrs = opts[opt] or (ui.config('email', opt) or
201 addrs = opts[opt] or (ui.config('email', opt) or
202 ui.config('patchbomb', opt) or
202 ui.config('patchbomb', opt) or
203 prompt(prpt, default = default)).split(',')
203 prompt(prpt, default = default)).split(',')
204 return [a.strip() for a in addrs if a.strip()]
204 return [a.strip() for a in addrs if a.strip()]
205 to = getaddrs('to', 'To')
205 to = getaddrs('to', 'To')
206 cc = getaddrs('cc', 'Cc', '')
206 cc = getaddrs('cc', 'Cc', '')
207
207
208 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
208 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
209 ui.config('patchbomb', 'bcc') or '').split(',')
209 ui.config('patchbomb', 'bcc') or '').split(',')
210 bcc = [a.strip() for a in bcc if a.strip()]
210 bcc = [a.strip() for a in bcc if a.strip()]
211
211
212 if len(patches) > 1:
212 if len(patches) > 1:
213 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
213 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
214
214
215 subj = '[PATCH 0 of %d] %s' % (
215 subj = '[PATCH 0 of %d] %s' % (
216 len(patches),
216 len(patches),
217 opts['subject'] or
217 opts['subject'] or
218 prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
218 prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
219
219
220 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
220 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
221
221
222 body = []
222 body = []
223
223
224 while True:
224 while True:
225 try: l = raw_input()
225 try: l = raw_input()
226 except EOFError: break
226 except EOFError: break
227 if l == '.': break
227 if l == '.': break
228 body.append(l)
228 body.append(l)
229
229
230 if opts['diffstat']:
230 if opts['diffstat']:
231 d = cdiffstat(_('Final summary:\n'), jumbo)
231 d = cdiffstat(_('Final summary:\n'), jumbo)
232 if d: body.append('\n' + d)
232 if d: body.append('\n' + d)
233
233
234 body = '\n'.join(body) + '\n'
234 body = '\n'.join(body) + '\n'
235
235
236 msg = email.MIMEText.MIMEText(body)
236 msg = email.MIMEText.MIMEText(body)
237 msg['Subject'] = subj
237 msg['Subject'] = subj
238
238
239 msgs.insert(0, msg)
239 msgs.insert(0, msg)
240
240
241 ui.write('\n')
241 ui.write('\n')
242
242
243 if not opts['test'] and not opts['mbox']:
243 if not opts['test'] and not opts['mbox']:
244 mail = ui.sendmail()
244 mail = ui.sendmail()
245 parent = None
245 parent = None
246
246
247 # Calculate UTC offset
247 # Calculate UTC offset
248 if time.daylight: offset = time.altzone
248 if time.daylight: offset = time.altzone
249 else: offset = time.timezone
249 else: offset = time.timezone
250 if offset <= 0: sign, offset = '+', -offset
250 if offset <= 0: sign, offset = '+', -offset
251 else: sign = '-'
251 else: sign = '-'
252 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
252 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
253
253
254 sender_addr = email.Utils.parseaddr(sender)[1]
254 sender_addr = email.Utils.parseaddr(sender)[1]
255 for m in msgs:
255 for m in msgs:
256 try:
256 try:
257 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
257 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
258 except TypeError:
258 except TypeError:
259 m['Message-Id'] = genmsgid('patchbomb')
259 m['Message-Id'] = genmsgid('patchbomb')
260 if parent:
260 if parent:
261 m['In-Reply-To'] = parent
261 m['In-Reply-To'] = parent
262 else:
262 else:
263 parent = m['Message-Id']
263 parent = m['Message-Id']
264 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
264 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
265
265
266 start_time += 1
266 start_time += 1
267 m['From'] = sender
267 m['From'] = sender
268 m['To'] = ', '.join(to)
268 m['To'] = ', '.join(to)
269 if cc: m['Cc'] = ', '.join(cc)
269 if cc: m['Cc'] = ', '.join(cc)
270 if bcc: m['Bcc'] = ', '.join(bcc)
270 if bcc: m['Bcc'] = ', '.join(bcc)
271 if opts['test']:
271 if opts['test']:
272 ui.status('Displaying ', m['Subject'], ' ...\n')
272 ui.status('Displaying ', m['Subject'], ' ...\n')
273 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
273 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
274 try:
274 try:
275 fp.write(m.as_string(0))
275 fp.write(m.as_string(0))
276 fp.write('\n')
276 fp.write('\n')
277 except IOError, inst:
277 except IOError, inst:
278 if inst.errno != errno.EPIPE:
278 if inst.errno != errno.EPIPE:
279 raise
279 raise
280 fp.close()
280 fp.close()
281 elif opts['mbox']:
281 elif opts['mbox']:
282 ui.status('Writing ', m['Subject'], ' ...\n')
282 ui.status('Writing ', m['Subject'], ' ...\n')
283 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
283 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
284 date = time.asctime(time.localtime(start_time))
284 date = time.asctime(time.localtime(start_time))
285 fp.write('From %s %s\n' % (sender_addr, date))
285 fp.write('From %s %s\n' % (sender_addr, date))
286 fp.write(m.as_string(0))
286 fp.write(m.as_string(0))
287 fp.write('\n\n')
287 fp.write('\n\n')
288 fp.close()
288 fp.close()
289 else:
289 else:
290 ui.status('Sending ', m['Subject'], ' ...\n')
290 ui.status('Sending ', m['Subject'], ' ...\n')
291 m.__delitem__('bcc')
291 # Exim does not remove the Bcc field
292 del m['Bcc']
292 mail.sendmail(sender, to + bcc + cc, m.as_string(0))
293 mail.sendmail(sender, to + bcc + cc, m.as_string(0))
293
294
294 cmdtable = {
295 cmdtable = {
295 'email':
296 'email':
296 (patchbomb,
297 (patchbomb,
297 [('a', 'attach', None, 'send patches as inline attachments'),
298 [('a', 'attach', None, 'send patches as inline attachments'),
298 ('', 'bcc', [], 'email addresses of blind copy recipients'),
299 ('', 'bcc', [], 'email addresses of blind copy recipients'),
299 ('c', 'cc', [], 'email addresses of copy recipients'),
300 ('c', 'cc', [], 'email addresses of copy recipients'),
300 ('d', 'diffstat', None, 'add diffstat output to messages'),
301 ('d', 'diffstat', None, 'add diffstat output to messages'),
301 ('f', 'from', '', 'email address of sender'),
302 ('f', 'from', '', 'email address of sender'),
302 ('', 'plain', None, 'omit hg patch header'),
303 ('', 'plain', None, 'omit hg patch header'),
303 ('n', 'test', None, 'print messages that would be sent'),
304 ('n', 'test', None, 'print messages that would be sent'),
304 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
305 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
305 ('s', 'subject', '', 'subject of introductory message'),
306 ('s', 'subject', '', 'subject of introductory message'),
306 ('t', 'to', [], 'email addresses of recipients')],
307 ('t', 'to', [], 'email addresses of recipients')],
307 "hg email [OPTION]... [REV]...")
308 "hg email [OPTION]... [REV]...")
308 }
309 }
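
The patchbomb command registered above takes its defaults from the [email] section sketched in the header comment; the --from, --to, --cc, --bcc and --subject options override them per run. A hypothetical configuration and invocation might look like the following. The addresses are placeholders, and the [smtp] block is only an assumption about what ui.sendmail() reads in this version, so verify those keys before relying on them.

  [extensions]
  hgext.patchbomb =

  [email]
  from = My Name <me@example.com>
  to = dev-list@example.com
  cc = reviewer@example.com

  [smtp]
  host = smtp.example.com
  port = 25

Typical runs, per the header comment: preview first with "hg email -n 42:tip", or write an mbox for later review and delivery with "hg email -m series.mbox 42:tip".
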
@@ -1,3617 +1,3680
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup email.Parser")
15 demandload(globals(), "archival cStringIO changegroup email.Parser")
16 demandload(globals(), "hgweb.server sshserver")
16 demandload(globals(), "hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted, unknown = repo.changes()
24 modified, added, removed, deleted, unknown = repo.changes()
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
28 def filterfiles(filters, files):
28 def filterfiles(filters, files):
29 l = [x for x in files if x in filters]
29 l = [x for x in files if x in filters]
30
30
31 for t in filters:
31 for t in filters:
32 if t and t[-1] != "/":
32 if t and t[-1] != "/":
33 t += "/"
33 t += "/"
34 l += [x for x in files if x.startswith(t)]
34 l += [x for x in files if x.startswith(t)]
35 return l
35 return l
36
36
37 def relpath(repo, args):
37 def relpath(repo, args):
38 cwd = repo.getcwd()
38 cwd = repo.getcwd()
39 if cwd:
39 if cwd:
40 return [util.normpath(os.path.join(cwd, x)) for x in args]
40 return [util.normpath(os.path.join(cwd, x)) for x in args]
41 return args
41 return args
42
42
43 def logmessage(**opts):
43 def logmessage(opts):
44 """ get the log message according to -m and -l option """
44 """ get the log message according to -m and -l option """
45 message = opts['message']
45 message = opts['message']
46 logfile = opts['logfile']
46 logfile = opts['logfile']
47
47
48 if message and logfile:
48 if message and logfile:
49 raise util.Abort(_('options --message and --logfile are mutually '
49 raise util.Abort(_('options --message and --logfile are mutually '
50 'exclusive'))
50 'exclusive'))
51 if not message and logfile:
51 if not message and logfile:
52 try:
52 try:
53 if logfile == '-':
53 if logfile == '-':
54 message = sys.stdin.read()
54 message = sys.stdin.read()
55 else:
55 else:
56 message = open(logfile).read()
56 message = open(logfile).read()
57 except IOError, inst:
57 except IOError, inst:
58 raise util.Abort(_("can't read commit message '%s': %s") %
58 raise util.Abort(_("can't read commit message '%s': %s") %
59 (logfile, inst.strerror))
59 (logfile, inst.strerror))
60 return message
60 return message
61
61
62 def matchpats(repo, pats=[], opts={}, head=''):
62 def matchpats(repo, pats=[], opts={}, head=''):
63 cwd = repo.getcwd()
63 cwd = repo.getcwd()
64 if not pats and cwd:
64 if not pats and cwd:
65 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
65 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
66 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
66 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
67 cwd = ''
67 cwd = ''
68 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
68 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
69 opts.get('exclude'), head)
69 opts.get('exclude'), head)
70
70
71 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
71 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
72 files, matchfn, anypats = matchpats(repo, pats, opts, head)
72 files, matchfn, anypats = matchpats(repo, pats, opts, head)
73 exact = dict(zip(files, files))
73 exact = dict(zip(files, files))
74 def walk():
74 def walk():
75 for src, fn in repo.walk(node=node, files=files, match=matchfn,
75 for src, fn in repo.walk(node=node, files=files, match=matchfn,
76 badmatch=badmatch):
76 badmatch=badmatch):
77 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
77 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
78 return files, matchfn, walk()
78 return files, matchfn, walk()
79
79
80 def walk(repo, pats, opts, node=None, head='', badmatch=None):
80 def walk(repo, pats, opts, node=None, head='', badmatch=None):
81 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
81 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
82 for r in results:
82 for r in results:
83 yield r
83 yield r
84
84
85 def walkchangerevs(ui, repo, pats, opts):
85 def walkchangerevs(ui, repo, pats, opts):
86 '''Iterate over files and the revs they changed in.
86 '''Iterate over files and the revs they changed in.
87
87
88 Callers most commonly need to iterate backwards over the history
88 Callers most commonly need to iterate backwards over the history
89 they are interested in. Doing so has awful (quadratic-looking)
89 they are interested in. Doing so has awful (quadratic-looking)
90 performance, so we use iterators in a "windowed" way.
90 performance, so we use iterators in a "windowed" way.
91
91
92 We walk a window of revisions in the desired order. Within the
92 We walk a window of revisions in the desired order. Within the
93 window, we first walk forwards to gather data, then in the desired
93 window, we first walk forwards to gather data, then in the desired
94 order (usually backwards) to display it.
94 order (usually backwards) to display it.
95
95
96 This function returns an (iterator, getchange, matchfn) tuple. The
96 This function returns an (iterator, getchange, matchfn) tuple. The
97 getchange function returns the changelog entry for a numeric
97 getchange function returns the changelog entry for a numeric
98 revision. The iterator yields 3-tuples. They will be of one of
98 revision. The iterator yields 3-tuples. They will be of one of
99 the following forms:
99 the following forms:
100
100
101 "window", incrementing, lastrev: stepping through a window,
101 "window", incrementing, lastrev: stepping through a window,
102 positive if walking forwards through revs, last rev in the
102 positive if walking forwards through revs, last rev in the
103 sequence iterated over - use to reset state for the current window
103 sequence iterated over - use to reset state for the current window
104
104
105 "add", rev, fns: out-of-order traversal of the given file names
105 "add", rev, fns: out-of-order traversal of the given file names
106 fns, which changed during revision rev - use to gather data for
106 fns, which changed during revision rev - use to gather data for
107 possible display
107 possible display
108
108
109 "iter", rev, None: in-order traversal of the revs earlier iterated
109 "iter", rev, None: in-order traversal of the revs earlier iterated
110 over with "add" - use to display data'''
110 over with "add" - use to display data'''
111
111
112 def increasing_windows(start, end, windowsize=8, sizelimit=512):
112 def increasing_windows(start, end, windowsize=8, sizelimit=512):
113 if start < end:
113 if start < end:
114 while start < end:
114 while start < end:
115 yield start, min(windowsize, end-start)
115 yield start, min(windowsize, end-start)
116 start += windowsize
116 start += windowsize
117 if windowsize < sizelimit:
117 if windowsize < sizelimit:
118 windowsize *= 2
118 windowsize *= 2
119 else:
119 else:
120 while start > end:
120 while start > end:
121 yield start, min(windowsize, start-end-1)
121 yield start, min(windowsize, start-end-1)
122 start -= windowsize
122 start -= windowsize
123 if windowsize < sizelimit:
123 if windowsize < sizelimit:
124 windowsize *= 2
124 windowsize *= 2
125
125
126
126
127 files, matchfn, anypats = matchpats(repo, pats, opts)
127 files, matchfn, anypats = matchpats(repo, pats, opts)
128 follow = opts.get('follow')
128 follow = opts.get('follow') or opts.get('follow_first')
129
129
130 if repo.changelog.count() == 0:
130 if repo.changelog.count() == 0:
131 return [], False, matchfn
131 return [], False, matchfn
132
132
133 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
133 if follow:
134 p = repo.dirstate.parents()[0]
135 if p == nullid:
136 ui.warn(_('No working directory revision; defaulting to tip\n'))
137 start = 'tip'
138 else:
139 start = repo.changelog.rev(p)
140 defrange = '%s:0' % start
141 else:
142 defrange = 'tip:0'
143 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
134 wanted = {}
144 wanted = {}
135 slowpath = anypats
145 slowpath = anypats
136 fncache = {}
146 fncache = {}
137
147
138 chcache = {}
148 chcache = {}
139 def getchange(rev):
149 def getchange(rev):
140 ch = chcache.get(rev)
150 ch = chcache.get(rev)
141 if ch is None:
151 if ch is None:
142 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
152 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
143 return ch
153 return ch
144
154
145 if not slowpath and not files:
155 if not slowpath and not files:
146 # No files, no patterns. Display all revs.
156 # No files, no patterns. Display all revs.
147 wanted = dict(zip(revs, revs))
157 wanted = dict(zip(revs, revs))
148 copies = []
158 copies = []
149 if not slowpath:
159 if not slowpath:
150 # Only files, no patterns. Check the history of each file.
160 # Only files, no patterns. Check the history of each file.
151 def filerevgen(filelog, node):
161 def filerevgen(filelog, node):
152 cl_count = repo.changelog.count()
162 cl_count = repo.changelog.count()
153 if node is None:
163 if node is None:
154 last = filelog.count() - 1
164 last = filelog.count() - 1
155 else:
165 else:
156 last = filelog.rev(node)
166 last = filelog.rev(node)
157 for i, window in increasing_windows(last, -1):
167 for i, window in increasing_windows(last, -1):
158 revs = []
168 revs = []
159 for j in xrange(i - window, i + 1):
169 for j in xrange(i - window, i + 1):
160 n = filelog.node(j)
170 n = filelog.node(j)
161 revs.append((filelog.linkrev(n),
171 revs.append((filelog.linkrev(n),
162 follow and filelog.renamed(n)))
172 follow and filelog.renamed(n)))
163 revs.reverse()
173 revs.reverse()
164 for rev in revs:
174 for rev in revs:
165 # only yield rev for which we have the changelog, it can
175 # only yield rev for which we have the changelog, it can
166 # happen while doing "hg log" during a pull or commit
176 # happen while doing "hg log" during a pull or commit
167 if rev[0] < cl_count:
177 if rev[0] < cl_count:
168 yield rev
178 yield rev
169 def iterfiles():
179 def iterfiles():
170 for filename in files:
180 for filename in files:
171 yield filename, None
181 yield filename, None
172 for filename_node in copies:
182 for filename_node in copies:
173 yield filename_node
183 yield filename_node
174 minrev, maxrev = min(revs), max(revs)
184 minrev, maxrev = min(revs), max(revs)
175 for file_, node in iterfiles():
185 for file_, node in iterfiles():
176 filelog = repo.file(file_)
186 filelog = repo.file(file_)
177 # A zero count may be a directory or deleted file, so
187 # A zero count may be a directory or deleted file, so
178 # try to find matching entries on the slow path.
188 # try to find matching entries on the slow path.
179 if filelog.count() == 0:
189 if filelog.count() == 0:
180 slowpath = True
190 slowpath = True
181 break
191 break
182 for rev, copied in filerevgen(filelog, node):
192 for rev, copied in filerevgen(filelog, node):
183 if rev <= maxrev:
193 if rev <= maxrev:
184 if rev < minrev:
194 if rev < minrev:
185 break
195 break
186 fncache.setdefault(rev, [])
196 fncache.setdefault(rev, [])
187 fncache[rev].append(file_)
197 fncache[rev].append(file_)
188 wanted[rev] = 1
198 wanted[rev] = 1
189 if follow and copied:
199 if follow and copied:
190 copies.append(copied)
200 copies.append(copied)
191 if slowpath:
201 if slowpath:
192 if follow:
202 if follow:
193 raise util.Abort(_('can only follow copies/renames for explicit '
203 raise util.Abort(_('can only follow copies/renames for explicit '
194 'file names'))
204 'file names'))
195
205
196 # The slow path checks files modified in every changeset.
206 # The slow path checks files modified in every changeset.
197 def changerevgen():
207 def changerevgen():
198 for i, window in increasing_windows(repo.changelog.count()-1, -1):
208 for i, window in increasing_windows(repo.changelog.count()-1, -1):
199 for j in xrange(i - window, i + 1):
209 for j in xrange(i - window, i + 1):
200 yield j, getchange(j)[3]
210 yield j, getchange(j)[3]
201
211
202 for rev, changefiles in changerevgen():
212 for rev, changefiles in changerevgen():
203 matches = filter(matchfn, changefiles)
213 matches = filter(matchfn, changefiles)
204 if matches:
214 if matches:
205 fncache[rev] = matches
215 fncache[rev] = matches
206 wanted[rev] = 1
216 wanted[rev] = 1
207
217
208 def iterate():
218 def iterate():
219 class followfilter:
220 def __init__(self, onlyfirst=False):
221 self.startrev = -1
222 self.roots = []
223 self.onlyfirst = onlyfirst
224
225 def match(self, rev):
226 def realparents(rev):
227 if self.onlyfirst:
228 return repo.changelog.parentrevs(rev)[0:1]
229 else:
230 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
231
232 if self.startrev == -1:
233 self.startrev = rev
234 return True
235
236 if rev > self.startrev:
237 # forward: all descendants
238 if not self.roots:
239 self.roots.append(self.startrev)
240 for parent in realparents(rev):
241 if parent in self.roots:
242 self.roots.append(rev)
243 return True
244 else:
245 # backwards: all parents
246 if not self.roots:
247 self.roots.extend(realparents(self.startrev))
248 if rev in self.roots:
249 self.roots.remove(rev)
250 self.roots.extend(realparents(rev))
251 return True
252
253 return False
254
255 if follow and not files:
256 ff = followfilter(onlyfirst=opts.get('follow_first'))
257 def want(rev):
258 if rev not in wanted:
259 return False
260 return ff.match(rev)
261 else:
262 def want(rev):
263 return rev in wanted
264
209 for i, window in increasing_windows(0, len(revs)):
265 for i, window in increasing_windows(0, len(revs)):
210 yield 'window', revs[0] < revs[-1], revs[-1]
266 yield 'window', revs[0] < revs[-1], revs[-1]
211 nrevs = [rev for rev in revs[i:i+window]
267 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
212 if rev in wanted]
213 srevs = list(nrevs)
268 srevs = list(nrevs)
214 srevs.sort()
269 srevs.sort()
215 for rev in srevs:
270 for rev in srevs:
216 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
271 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
217 yield 'add', rev, fns
272 yield 'add', rev, fns
218 for rev in nrevs:
273 for rev in nrevs:
219 yield 'iter', rev, None
274 yield 'iter', rev, None
220 return iterate(), getchange, matchfn
275 return iterate(), getchange, matchfn
221
276
222 revrangesep = ':'
277 revrangesep = ':'
223
278
224 def revfix(repo, val, defval):
279 def revfix(repo, val, defval):
225 '''turn user-level id of changeset into rev number.
280 '''turn user-level id of changeset into rev number.
226 user-level id can be tag, changeset, rev number, or negative rev
281 user-level id can be tag, changeset, rev number, or negative rev
227 number relative to number of revs (-1 is tip, etc).'''
282 number relative to number of revs (-1 is tip, etc).'''
228 if not val:
283 if not val:
229 return defval
284 return defval
230 try:
285 try:
231 num = int(val)
286 num = int(val)
232 if str(num) != val:
287 if str(num) != val:
233 raise ValueError
288 raise ValueError
234 if num < 0:
289 if num < 0:
235 num += repo.changelog.count()
290 num += repo.changelog.count()
236 if num < 0:
291 if num < 0:
237 num = 0
292 num = 0
238 elif num >= repo.changelog.count():
293 elif num >= repo.changelog.count():
239 raise ValueError
294 raise ValueError
240 except ValueError:
295 except ValueError:
241 try:
296 try:
242 num = repo.changelog.rev(repo.lookup(val))
297 num = repo.changelog.rev(repo.lookup(val))
243 except KeyError:
298 except KeyError:
244 raise util.Abort(_('invalid revision identifier %s'), val)
299 raise util.Abort(_('invalid revision identifier %s'), val)
245 return num
300 return num
246
301
247 def revpair(ui, repo, revs):
302 def revpair(ui, repo, revs):
248 '''return pair of nodes, given list of revisions. second item can
303 '''return pair of nodes, given list of revisions. second item can
249 be None, meaning use working dir.'''
304 be None, meaning use working dir.'''
250 if not revs:
305 if not revs:
251 return repo.dirstate.parents()[0], None
306 return repo.dirstate.parents()[0], None
252 end = None
307 end = None
253 if len(revs) == 1:
308 if len(revs) == 1:
254 start = revs[0]
309 start = revs[0]
255 if revrangesep in start:
310 if revrangesep in start:
256 start, end = start.split(revrangesep, 1)
311 start, end = start.split(revrangesep, 1)
257 start = revfix(repo, start, 0)
312 start = revfix(repo, start, 0)
258 end = revfix(repo, end, repo.changelog.count() - 1)
313 end = revfix(repo, end, repo.changelog.count() - 1)
259 else:
314 else:
260 start = revfix(repo, start, None)
315 start = revfix(repo, start, None)
261 elif len(revs) == 2:
316 elif len(revs) == 2:
262 if revrangesep in revs[0] or revrangesep in revs[1]:
317 if revrangesep in revs[0] or revrangesep in revs[1]:
263 raise util.Abort(_('too many revisions specified'))
318 raise util.Abort(_('too many revisions specified'))
264 start = revfix(repo, revs[0], None)
319 start = revfix(repo, revs[0], None)
265 end = revfix(repo, revs[1], None)
320 end = revfix(repo, revs[1], None)
266 else:
321 else:
267 raise util.Abort(_('too many revisions specified'))
322 raise util.Abort(_('too many revisions specified'))
268 if end is not None: end = repo.lookup(str(end))
323 if end is not None: end = repo.lookup(str(end))
269 return repo.lookup(str(start)), end
324 return repo.lookup(str(start)), end
270
325
271 def revrange(ui, repo, revs):
326 def revrange(ui, repo, revs):
272 """Yield revision as strings from a list of revision specifications."""
327 """Yield revision as strings from a list of revision specifications."""
273 seen = {}
328 seen = {}
274 for spec in revs:
329 for spec in revs:
275 if revrangesep in spec:
330 if revrangesep in spec:
276 start, end = spec.split(revrangesep, 1)
331 start, end = spec.split(revrangesep, 1)
277 start = revfix(repo, start, 0)
332 start = revfix(repo, start, 0)
278 end = revfix(repo, end, repo.changelog.count() - 1)
333 end = revfix(repo, end, repo.changelog.count() - 1)
279 step = start > end and -1 or 1
334 step = start > end and -1 or 1
280 for rev in xrange(start, end+step, step):
335 for rev in xrange(start, end+step, step):
281 if rev in seen:
336 if rev in seen:
282 continue
337 continue
283 seen[rev] = 1
338 seen[rev] = 1
284 yield str(rev)
339 yield str(rev)
285 else:
340 else:
286 rev = revfix(repo, spec, None)
341 rev = revfix(repo, spec, None)
287 if rev in seen:
342 if rev in seen:
288 continue
343 continue
289 seen[rev] = 1
344 seen[rev] = 1
290 yield str(rev)
345 yield str(rev)
291
346
292 def make_filename(repo, pat, node,
347 def make_filename(repo, pat, node,
293 total=None, seqno=None, revwidth=None, pathname=None):
348 total=None, seqno=None, revwidth=None, pathname=None):
294 node_expander = {
349 node_expander = {
295 'H': lambda: hex(node),
350 'H': lambda: hex(node),
296 'R': lambda: str(repo.changelog.rev(node)),
351 'R': lambda: str(repo.changelog.rev(node)),
297 'h': lambda: short(node),
352 'h': lambda: short(node),
298 }
353 }
299 expander = {
354 expander = {
300 '%': lambda: '%',
355 '%': lambda: '%',
301 'b': lambda: os.path.basename(repo.root),
356 'b': lambda: os.path.basename(repo.root),
302 }
357 }
303
358
304 try:
359 try:
305 if node:
360 if node:
306 expander.update(node_expander)
361 expander.update(node_expander)
307 if node and revwidth is not None:
362 if node and revwidth is not None:
308 expander['r'] = (lambda:
363 expander['r'] = (lambda:
309 str(repo.changelog.rev(node)).zfill(revwidth))
364 str(repo.changelog.rev(node)).zfill(revwidth))
310 if total is not None:
365 if total is not None:
311 expander['N'] = lambda: str(total)
366 expander['N'] = lambda: str(total)
312 if seqno is not None:
367 if seqno is not None:
313 expander['n'] = lambda: str(seqno)
368 expander['n'] = lambda: str(seqno)
314 if total is not None and seqno is not None:
369 if total is not None and seqno is not None:
315 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
370 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
316 if pathname is not None:
371 if pathname is not None:
317 expander['s'] = lambda: os.path.basename(pathname)
372 expander['s'] = lambda: os.path.basename(pathname)
318 expander['d'] = lambda: os.path.dirname(pathname) or '.'
373 expander['d'] = lambda: os.path.dirname(pathname) or '.'
319 expander['p'] = lambda: pathname
374 expander['p'] = lambda: pathname
320
375
321 newname = []
376 newname = []
322 patlen = len(pat)
377 patlen = len(pat)
323 i = 0
378 i = 0
324 while i < patlen:
379 while i < patlen:
325 c = pat[i]
380 c = pat[i]
326 if c == '%':
381 if c == '%':
327 i += 1
382 i += 1
328 c = pat[i]
383 c = pat[i]
329 c = expander[c]()
384 c = expander[c]()
330 newname.append(c)
385 newname.append(c)
331 i += 1
386 i += 1
332 return ''.join(newname)
387 return ''.join(newname)
333 except KeyError, inst:
388 except KeyError, inst:
334 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
389 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
335 inst.args[0])
390 inst.args[0])
336
391
337 def make_file(repo, pat, node=None,
392 def make_file(repo, pat, node=None,
338 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
393 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
339 if not pat or pat == '-':
394 if not pat or pat == '-':
340 return 'w' in mode and sys.stdout or sys.stdin
395 return 'w' in mode and sys.stdout or sys.stdin
341 if hasattr(pat, 'write') and 'w' in mode:
396 if hasattr(pat, 'write') and 'w' in mode:
342 return pat
397 return pat
343 if hasattr(pat, 'read') and 'r' in mode:
398 if hasattr(pat, 'read') and 'r' in mode:
344 return pat
399 return pat
345 return open(make_filename(repo, pat, node, total, seqno, revwidth,
400 return open(make_filename(repo, pat, node, total, seqno, revwidth,
346 pathname),
401 pathname),
347 mode)
402 mode)
348
403
349 def write_bundle(cg, filename=None, compress=True):
404 def write_bundle(cg, filename=None, compress=True):
350 """Write a bundle file and return its filename.
405 """Write a bundle file and return its filename.
351
406
352 Existing files will not be overwritten.
407 Existing files will not be overwritten.
353 If no filename is specified, a temporary file is created.
408 If no filename is specified, a temporary file is created.
354 bz2 compression can be turned off.
409 bz2 compression can be turned off.
355 The bundle file will be deleted in case of errors.
410 The bundle file will be deleted in case of errors.
356 """
411 """
357 class nocompress(object):
412 class nocompress(object):
358 def compress(self, x):
413 def compress(self, x):
359 return x
414 return x
360 def flush(self):
415 def flush(self):
361 return ""
416 return ""
362
417
363 fh = None
418 fh = None
364 cleanup = None
419 cleanup = None
365 try:
420 try:
366 if filename:
421 if filename:
367 if os.path.exists(filename):
422 if os.path.exists(filename):
368 raise util.Abort(_("file '%s' already exists"), filename)
423 raise util.Abort(_("file '%s' already exists"), filename)
369 fh = open(filename, "wb")
424 fh = open(filename, "wb")
370 else:
425 else:
371 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
426 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
372 fh = os.fdopen(fd, "wb")
427 fh = os.fdopen(fd, "wb")
373 cleanup = filename
428 cleanup = filename
374
429
375 if compress:
430 if compress:
376 fh.write("HG10")
431 fh.write("HG10")
377 z = bz2.BZ2Compressor(9)
432 z = bz2.BZ2Compressor(9)
378 else:
433 else:
379 fh.write("HG10UN")
434 fh.write("HG10UN")
380 z = nocompress()
435 z = nocompress()
381 # parse the changegroup data, otherwise we will block
436 # parse the changegroup data, otherwise we will block
382 # in case of sshrepo because we don't know the end of the stream
437 # in case of sshrepo because we don't know the end of the stream
383
438
384 # an empty chunkiter is the end of the changegroup
439 # an empty chunkiter is the end of the changegroup
385 empty = False
440 empty = False
386 while not empty:
441 while not empty:
387 empty = True
442 empty = True
388 for chunk in changegroup.chunkiter(cg):
443 for chunk in changegroup.chunkiter(cg):
389 empty = False
444 empty = False
390 fh.write(z.compress(changegroup.genchunk(chunk)))
445 fh.write(z.compress(changegroup.genchunk(chunk)))
391 fh.write(z.compress(changegroup.closechunk()))
446 fh.write(z.compress(changegroup.closechunk()))
392 fh.write(z.flush())
447 fh.write(z.flush())
393 cleanup = None
448 cleanup = None
394 return filename
449 return filename
395 finally:
450 finally:
396 if fh is not None:
451 if fh is not None:
397 fh.close()
452 fh.close()
398 if cleanup is not None:
453 if cleanup is not None:
399 os.unlink(cleanup)
454 os.unlink(cleanup)
400
455
401 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
456 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
402 changes=None, text=False, opts={}):
457 changes=None, text=False, opts={}):
403 if not node1:
458 if not node1:
404 node1 = repo.dirstate.parents()[0]
459 node1 = repo.dirstate.parents()[0]
405 # reading the data for node1 early allows it to play nicely
460 # reading the data for node1 early allows it to play nicely
406 # with repo.changes and the revlog cache.
461 # with repo.changes and the revlog cache.
407 change = repo.changelog.read(node1)
462 change = repo.changelog.read(node1)
408 mmap = repo.manifest.read(change[0])
463 mmap = repo.manifest.read(change[0])
409 date1 = util.datestr(change[2])
464 date1 = util.datestr(change[2])
410
465
411 if not changes:
466 if not changes:
412 changes = repo.changes(node1, node2, files, match=match)
467 changes = repo.changes(node1, node2, files, match=match)
413 modified, added, removed, deleted, unknown = changes
468 modified, added, removed, deleted, unknown = changes
414 if files:
469 if files:
415 modified, added, removed = map(lambda x: filterfiles(files, x),
470 modified, added, removed = map(lambda x: filterfiles(files, x),
416 (modified, added, removed))
471 (modified, added, removed))
417
472
418 if not modified and not added and not removed:
473 if not modified and not added and not removed:
419 return
474 return
420
475
421 if node2:
476 if node2:
422 change = repo.changelog.read(node2)
477 change = repo.changelog.read(node2)
423 mmap2 = repo.manifest.read(change[0])
478 mmap2 = repo.manifest.read(change[0])
424 _date2 = util.datestr(change[2])
479 _date2 = util.datestr(change[2])
425 def date2(f):
480 def date2(f):
426 return _date2
481 return _date2
427 def read(f):
482 def read(f):
428 return repo.file(f).read(mmap2[f])
483 return repo.file(f).read(mmap2[f])
429 else:
484 else:
430 tz = util.makedate()[1]
485 tz = util.makedate()[1]
431 _date2 = util.datestr()
486 _date2 = util.datestr()
432 def date2(f):
487 def date2(f):
433 try:
488 try:
434 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
489 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
435 except OSError, err:
490 except OSError, err:
436 if err.errno != errno.ENOENT: raise
491 if err.errno != errno.ENOENT: raise
437 return _date2
492 return _date2
438 def read(f):
493 def read(f):
439 return repo.wread(f)
494 return repo.wread(f)
440
495
441 if ui.quiet:
496 if ui.quiet:
442 r = None
497 r = None
443 else:
498 else:
444 hexfunc = ui.verbose and hex or short
499 hexfunc = ui.verbose and hex or short
445 r = [hexfunc(node) for node in [node1, node2] if node]
500 r = [hexfunc(node) for node in [node1, node2] if node]
446
501
447 diffopts = ui.diffopts()
502 diffopts = ui.diffopts()
448 showfunc = opts.get('show_function') or diffopts['showfunc']
503 showfunc = opts.get('show_function') or diffopts['showfunc']
449 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
504 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
450 ignorewsamount = opts.get('ignore_space_change') or \
505 ignorewsamount = opts.get('ignore_space_change') or \
451 diffopts['ignorewsamount']
506 diffopts['ignorewsamount']
452 ignoreblanklines = opts.get('ignore_blank_lines') or \
507 ignoreblanklines = opts.get('ignore_blank_lines') or \
453 diffopts['ignoreblanklines']
508 diffopts['ignoreblanklines']
454
509
455 all = modified + added + removed
510 all = modified + added + removed
456 all.sort()
511 all.sort()
457 for f in all:
512 for f in all:
458 to = None
513 to = None
459 tn = None
514 tn = None
460 if f in mmap:
515 if f in mmap:
461 to = repo.file(f).read(mmap[f])
516 to = repo.file(f).read(mmap[f])
462 if f not in removed:
517 if f not in removed:
463 tn = read(f)
518 tn = read(f)
464 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
519 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
465 showfunc=showfunc, ignorews=ignorews,
520 showfunc=showfunc, ignorews=ignorews,
466 ignorewsamount=ignorewsamount,
521 ignorewsamount=ignorewsamount,
467 ignoreblanklines=ignoreblanklines))
522 ignoreblanklines=ignoreblanklines))
468
523
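# Editor's note (not part of commands.py): sketch of the date2() fallback
# used above -- when diffing against the working directory the header date
# for a file comes from its mtime, and a deleted file falls back to a
# default timestamp instead of raising.  time.ctime() stands in for
# util.datestr(); modern Python syntax.
import errno
import os
import time

def header_date(path, default=None):
    if default is None:
        default = time.ctime()
    try:
        return time.ctime(os.lstat(path).st_mtime)
    except OSError as err:
        if err.errno != errno.ENOENT:
            raise
        return default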
469 def trimuser(ui, name, rev, revcache):
524 def trimuser(ui, name, rev, revcache):
470 """trim the name of the user who committed a change"""
525 """trim the name of the user who committed a change"""
471 user = revcache.get(rev)
526 user = revcache.get(rev)
472 if user is None:
527 if user is None:
473 user = revcache[rev] = ui.shortuser(name)
528 user = revcache[rev] = ui.shortuser(name)
474 return user
529 return user
475
530
476 class changeset_printer(object):
531 class changeset_printer(object):
477 '''show changeset information when templating not requested.'''
532 '''show changeset information when templating not requested.'''
478
533
479 def __init__(self, ui, repo):
534 def __init__(self, ui, repo):
480 self.ui = ui
535 self.ui = ui
481 self.repo = repo
536 self.repo = repo
482
537
483 def show(self, rev=0, changenode=None, brinfo=None):
538 def show(self, rev=0, changenode=None, brinfo=None):
484 '''show a single changeset or file revision'''
539 '''show a single changeset or file revision'''
485 log = self.repo.changelog
540 log = self.repo.changelog
486 if changenode is None:
541 if changenode is None:
487 changenode = log.node(rev)
542 changenode = log.node(rev)
488 elif not rev:
543 elif not rev:
489 rev = log.rev(changenode)
544 rev = log.rev(changenode)
490
545
491 if self.ui.quiet:
546 if self.ui.quiet:
492 self.ui.write("%d:%s\n" % (rev, short(changenode)))
547 self.ui.write("%d:%s\n" % (rev, short(changenode)))
493 return
548 return
494
549
495 changes = log.read(changenode)
550 changes = log.read(changenode)
496 date = util.datestr(changes[2])
551 date = util.datestr(changes[2])
497
552
498 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
553 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
499 for p in log.parents(changenode)
554 for p in log.parents(changenode)
500 if self.ui.debugflag or p != nullid]
555 if self.ui.debugflag or p != nullid]
501 if (not self.ui.debugflag and len(parents) == 1 and
556 if (not self.ui.debugflag and len(parents) == 1 and
502 parents[0][0] == rev-1):
557 parents[0][0] == rev-1):
503 parents = []
558 parents = []
504
559
505 if self.ui.verbose:
560 if self.ui.verbose:
506 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
561 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
507 else:
562 else:
508 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
563 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
509
564
510 for tag in self.repo.nodetags(changenode):
565 for tag in self.repo.nodetags(changenode):
511 self.ui.status(_("tag: %s\n") % tag)
566 self.ui.status(_("tag: %s\n") % tag)
512 for parent in parents:
567 for parent in parents:
513 self.ui.write(_("parent: %d:%s\n") % parent)
568 self.ui.write(_("parent: %d:%s\n") % parent)
514
569
515 if brinfo and changenode in brinfo:
570 if brinfo and changenode in brinfo:
516 br = brinfo[changenode]
571 br = brinfo[changenode]
517 self.ui.write(_("branch: %s\n") % " ".join(br))
572 self.ui.write(_("branch: %s\n") % " ".join(br))
518
573
519 self.ui.debug(_("manifest: %d:%s\n") %
574 self.ui.debug(_("manifest: %d:%s\n") %
520 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
575 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
521 self.ui.status(_("user: %s\n") % changes[1])
576 self.ui.status(_("user: %s\n") % changes[1])
522 self.ui.status(_("date: %s\n") % date)
577 self.ui.status(_("date: %s\n") % date)
523
578
524 if self.ui.debugflag:
579 if self.ui.debugflag:
525 files = self.repo.changes(log.parents(changenode)[0], changenode)
580 files = self.repo.changes(log.parents(changenode)[0], changenode)
526 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
581 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
527 files):
582 files):
528 if value:
583 if value:
529 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
584 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
530 else:
585 else:
531 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
586 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
532
587
533 description = changes[4].strip()
588 description = changes[4].strip()
534 if description:
589 if description:
535 if self.ui.verbose:
590 if self.ui.verbose:
536 self.ui.status(_("description:\n"))
591 self.ui.status(_("description:\n"))
537 self.ui.status(description)
592 self.ui.status(description)
538 self.ui.status("\n\n")
593 self.ui.status("\n\n")
539 else:
594 else:
540 self.ui.status(_("summary: %s\n") %
595 self.ui.status(_("summary: %s\n") %
541 description.splitlines()[0])
596 description.splitlines()[0])
542 self.ui.status("\n")
597 self.ui.status("\n")
543
598
544 def show_changeset(ui, repo, opts):
599 def show_changeset(ui, repo, opts):
545 '''show one changeset. uses template or regular display. caller
600 '''show one changeset. uses template or regular display. caller
546 can pass in 'style' and 'template' options in opts.'''
601 can pass in 'style' and 'template' options in opts.'''
547
602
548 tmpl = opts.get('template')
603 tmpl = opts.get('template')
549 if tmpl:
604 if tmpl:
550 tmpl = templater.parsestring(tmpl, quoted=False)
605 tmpl = templater.parsestring(tmpl, quoted=False)
551 else:
606 else:
552 tmpl = ui.config('ui', 'logtemplate')
607 tmpl = ui.config('ui', 'logtemplate')
553 if tmpl: tmpl = templater.parsestring(tmpl)
608 if tmpl: tmpl = templater.parsestring(tmpl)
554 mapfile = opts.get('style') or ui.config('ui', 'style')
609 mapfile = opts.get('style') or ui.config('ui', 'style')
555 if tmpl or mapfile:
610 if tmpl or mapfile:
556 if mapfile:
611 if mapfile:
557 if not os.path.isfile(mapfile):
612 if not os.path.isfile(mapfile):
558 mapname = templater.templatepath('map-cmdline.' + mapfile)
613 mapname = templater.templatepath('map-cmdline.' + mapfile)
559 if not mapname: mapname = templater.templatepath(mapfile)
614 if not mapname: mapname = templater.templatepath(mapfile)
560 if mapname: mapfile = mapname
615 if mapname: mapfile = mapname
561 try:
616 try:
562 t = templater.changeset_templater(ui, repo, mapfile)
617 t = templater.changeset_templater(ui, repo, mapfile)
563 except SyntaxError, inst:
618 except SyntaxError, inst:
564 raise util.Abort(inst.args[0])
619 raise util.Abort(inst.args[0])
565 if tmpl: t.use_template(tmpl)
620 if tmpl: t.use_template(tmpl)
566 return t
621 return t
567 return changeset_printer(ui, repo)
622 return changeset_printer(ui, repo)
568
623
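# Editor's note (not part of commands.py): illustrative sketch, with
# hypothetical names, of the selection order implemented above: an explicit
# --template wins, then the ui.logtemplate configuration value, then a
# --style/ui.style map file; with none of these the plain changeset_printer
# is used.
def choose_display(template_opt, config_template, style_opt, config_style):
    tmpl = template_opt or config_template
    mapfile = style_opt or config_style
    if tmpl or mapfile:
        return ("templater", tmpl, mapfile)
    return ("plain", None, None)

print(choose_display(None, "{rev}:{node|short}\n", None, None))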
569 def setremoteconfig(ui, opts):
624 def setremoteconfig(ui, opts):
570 "copy remote options to ui tree"
625 "copy remote options to ui tree"
571 if opts.get('ssh'):
626 if opts.get('ssh'):
572 ui.setconfig("ui", "ssh", opts['ssh'])
627 ui.setconfig("ui", "ssh", opts['ssh'])
573 if opts.get('remotecmd'):
628 if opts.get('remotecmd'):
574 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
629 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
575
630
576 def show_version(ui):
631 def show_version(ui):
577 """output version and copyright information"""
632 """output version and copyright information"""
578 ui.write(_("Mercurial Distributed SCM (version %s)\n")
633 ui.write(_("Mercurial Distributed SCM (version %s)\n")
579 % version.get_version())
634 % version.get_version())
580 ui.status(_(
635 ui.status(_(
581 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
636 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
582 "This is free software; see the source for copying conditions. "
637 "This is free software; see the source for copying conditions. "
583 "There is NO\nwarranty; "
638 "There is NO\nwarranty; "
584 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
639 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
585 ))
640 ))
586
641
587 def help_(ui, name=None, with_version=False):
642 def help_(ui, name=None, with_version=False):
588 """show help for a command, extension, or list of commands
643 """show help for a command, extension, or list of commands
589
644
590 With no arguments, print a list of commands and short help.
645 With no arguments, print a list of commands and short help.
591
646
592 Given a command name, print help for that command.
647 Given a command name, print help for that command.
593
648
594 Given an extension name, print help for that extension, and the
649 Given an extension name, print help for that extension, and the
595 commands it provides."""
650 commands it provides."""
596 option_lists = []
651 option_lists = []
597
652
598 def helpcmd(name):
653 def helpcmd(name):
599 if with_version:
654 if with_version:
600 show_version(ui)
655 show_version(ui)
601 ui.write('\n')
656 ui.write('\n')
602 aliases, i = findcmd(name)
657 aliases, i = findcmd(name)
603 # synopsis
658 # synopsis
604 ui.write("%s\n\n" % i[2])
659 ui.write("%s\n\n" % i[2])
605
660
606 # description
661 # description
607 doc = i[0].__doc__
662 doc = i[0].__doc__
608 if not doc:
663 if not doc:
609 doc = _("(No help text available)")
664 doc = _("(No help text available)")
610 if ui.quiet:
665 if ui.quiet:
611 doc = doc.splitlines(0)[0]
666 doc = doc.splitlines(0)[0]
612 ui.write("%s\n" % doc.rstrip())
667 ui.write("%s\n" % doc.rstrip())
613
668
614 if not ui.quiet:
669 if not ui.quiet:
615 # aliases
670 # aliases
616 if len(aliases) > 1:
671 if len(aliases) > 1:
617 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
672 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
618
673
619 # options
674 # options
620 if i[1]:
675 if i[1]:
621 option_lists.append(("options", i[1]))
676 option_lists.append(("options", i[1]))
622
677
623 def helplist(select=None):
678 def helplist(select=None):
624 h = {}
679 h = {}
625 cmds = {}
680 cmds = {}
626 for c, e in table.items():
681 for c, e in table.items():
627 f = c.split("|", 1)[0]
682 f = c.split("|", 1)[0]
628 if select and not select(f):
683 if select and not select(f):
629 continue
684 continue
630 if name == "shortlist" and not f.startswith("^"):
685 if name == "shortlist" and not f.startswith("^"):
631 continue
686 continue
632 f = f.lstrip("^")
687 f = f.lstrip("^")
633 if not ui.debugflag and f.startswith("debug"):
688 if not ui.debugflag and f.startswith("debug"):
634 continue
689 continue
635 doc = e[0].__doc__
690 doc = e[0].__doc__
636 if not doc:
691 if not doc:
637 doc = _("(No help text available)")
692 doc = _("(No help text available)")
638 h[f] = doc.splitlines(0)[0].rstrip()
693 h[f] = doc.splitlines(0)[0].rstrip()
639 cmds[f] = c.lstrip("^")
694 cmds[f] = c.lstrip("^")
640
695
641 fns = h.keys()
696 fns = h.keys()
642 fns.sort()
697 fns.sort()
643 m = max(map(len, fns))
698 m = max(map(len, fns))
644 for f in fns:
699 for f in fns:
645 if ui.verbose:
700 if ui.verbose:
646 commands = cmds[f].replace("|",", ")
701 commands = cmds[f].replace("|",", ")
647 ui.write(" %s:\n %s\n"%(commands, h[f]))
702 ui.write(" %s:\n %s\n"%(commands, h[f]))
648 else:
703 else:
649 ui.write(' %-*s %s\n' % (m, f, h[f]))
704 ui.write(' %-*s %s\n' % (m, f, h[f]))
650
705
651 def helpext(name):
706 def helpext(name):
652 try:
707 try:
653 mod = findext(name)
708 mod = findext(name)
654 except KeyError:
709 except KeyError:
655 raise UnknownCommand(name)
710 raise UnknownCommand(name)
656
711
657 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
712 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
658 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
713 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
659 for d in doc[1:]:
714 for d in doc[1:]:
660 ui.write(d, '\n')
715 ui.write(d, '\n')
661
716
662 ui.status('\n')
717 ui.status('\n')
663 if ui.verbose:
718 if ui.verbose:
664 ui.status(_('list of commands:\n\n'))
719 ui.status(_('list of commands:\n\n'))
665 else:
720 else:
666 ui.status(_('list of commands (use "hg help -v %s" '
721 ui.status(_('list of commands (use "hg help -v %s" '
667 'to show aliases and global options):\n\n') % name)
722 'to show aliases and global options):\n\n') % name)
668
723
669 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
724 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
670 helplist(modcmds.has_key)
725 helplist(modcmds.has_key)
671
726
672 if name and name != 'shortlist':
727 if name and name != 'shortlist':
673 try:
728 try:
674 helpcmd(name)
729 helpcmd(name)
675 except UnknownCommand:
730 except UnknownCommand:
676 helpext(name)
731 helpext(name)
677
732
678 else:
733 else:
679 # program name
734 # program name
680 if ui.verbose or with_version:
735 if ui.verbose or with_version:
681 show_version(ui)
736 show_version(ui)
682 else:
737 else:
683 ui.status(_("Mercurial Distributed SCM\n"))
738 ui.status(_("Mercurial Distributed SCM\n"))
684 ui.status('\n')
739 ui.status('\n')
685
740
686 # list of commands
741 # list of commands
687 if name == "shortlist":
742 if name == "shortlist":
688 ui.status(_('basic commands (use "hg help" '
743 ui.status(_('basic commands (use "hg help" '
689 'for the full list or option "-v" for details):\n\n'))
744 'for the full list or option "-v" for details):\n\n'))
690 elif ui.verbose:
745 elif ui.verbose:
691 ui.status(_('list of commands:\n\n'))
746 ui.status(_('list of commands:\n\n'))
692 else:
747 else:
693 ui.status(_('list of commands (use "hg help -v" '
748 ui.status(_('list of commands (use "hg help -v" '
694 'to show aliases and global options):\n\n'))
749 'to show aliases and global options):\n\n'))
695
750
696 helplist()
751 helplist()
697
752
698 # global options
753 # global options
699 if ui.verbose:
754 if ui.verbose:
700 option_lists.append(("global options", globalopts))
755 option_lists.append(("global options", globalopts))
701
756
702 # list all option lists
757 # list all option lists
703 opt_output = []
758 opt_output = []
704 for title, options in option_lists:
759 for title, options in option_lists:
705 opt_output.append(("\n%s:\n" % title, None))
760 opt_output.append(("\n%s:\n" % title, None))
706 for shortopt, longopt, default, desc in options:
761 for shortopt, longopt, default, desc in options:
707 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
762 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
708 longopt and " --%s" % longopt),
763 longopt and " --%s" % longopt),
709 "%s%s" % (desc,
764 "%s%s" % (desc,
710 default
765 default
711 and _(" (default: %s)") % default
766 and _(" (default: %s)") % default
712 or "")))
767 or "")))
713
768
714 if opt_output:
769 if opt_output:
715 opts_len = max([len(line[0]) for line in opt_output if line[1]])
770 opts_len = max([len(line[0]) for line in opt_output if line[1]])
716 for first, second in opt_output:
771 for first, second in opt_output:
717 if second:
772 if second:
718 ui.write(" %-*s %s\n" % (opts_len, first, second))
773 ui.write(" %-*s %s\n" % (opts_len, first, second))
719 else:
774 else:
720 ui.write("%s\n" % first)
775 ui.write("%s\n" % first)
721
776
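# Editor's note (not part of commands.py): standalone sketch of the option
# table layout produced above -- the column width is the longest left-hand
# entry among rows that carry a description, and header rows (no
# description) are printed flush left.
import sys

def print_opts(rows, write=sys.stdout.write):
    width = max(len(left) for left, right in rows if right)
    for left, right in rows:
        if right:
            write(" %-*s  %s\n" % (width, left, right))
        else:
            write("%s\n" % left)

print_opts([("\noptions:\n", None),
            ("-v --verbose", "enable additional output"),
            ("-q --quiet", "suppress output")])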
722 # Commands start here, listed alphabetically
777 # Commands start here, listed alphabetically
723
778
724 def add(ui, repo, *pats, **opts):
779 def add(ui, repo, *pats, **opts):
725 """add the specified files on the next commit
780 """add the specified files on the next commit
726
781
727 Schedule files to be version controlled and added to the repository.
782 Schedule files to be version controlled and added to the repository.
728
783
729 The files will be added to the repository at the next commit.
784 The files will be added to the repository at the next commit.
730
785
731 If no names are given, add all files in the repository.
786 If no names are given, add all files in the repository.
732 """
787 """
733
788
734 names = []
789 names = []
735 for src, abs, rel, exact in walk(repo, pats, opts):
790 for src, abs, rel, exact in walk(repo, pats, opts):
736 if exact:
791 if exact:
737 if ui.verbose:
792 if ui.verbose:
738 ui.status(_('adding %s\n') % rel)
793 ui.status(_('adding %s\n') % rel)
739 names.append(abs)
794 names.append(abs)
740 elif repo.dirstate.state(abs) == '?':
795 elif repo.dirstate.state(abs) == '?':
741 ui.status(_('adding %s\n') % rel)
796 ui.status(_('adding %s\n') % rel)
742 names.append(abs)
797 names.append(abs)
743 if not opts.get('dry_run'):
798 if not opts.get('dry_run'):
744 repo.add(names)
799 repo.add(names)
745
800
746 def addremove(ui, repo, *pats, **opts):
801 def addremove(ui, repo, *pats, **opts):
747 """add all new files, delete all missing files (DEPRECATED)
802 """add all new files, delete all missing files (DEPRECATED)
748
803
749 (DEPRECATED)
804 (DEPRECATED)
750 Add all new files and remove all missing files from the repository.
805 Add all new files and remove all missing files from the repository.
751
806
752 New files are ignored if they match any of the patterns in .hgignore. As
807 New files are ignored if they match any of the patterns in .hgignore. As
753 with add, these changes take effect at the next commit.
808 with add, these changes take effect at the next commit.
754
809
755 This command is now deprecated and will be removed in a future
810 This command is now deprecated and will be removed in a future
756 release. Please use add and remove --after instead.
811 release. Please use add and remove --after instead.
757 """
812 """
758 ui.warn(_('(the addremove command is deprecated; use add and remove '
813 ui.warn(_('(the addremove command is deprecated; use add and remove '
759 '--after instead)\n'))
814 '--after instead)\n'))
760 return addremove_lock(ui, repo, pats, opts)
815 return addremove_lock(ui, repo, pats, opts)
761
816
762 def addremove_lock(ui, repo, pats, opts, wlock=None):
817 def addremove_lock(ui, repo, pats, opts, wlock=None):
763 add, remove = [], []
818 add, remove = [], []
764 for src, abs, rel, exact in walk(repo, pats, opts):
819 for src, abs, rel, exact in walk(repo, pats, opts):
765 if src == 'f' and repo.dirstate.state(abs) == '?':
820 if src == 'f' and repo.dirstate.state(abs) == '?':
766 add.append(abs)
821 add.append(abs)
767 if ui.verbose or not exact:
822 if ui.verbose or not exact:
768 ui.status(_('adding %s\n') % ((pats and rel) or abs))
823 ui.status(_('adding %s\n') % ((pats and rel) or abs))
769 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
824 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
770 remove.append(abs)
825 remove.append(abs)
771 if ui.verbose or not exact:
826 if ui.verbose or not exact:
772 ui.status(_('removing %s\n') % ((pats and rel) or abs))
827 ui.status(_('removing %s\n') % ((pats and rel) or abs))
773 if not opts.get('dry_run'):
828 if not opts.get('dry_run'):
774 repo.add(add, wlock=wlock)
829 repo.add(add, wlock=wlock)
775 repo.remove(remove, wlock=wlock)
830 repo.remove(remove, wlock=wlock)
776
831
777 def annotate(ui, repo, *pats, **opts):
832 def annotate(ui, repo, *pats, **opts):
778 """show changeset information per file line
833 """show changeset information per file line
779
834
780 List changes in files, showing the revision id responsible for each line
835 List changes in files, showing the revision id responsible for each line
781
836
782 This command is useful to discover who did a change or when a change took
837 This command is useful to discover who did a change or when a change took
783 place.
838 place.
784
839
785 Without the -a option, annotate will avoid processing files it
840 Without the -a option, annotate will avoid processing files it
786 detects as binary. With -a, annotate will generate an annotation
841 detects as binary. With -a, annotate will generate an annotation
787 anyway, probably with undesirable results.
842 anyway, probably with undesirable results.
788 """
843 """
789 def getnode(rev):
844 def getnode(rev):
790 return short(repo.changelog.node(rev))
845 return short(repo.changelog.node(rev))
791
846
792 ucache = {}
847 ucache = {}
793 def getname(rev):
848 def getname(rev):
794 try:
849 try:
795 return ucache[rev]
850 return ucache[rev]
796 except:
851 except:
797 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
852 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
798 ucache[rev] = u
853 ucache[rev] = u
799 return u
854 return u
800
855
801 dcache = {}
856 dcache = {}
802 def getdate(rev):
857 def getdate(rev):
803 datestr = dcache.get(rev)
858 datestr = dcache.get(rev)
804 if datestr is None:
859 if datestr is None:
805 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
860 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
806 return datestr
861 return datestr
807
862
808 if not pats:
863 if not pats:
809 raise util.Abort(_('at least one file name or pattern required'))
864 raise util.Abort(_('at least one file name or pattern required'))
810
865
811 opmap = [['user', getname], ['number', str], ['changeset', getnode],
866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
812 ['date', getdate]]
867 ['date', getdate]]
813 if not opts['user'] and not opts['changeset'] and not opts['date']:
868 if not opts['user'] and not opts['changeset'] and not opts['date']:
814 opts['number'] = 1
869 opts['number'] = 1
815
870
816 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
871 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
817
872
818 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
873 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
819 fctx = ctx.filectx(abs)
874 fctx = ctx.filectx(abs)
820 if not opts['text'] and util.binary(fctx.data()):
875 if not opts['text'] and util.binary(fctx.data()):
821 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
876 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
822 continue
877 continue
823
878
824 lines = fctx.annotate()
879 lines = fctx.annotate()
825 pieces = []
880 pieces = []
826
881
827 for o, f in opmap:
882 for o, f in opmap:
828 if opts[o]:
883 if opts[o]:
829 l = [f(n) for n, dummy in lines]
884 l = [f(n) for n, dummy in lines]
830 if l:
885 if l:
831 m = max(map(len, l))
886 m = max(map(len, l))
832 pieces.append(["%*s" % (m, x) for x in l])
887 pieces.append(["%*s" % (m, x) for x in l])
833
888
834 if pieces:
889 if pieces:
835 for p, l in zip(zip(*pieces), lines):
890 for p, l in zip(zip(*pieces), lines):
836 ui.write("%s: %s" % (" ".join(p), l[1]))
891 ui.write("%s: %s" % (" ".join(p), l[1]))
837
892
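# Editor's note (not part of commands.py): standalone sketch of the column
# alignment done above for annotate output -- each requested field becomes a
# right-aligned column, and zip(*padded) stitches the columns back together
# into one prefix per line.  Modern Python.
def align_columns(columns):
    padded = []
    for col in columns:
        width = max(len(x) for x in col)
        padded.append(["%*s" % (width, x) for x in col])
    return [" ".join(row) for row in zip(*padded)]

lines = ["first line\n", "second line\n"]
for prefix, text in zip(align_columns([["12", "7"], ["mpm", "vadim"]]), lines):
    print("%s: %s" % (prefix, text), end="")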
838 def archive(ui, repo, dest, **opts):
893 def archive(ui, repo, dest, **opts):
839 '''create unversioned archive of a repository revision
894 '''create unversioned archive of a repository revision
840
895
841 By default, the revision used is the parent of the working
896 By default, the revision used is the parent of the working
842 directory; use "-r" to specify a different revision.
897 directory; use "-r" to specify a different revision.
843
898
844 To specify the type of archive to create, use "-t". Valid
899 To specify the type of archive to create, use "-t". Valid
845 types are:
900 types are:
846
901
847 "files" (default): a directory full of files
902 "files" (default): a directory full of files
848 "tar": tar archive, uncompressed
903 "tar": tar archive, uncompressed
849 "tbz2": tar archive, compressed using bzip2
904 "tbz2": tar archive, compressed using bzip2
850 "tgz": tar archive, compressed using gzip
905 "tgz": tar archive, compressed using gzip
851 "uzip": zip archive, uncompressed
906 "uzip": zip archive, uncompressed
852 "zip": zip archive, compressed using deflate
907 "zip": zip archive, compressed using deflate
853
908
854 The exact name of the destination archive or directory is given
909 The exact name of the destination archive or directory is given
855 using a format string; see "hg help export" for details.
910 using a format string; see "hg help export" for details.
856
911
857 Each member added to an archive file has a directory prefix
912 Each member added to an archive file has a directory prefix
858 prepended. Use "-p" to specify a format string for the prefix.
913 prepended. Use "-p" to specify a format string for the prefix.
859 The default is the basename of the archive, with suffixes removed.
914 The default is the basename of the archive, with suffixes removed.
860 '''
915 '''
861
916
862 if opts['rev']:
917 if opts['rev']:
863 node = repo.lookup(opts['rev'])
918 node = repo.lookup(opts['rev'])
864 else:
919 else:
865 node, p2 = repo.dirstate.parents()
920 node, p2 = repo.dirstate.parents()
866 if p2 != nullid:
921 if p2 != nullid:
867 raise util.Abort(_('uncommitted merge - please provide a '
922 raise util.Abort(_('uncommitted merge - please provide a '
868 'specific revision'))
923 'specific revision'))
869
924
870 dest = make_filename(repo, dest, node)
925 dest = make_filename(repo, dest, node)
871 if os.path.realpath(dest) == repo.root:
926 if os.path.realpath(dest) == repo.root:
872 raise util.Abort(_('repository root cannot be destination'))
927 raise util.Abort(_('repository root cannot be destination'))
873 dummy, matchfn, dummy = matchpats(repo, [], opts)
928 dummy, matchfn, dummy = matchpats(repo, [], opts)
874 kind = opts.get('type') or 'files'
929 kind = opts.get('type') or 'files'
875 prefix = opts['prefix']
930 prefix = opts['prefix']
876 if dest == '-':
931 if dest == '-':
877 if kind == 'files':
932 if kind == 'files':
878 raise util.Abort(_('cannot archive plain files to stdout'))
933 raise util.Abort(_('cannot archive plain files to stdout'))
879 dest = sys.stdout
934 dest = sys.stdout
880 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
935 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
881 prefix = make_filename(repo, prefix, node)
936 prefix = make_filename(repo, prefix, node)
882 archival.archive(repo, dest, node, kind, not opts['no_decode'],
937 archival.archive(repo, dest, node, kind, not opts['no_decode'],
883 matchfn, prefix)
938 matchfn, prefix)
884
939
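# Editor's note (not part of commands.py): sketch of the destination
# handling above -- "-" means write the archive to stdout, which only makes
# sense for stream formats, never for the plain "files" directory type.
# sys.stdout.buffer is used for binary output on modern Python.
import sys

def open_archive_dest(dest, kind):
    if dest == '-':
        if kind == 'files':
            raise ValueError('cannot archive plain files to stdout')
        return sys.stdout.buffer
    return open(dest, 'wb')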
885 def backout(ui, repo, rev, **opts):
940 def backout(ui, repo, rev, **opts):
886 '''reverse effect of earlier changeset
941 '''reverse effect of earlier changeset
887
942
888 Commit the backed out changes as a new changeset. The new
943 Commit the backed out changes as a new changeset. The new
889 changeset is a child of the backed out changeset.
944 changeset is a child of the backed out changeset.
890
945
891 If you back out a changeset other than the tip, a new head is
946 If you back out a changeset other than the tip, a new head is
892 created. This head is the parent of the working directory. If
947 created. This head is the parent of the working directory. If
893 you back out an old changeset, your working directory will appear
948 you back out an old changeset, your working directory will appear
894 old after the backout. You should merge the backout changeset
949 old after the backout. You should merge the backout changeset
895 with another head.
950 with another head.
896
951
897 The --merge option remembers the parent of the working directory
952 The --merge option remembers the parent of the working directory
898 before starting the backout, then merges the new head with that
953 before starting the backout, then merges the new head with that
899 changeset afterwards. This saves you from doing the merge by
954 changeset afterwards. This saves you from doing the merge by
900 hand. The result of this merge is not committed, as for a normal
955 hand. The result of this merge is not committed, as for a normal
901 merge.'''
956 merge.'''
902
957
903 bail_if_changed(repo)
958 bail_if_changed(repo)
904 op1, op2 = repo.dirstate.parents()
959 op1, op2 = repo.dirstate.parents()
905 if op2 != nullid:
960 if op2 != nullid:
906 raise util.Abort(_('outstanding uncommitted merge'))
961 raise util.Abort(_('outstanding uncommitted merge'))
907 node = repo.lookup(rev)
962 node = repo.lookup(rev)
908 p1, p2 = repo.changelog.parents(node)
963 p1, p2 = repo.changelog.parents(node)
909 if p1 == nullid:
964 if p1 == nullid:
910 raise util.Abort(_('cannot back out a change with no parents'))
965 raise util.Abort(_('cannot back out a change with no parents'))
911 if p2 != nullid:
966 if p2 != nullid:
912 if not opts['parent']:
967 if not opts['parent']:
913 raise util.Abort(_('cannot back out a merge changeset without '
968 raise util.Abort(_('cannot back out a merge changeset without '
914 '--parent'))
969 '--parent'))
915 p = repo.lookup(opts['parent'])
970 p = repo.lookup(opts['parent'])
916 if p not in (p1, p2):
971 if p not in (p1, p2):
917 raise util.Abort(_('%s is not a parent of %s' %
972 raise util.Abort(_('%s is not a parent of %s' %
918 (short(p), short(node))))
973 (short(p), short(node))))
919 parent = p
974 parent = p
920 else:
975 else:
921 if opts['parent']:
976 if opts['parent']:
922 raise util.Abort(_('cannot use --parent on non-merge changeset'))
977 raise util.Abort(_('cannot use --parent on non-merge changeset'))
923 parent = p1
978 parent = p1
924 hg.update(repo, node, force=True, show_stats=False)
979 hg.update(repo, node, force=True, show_stats=False)
925 revert_opts = opts.copy()
980 revert_opts = opts.copy()
926 revert_opts['rev'] = hex(parent)
981 revert_opts['rev'] = hex(parent)
927 revert(ui, repo, **revert_opts)
982 revert(ui, repo, **revert_opts)
928 commit_opts = opts.copy()
983 commit_opts = opts.copy()
929 commit_opts['addremove'] = False
984 commit_opts['addremove'] = False
930 if not commit_opts['message'] and not commit_opts['logfile']:
985 if not commit_opts['message'] and not commit_opts['logfile']:
931 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
986 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
932 commit_opts['force_editor'] = True
987 commit_opts['force_editor'] = True
933 commit(ui, repo, **commit_opts)
988 commit(ui, repo, **commit_opts)
934 def nice(node):
989 def nice(node):
935 return '%d:%s' % (repo.changelog.rev(node), short(node))
990 return '%d:%s' % (repo.changelog.rev(node), short(node))
936 ui.status(_('changeset %s backs out changeset %s\n') %
991 ui.status(_('changeset %s backs out changeset %s\n') %
937 (nice(repo.changelog.tip()), nice(node)))
992 (nice(repo.changelog.tip()), nice(node)))
938 if op1 != node:
993 if op1 != node:
939 if opts['merge']:
994 if opts['merge']:
940 ui.status(_('merging with changeset %s\n') % nice(op1))
995 ui.status(_('merging with changeset %s\n') % nice(op1))
941 doupdate(ui, repo, hex(op1), merge=True)
996 doupdate(ui, repo, hex(op1), merge=True)
942 else:
997 else:
943 ui.status(_('the backout changeset is a new head - '
998 ui.status(_('the backout changeset is a new head - '
944 'do not forget to merge\n'))
999 'do not forget to merge\n'))
945 ui.status(_('(use "backout --merge" '
1000 ui.status(_('(use "backout --merge" '
946 'if you want to auto-merge)\n'))
1001 'if you want to auto-merge)\n'))
947
1002
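# Editor's note (not part of commands.py): condensed sketch of the parent
# selection above -- backing out a merge requires an explicit --parent, and
# the requested parent must be one of the two parents of the changeset being
# backed out; for a non-merge changeset --parent is rejected and the single
# parent is used.
def backout_parent(p1, p2, requested, null=None):
    if p2 != null:
        if requested is None:
            raise ValueError('cannot back out a merge changeset without --parent')
        if requested not in (p1, p2):
            raise ValueError('%r is not a parent of the backed-out changeset'
                             % (requested,))
        return requested
    if requested is not None:
        raise ValueError('cannot use --parent on non-merge changeset')
    return p1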
948 def bundle(ui, repo, fname, dest=None, **opts):
1003 def bundle(ui, repo, fname, dest=None, **opts):
949 """create a changegroup file
1004 """create a changegroup file
950
1005
951 Generate a compressed changegroup file collecting all changesets
1006 Generate a compressed changegroup file collecting all changesets
952 not found in the other repository.
1007 not found in the other repository.
953
1008
954 This file can then be transferred using conventional means and
1009 This file can then be transferred using conventional means and
955 applied to another repository with the unbundle command. This is
1010 applied to another repository with the unbundle command. This is
956 useful when native push and pull are not available or when
1011 useful when native push and pull are not available or when
957 exporting an entire repository is undesirable. The standard file
1012 exporting an entire repository is undesirable. The standard file
958 extension is ".hg".
1013 extension is ".hg".
959
1014
960 Unlike import/export, this exactly preserves all changeset
1015 Unlike import/export, this exactly preserves all changeset
961 contents including permissions, rename data, and revision history.
1016 contents including permissions, rename data, and revision history.
962 """
1017 """
963 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1018 dest = ui.expandpath(dest or 'default-push', dest or 'default')
964 other = hg.repository(ui, dest)
1019 other = hg.repository(ui, dest)
965 o = repo.findoutgoing(other, force=opts['force'])
1020 o = repo.findoutgoing(other, force=opts['force'])
966 cg = repo.changegroup(o, 'bundle')
1021 cg = repo.changegroup(o, 'bundle')
967 write_bundle(cg, fname)
1022 write_bundle(cg, fname)
968
1023
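# Editor's note (not part of commands.py): rough approximation of the path
# fallback above -- an explicit destination is expanded as given, otherwise
# the 'default-push' alias is tried first and 'default' second.  `paths`
# mimics the [paths] section of an hgrc; ui.expandpath() is the real helper.
def expand_dest(dest, paths):
    if dest:
        return paths.get(dest, dest)
    return paths.get('default-push') or paths.get('default')

print(expand_dest(None, {'default': 'http://selenic.com/hg'}))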
969 def cat(ui, repo, file1, *pats, **opts):
1024 def cat(ui, repo, file1, *pats, **opts):
970 """output the latest or given revisions of files
1025 """output the latest or given revisions of files
971
1026
972 Print the specified files as they were at the given revision.
1027 Print the specified files as they were at the given revision.
973 If no revision is given then the tip is used.
1028 If no revision is given then the tip is used.
974
1029
975 Output may be to a file, in which case the name of the file is
1030 Output may be to a file, in which case the name of the file is
976 given using a format string. The formatting rules are the same as
1031 given using a format string. The formatting rules are the same as
977 for the export command, with the following additions:
1032 for the export command, with the following additions:
978
1033
979 %s basename of file being printed
1034 %s basename of file being printed
980 %d dirname of file being printed, or '.' if in repo root
1035 %d dirname of file being printed, or '.' if in repo root
981 %p root-relative path name of file being printed
1036 %p root-relative path name of file being printed
982 """
1037 """
983 ctx = repo.changectx(opts['rev'] or "-1")
1038 ctx = repo.changectx(opts['rev'] or "-1")
984 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
1039 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
985 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
1040 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
986 fp.write(ctx.filectx(abs).data())
1041 fp.write(ctx.filectx(abs).data())
987
1042
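# Editor's note (not part of commands.py): illustrative sketch, using a
# hypothetical helper, of the extra format codes documented above for cat's
# --output option.  The real code shares make_file() with export and handles
# more codes than these three.
import os

def expand_output(pattern, path):
    return (pattern.replace('%s', os.path.basename(path))
                   .replace('%d', os.path.dirname(path) or '.')
                   .replace('%p', path))

print(expand_output('out/%d/%s', 'mercurial/commands.py'))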
988 def clone(ui, source, dest=None, **opts):
1043 def clone(ui, source, dest=None, **opts):
989 """make a copy of an existing repository
1044 """make a copy of an existing repository
990
1045
991 Create a copy of an existing repository in a new directory.
1046 Create a copy of an existing repository in a new directory.
992
1047
993 If no destination directory name is specified, it defaults to the
1048 If no destination directory name is specified, it defaults to the
994 basename of the source.
1049 basename of the source.
995
1050
996 The location of the source is added to the new repository's
1051 The location of the source is added to the new repository's
997 .hg/hgrc file, as the default to be used for future pulls.
1052 .hg/hgrc file, as the default to be used for future pulls.
998
1053
999 For efficiency, hardlinks are used for cloning whenever the source
1054 For efficiency, hardlinks are used for cloning whenever the source
1000 and destination are on the same filesystem (note this applies only
1055 and destination are on the same filesystem (note this applies only
1001 to the repository data, not to the checked out files). Some
1056 to the repository data, not to the checked out files). Some
1002 filesystems, such as AFS, implement hardlinking incorrectly, but
1057 filesystems, such as AFS, implement hardlinking incorrectly, but
1003 do not report errors. In these cases, use the --pull option to
1058 do not report errors. In these cases, use the --pull option to
1004 avoid hardlinking.
1059 avoid hardlinking.
1005
1060
1006 You can safely clone repositories and checked out files using full
1061 You can safely clone repositories and checked out files using full
1007 hardlinks with
1062 hardlinks with
1008
1063
1009 $ cp -al REPO REPOCLONE
1064 $ cp -al REPO REPOCLONE
1010
1065
1011 which is the fastest way to clone. However, the operation is not
1066 which is the fastest way to clone. However, the operation is not
1012 atomic (making sure REPO is not modified during the operation is
1067 atomic (making sure REPO is not modified during the operation is
1013 up to you) and you have to make sure your editor breaks hardlinks
1068 up to you) and you have to make sure your editor breaks hardlinks
1014 (Emacs and most Linux Kernel tools do so).
1069 (Emacs and most Linux Kernel tools do so).
1015
1070
1016 If you use the -r option to clone up to a specific revision, no
1071 If you use the -r option to clone up to a specific revision, no
1017 subsequent revisions will be present in the cloned repository.
1072 subsequent revisions will be present in the cloned repository.
1018 This option implies --pull, even on local repositories.
1073 This option implies --pull, even on local repositories.
1019
1074
1020 See pull for valid source format details.
1075 See pull for valid source format details.
1021
1076
1022 It is possible to specify an ssh:// URL as the destination, but no
1077 It is possible to specify an ssh:// URL as the destination, but no
1023 .hg/hgrc will be created on the remote side. Look at the help text
1078 .hg/hgrc will be created on the remote side. Look at the help text
1024 for the pull command for important details about ssh:// URLs.
1079 for the pull command for important details about ssh:// URLs.
1025 """
1080 """
1026 setremoteconfig(ui, opts)
1081 setremoteconfig(ui, opts)
1027 hg.clone(ui, ui.expandpath(source), dest,
1082 hg.clone(ui, ui.expandpath(source), dest,
1028 pull=opts['pull'],
1083 pull=opts['pull'],
1029 stream=opts['uncompressed'],
1084 stream=opts['uncompressed'],
1030 rev=opts['rev'],
1085 rev=opts['rev'],
1031 update=not opts['noupdate'])
1086 update=not opts['noupdate'])
1032
1087
1033 def commit(ui, repo, *pats, **opts):
1088 def commit(ui, repo, *pats, **opts):
1034 """commit the specified files or all outstanding changes
1089 """commit the specified files or all outstanding changes
1035
1090
1036 Commit changes to the given files into the repository.
1091 Commit changes to the given files into the repository.
1037
1092
1038 If a list of files is omitted, all changes reported by "hg status"
1093 If a list of files is omitted, all changes reported by "hg status"
1039 will be committed.
1094 will be committed.
1040
1095
1041 If no commit message is specified, the editor configured in your hgrc
1096 If no commit message is specified, the editor configured in your hgrc
1042 or in the EDITOR environment variable is started to enter a message.
1097 or in the EDITOR environment variable is started to enter a message.
1043 """
1098 """
1044 message = logmessage(**opts)
1099 message = logmessage(opts)
1045
1100
1046 if opts['addremove']:
1101 if opts['addremove']:
1047 addremove_lock(ui, repo, pats, opts)
1102 addremove_lock(ui, repo, pats, opts)
1048 fns, match, anypats = matchpats(repo, pats, opts)
1103 fns, match, anypats = matchpats(repo, pats, opts)
1049 if pats:
1104 if pats:
1050 modified, added, removed, deleted, unknown = (
1105 modified, added, removed, deleted, unknown = (
1051 repo.changes(files=fns, match=match))
1106 repo.changes(files=fns, match=match))
1052 files = modified + added + removed
1107 files = modified + added + removed
1053 else:
1108 else:
1054 files = []
1109 files = []
1055 try:
1110 try:
1056 repo.commit(files, message, opts['user'], opts['date'], match,
1111 repo.commit(files, message, opts['user'], opts['date'], match,
1057 force_editor=opts.get('force_editor'))
1112 force_editor=opts.get('force_editor'))
1058 except ValueError, inst:
1113 except ValueError, inst:
1059 raise util.Abort(str(inst))
1114 raise util.Abort(str(inst))
1060
1115
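# Editor's note (not part of commands.py): loose sketch of the file-selection
# rule above -- with explicit patterns only matching modified/added/removed
# files are committed, while an empty file list tells repo.commit() to take
# all outstanding changes.  The real code lets repo.changes() apply the
# match function itself.
def files_to_commit(pats, modified, added, removed, match):
    if not pats:
        return []              # empty list == commit everything
    return [f for f in modified + added + removed if match(f)]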
1061 def docopy(ui, repo, pats, opts, wlock):
1116 def docopy(ui, repo, pats, opts, wlock):
1062 # called with the repo lock held
1117 # called with the repo lock held
1063 cwd = repo.getcwd()
1118 cwd = repo.getcwd()
1064 errors = 0
1119 errors = 0
1065 copied = []
1120 copied = []
1066 targets = {}
1121 targets = {}
1067
1122
1068 def okaytocopy(abs, rel, exact):
1123 def okaytocopy(abs, rel, exact):
1069 reasons = {'?': _('is not managed'),
1124 reasons = {'?': _('is not managed'),
1070 'a': _('has been marked for add'),
1125 'a': _('has been marked for add'),
1071 'r': _('has been marked for remove')}
1126 'r': _('has been marked for remove')}
1072 state = repo.dirstate.state(abs)
1127 state = repo.dirstate.state(abs)
1073 reason = reasons.get(state)
1128 reason = reasons.get(state)
1074 if reason:
1129 if reason:
1075 if state == 'a':
1130 if state == 'a':
1076 origsrc = repo.dirstate.copied(abs)
1131 origsrc = repo.dirstate.copied(abs)
1077 if origsrc is not None:
1132 if origsrc is not None:
1078 return origsrc
1133 return origsrc
1079 if exact:
1134 if exact:
1080 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1135 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1081 else:
1136 else:
1082 return abs
1137 return abs
1083
1138
1084 def copy(origsrc, abssrc, relsrc, target, exact):
1139 def copy(origsrc, abssrc, relsrc, target, exact):
1085 abstarget = util.canonpath(repo.root, cwd, target)
1140 abstarget = util.canonpath(repo.root, cwd, target)
1086 reltarget = util.pathto(cwd, abstarget)
1141 reltarget = util.pathto(cwd, abstarget)
1087 prevsrc = targets.get(abstarget)
1142 prevsrc = targets.get(abstarget)
1088 if prevsrc is not None:
1143 if prevsrc is not None:
1089 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1144 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1090 (reltarget, abssrc, prevsrc))
1145 (reltarget, abssrc, prevsrc))
1091 return
1146 return
1092 if (not opts['after'] and os.path.exists(reltarget) or
1147 if (not opts['after'] and os.path.exists(reltarget) or
1093 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1148 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1094 if not opts['force']:
1149 if not opts['force']:
1095 ui.warn(_('%s: not overwriting - file exists\n') %
1150 ui.warn(_('%s: not overwriting - file exists\n') %
1096 reltarget)
1151 reltarget)
1097 return
1152 return
1098 if not opts['after'] and not opts.get('dry_run'):
1153 if not opts['after'] and not opts.get('dry_run'):
1099 os.unlink(reltarget)
1154 os.unlink(reltarget)
1100 if opts['after']:
1155 if opts['after']:
1101 if not os.path.exists(reltarget):
1156 if not os.path.exists(reltarget):
1102 return
1157 return
1103 else:
1158 else:
1104 targetdir = os.path.dirname(reltarget) or '.'
1159 targetdir = os.path.dirname(reltarget) or '.'
1105 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1160 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1106 os.makedirs(targetdir)
1161 os.makedirs(targetdir)
1107 try:
1162 try:
1108 restore = repo.dirstate.state(abstarget) == 'r'
1163 restore = repo.dirstate.state(abstarget) == 'r'
1109 if restore and not opts.get('dry_run'):
1164 if restore and not opts.get('dry_run'):
1110 repo.undelete([abstarget], wlock)
1165 repo.undelete([abstarget], wlock)
1111 try:
1166 try:
1112 if not opts.get('dry_run'):
1167 if not opts.get('dry_run'):
1113 shutil.copyfile(relsrc, reltarget)
1168 shutil.copyfile(relsrc, reltarget)
1114 shutil.copymode(relsrc, reltarget)
1169 shutil.copymode(relsrc, reltarget)
1115 restore = False
1170 restore = False
1116 finally:
1171 finally:
1117 if restore:
1172 if restore:
1118 repo.remove([abstarget], wlock)
1173 repo.remove([abstarget], wlock)
1119 except shutil.Error, inst:
1174 except shutil.Error, inst:
1120 raise util.Abort(str(inst))
1175 raise util.Abort(str(inst))
1121 except IOError, inst:
1176 except IOError, inst:
1122 if inst.errno == errno.ENOENT:
1177 if inst.errno == errno.ENOENT:
1123 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1178 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1124 else:
1179 else:
1125 ui.warn(_('%s: cannot copy - %s\n') %
1180 ui.warn(_('%s: cannot copy - %s\n') %
1126 (relsrc, inst.strerror))
1181 (relsrc, inst.strerror))
1127 errors += 1
1182 errors += 1
1128 return
1183 return
1129 if ui.verbose or not exact:
1184 if ui.verbose or not exact:
1130 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1185 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1131 targets[abstarget] = abssrc
1186 targets[abstarget] = abssrc
1132 if abstarget != origsrc and not opts.get('dry_run'):
1187 if abstarget != origsrc and not opts.get('dry_run'):
1133 repo.copy(origsrc, abstarget, wlock)
1188 repo.copy(origsrc, abstarget, wlock)
1134 copied.append((abssrc, relsrc, exact))
1189 copied.append((abssrc, relsrc, exact))
1135
1190
1136 def targetpathfn(pat, dest, srcs):
1191 def targetpathfn(pat, dest, srcs):
1137 if os.path.isdir(pat):
1192 if os.path.isdir(pat):
1138 abspfx = util.canonpath(repo.root, cwd, pat)
1193 abspfx = util.canonpath(repo.root, cwd, pat)
1139 if destdirexists:
1194 if destdirexists:
1140 striplen = len(os.path.split(abspfx)[0])
1195 striplen = len(os.path.split(abspfx)[0])
1141 else:
1196 else:
1142 striplen = len(abspfx)
1197 striplen = len(abspfx)
1143 if striplen:
1198 if striplen:
1144 striplen += len(os.sep)
1199 striplen += len(os.sep)
1145 res = lambda p: os.path.join(dest, p[striplen:])
1200 res = lambda p: os.path.join(dest, p[striplen:])
1146 elif destdirexists:
1201 elif destdirexists:
1147 res = lambda p: os.path.join(dest, os.path.basename(p))
1202 res = lambda p: os.path.join(dest, os.path.basename(p))
1148 else:
1203 else:
1149 res = lambda p: dest
1204 res = lambda p: dest
1150 return res
1205 return res
1151
1206
1152 def targetpathafterfn(pat, dest, srcs):
1207 def targetpathafterfn(pat, dest, srcs):
1153 if util.patkind(pat, None)[0]:
1208 if util.patkind(pat, None)[0]:
1154 # a mercurial pattern
1209 # a mercurial pattern
1155 res = lambda p: os.path.join(dest, os.path.basename(p))
1210 res = lambda p: os.path.join(dest, os.path.basename(p))
1156 else:
1211 else:
1157 abspfx = util.canonpath(repo.root, cwd, pat)
1212 abspfx = util.canonpath(repo.root, cwd, pat)
1158 if len(abspfx) < len(srcs[0][0]):
1213 if len(abspfx) < len(srcs[0][0]):
1159 # A directory. Either the target path contains the last
1214 # A directory. Either the target path contains the last
1160 # component of the source path or it does not.
1215 # component of the source path or it does not.
1161 def evalpath(striplen):
1216 def evalpath(striplen):
1162 score = 0
1217 score = 0
1163 for s in srcs:
1218 for s in srcs:
1164 t = os.path.join(dest, s[0][striplen:])
1219 t = os.path.join(dest, s[0][striplen:])
1165 if os.path.exists(t):
1220 if os.path.exists(t):
1166 score += 1
1221 score += 1
1167 return score
1222 return score
1168
1223
1169 striplen = len(abspfx)
1224 striplen = len(abspfx)
1170 if striplen:
1225 if striplen:
1171 striplen += len(os.sep)
1226 striplen += len(os.sep)
1172 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1227 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1173 score = evalpath(striplen)
1228 score = evalpath(striplen)
1174 striplen1 = len(os.path.split(abspfx)[0])
1229 striplen1 = len(os.path.split(abspfx)[0])
1175 if striplen1:
1230 if striplen1:
1176 striplen1 += len(os.sep)
1231 striplen1 += len(os.sep)
1177 if evalpath(striplen1) > score:
1232 if evalpath(striplen1) > score:
1178 striplen = striplen1
1233 striplen = striplen1
1179 res = lambda p: os.path.join(dest, p[striplen:])
1234 res = lambda p: os.path.join(dest, p[striplen:])
1180 else:
1235 else:
1181 # a file
1236 # a file
1182 if destdirexists:
1237 if destdirexists:
1183 res = lambda p: os.path.join(dest, os.path.basename(p))
1238 res = lambda p: os.path.join(dest, os.path.basename(p))
1184 else:
1239 else:
1185 res = lambda p: dest
1240 res = lambda p: dest
1186 return res
1241 return res
1187
1242
1188
1243
1189 pats = list(pats)
1244 pats = list(pats)
1190 if not pats:
1245 if not pats:
1191 raise util.Abort(_('no source or destination specified'))
1246 raise util.Abort(_('no source or destination specified'))
1192 if len(pats) == 1:
1247 if len(pats) == 1:
1193 raise util.Abort(_('no destination specified'))
1248 raise util.Abort(_('no destination specified'))
1194 dest = pats.pop()
1249 dest = pats.pop()
1195 destdirexists = os.path.isdir(dest)
1250 destdirexists = os.path.isdir(dest)
1196 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1251 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1197 raise util.Abort(_('with multiple sources, destination must be an '
1252 raise util.Abort(_('with multiple sources, destination must be an '
1198 'existing directory'))
1253 'existing directory'))
1199 if opts['after']:
1254 if opts['after']:
1200 tfn = targetpathafterfn
1255 tfn = targetpathafterfn
1201 else:
1256 else:
1202 tfn = targetpathfn
1257 tfn = targetpathfn
1203 copylist = []
1258 copylist = []
1204 for pat in pats:
1259 for pat in pats:
1205 srcs = []
1260 srcs = []
1206 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1261 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1207 origsrc = okaytocopy(abssrc, relsrc, exact)
1262 origsrc = okaytocopy(abssrc, relsrc, exact)
1208 if origsrc:
1263 if origsrc:
1209 srcs.append((origsrc, abssrc, relsrc, exact))
1264 srcs.append((origsrc, abssrc, relsrc, exact))
1210 if not srcs:
1265 if not srcs:
1211 continue
1266 continue
1212 copylist.append((tfn(pat, dest, srcs), srcs))
1267 copylist.append((tfn(pat, dest, srcs), srcs))
1213 if not copylist:
1268 if not copylist:
1214 raise util.Abort(_('no files to copy'))
1269 raise util.Abort(_('no files to copy'))
1215
1270
1216 for targetpath, srcs in copylist:
1271 for targetpath, srcs in copylist:
1217 for origsrc, abssrc, relsrc, exact in srcs:
1272 for origsrc, abssrc, relsrc, exact in srcs:
1218 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1273 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1219
1274
1220 if errors:
1275 if errors:
1221 ui.warn(_('(consider using --after)\n'))
1276 ui.warn(_('(consider using --after)\n'))
1222 return errors, copied
1277 return errors, copied
1223
1278
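# Editor's note (not part of commands.py): rough, renamed sketch of the
# destination rules used above for copies -- a directory source keeps its
# basename when the destination directory already exists, otherwise the
# destination itself becomes the copied tree; a plain file goes to
# dest/basename or straight to dest.
import os

def copy_target(srcpath, pattern, dest, dest_is_dir):
    if os.path.isdir(pattern):
        strip = os.path.dirname(pattern) if dest_is_dir else pattern
        rel = srcpath[len(strip):].lstrip(os.sep)
        return os.path.join(dest, rel)
    if dest_is_dir:
        return os.path.join(dest, os.path.basename(srcpath))
    return dest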
1224 def copy(ui, repo, *pats, **opts):
1279 def copy(ui, repo, *pats, **opts):
1225 """mark files as copied for the next commit
1280 """mark files as copied for the next commit
1226
1281
1227 Mark dest as having copies of source files. If dest is a
1282 Mark dest as having copies of source files. If dest is a
1228 directory, copies are put in that directory. If dest is a file,
1283 directory, copies are put in that directory. If dest is a file,
1229 there can only be one source.
1284 there can only be one source.
1230
1285
1231 By default, this command copies the contents of files as they
1286 By default, this command copies the contents of files as they
1232 stand in the working directory. If invoked with --after, the
1287 stand in the working directory. If invoked with --after, the
1233 operation is recorded, but no copying is performed.
1288 operation is recorded, but no copying is performed.
1234
1289
1235 This command takes effect in the next commit.
1290 This command takes effect in the next commit.
1236
1291
1237 NOTE: This command should be treated as experimental. While it
1292 NOTE: This command should be treated as experimental. While it
1238 should properly record copied files, this information is not yet
1293 should properly record copied files, this information is not yet
1239 fully used by merge, nor fully reported by log.
1294 fully used by merge, nor fully reported by log.
1240 """
1295 """
1241 wlock = repo.wlock(0)
1296 wlock = repo.wlock(0)
1242 errs, copied = docopy(ui, repo, pats, opts, wlock)
1297 errs, copied = docopy(ui, repo, pats, opts, wlock)
1243 return errs
1298 return errs
1244
1299
1245 def debugancestor(ui, index, rev1, rev2):
1300 def debugancestor(ui, index, rev1, rev2):
1246 """find the ancestor revision of two revisions in a given index"""
1301 """find the ancestor revision of two revisions in a given index"""
1247 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1302 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1248 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1303 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1249 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1304 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1250
1305
1251 def debugcomplete(ui, cmd='', **opts):
1306 def debugcomplete(ui, cmd='', **opts):
1252 """returns the completion list associated with the given command"""
1307 """returns the completion list associated with the given command"""
1253
1308
1254 if opts['options']:
1309 if opts['options']:
1255 options = []
1310 options = []
1256 otables = [globalopts]
1311 otables = [globalopts]
1257 if cmd:
1312 if cmd:
1258 aliases, entry = findcmd(cmd)
1313 aliases, entry = findcmd(cmd)
1259 otables.append(entry[1])
1314 otables.append(entry[1])
1260 for t in otables:
1315 for t in otables:
1261 for o in t:
1316 for o in t:
1262 if o[0]:
1317 if o[0]:
1263 options.append('-%s' % o[0])
1318 options.append('-%s' % o[0])
1264 options.append('--%s' % o[1])
1319 options.append('--%s' % o[1])
1265 ui.write("%s\n" % "\n".join(options))
1320 ui.write("%s\n" % "\n".join(options))
1266 return
1321 return
1267
1322
1268 clist = findpossible(cmd).keys()
1323 clist = findpossible(cmd).keys()
1269 clist.sort()
1324 clist.sort()
1270 ui.write("%s\n" % "\n".join(clist))
1325 ui.write("%s\n" % "\n".join(clist))
1271
1326
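# Editor's note (not part of commands.py): tiny sketch of prefix completion
# as used by debugcomplete -- the candidates are simply every command name
# that starts with the given prefix, sorted for stable output.
def complete(prefix, names):
    return sorted(c for c in names if c.startswith(prefix))

print(complete('de', ['debugcomplete', 'debugconfig', 'diff', 'export']))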
1272 def debugrebuildstate(ui, repo, rev=None):
1327 def debugrebuildstate(ui, repo, rev=None):
1273 """rebuild the dirstate as it would look like for the given revision"""
1328 """rebuild the dirstate as it would look like for the given revision"""
1274 if not rev:
1329 if not rev:
1275 rev = repo.changelog.tip()
1330 rev = repo.changelog.tip()
1276 else:
1331 else:
1277 rev = repo.lookup(rev)
1332 rev = repo.lookup(rev)
1278 change = repo.changelog.read(rev)
1333 change = repo.changelog.read(rev)
1279 n = change[0]
1334 n = change[0]
1280 files = repo.manifest.readflags(n)
1335 files = repo.manifest.readflags(n)
1281 wlock = repo.wlock()
1336 wlock = repo.wlock()
1282 repo.dirstate.rebuild(rev, files.iteritems())
1337 repo.dirstate.rebuild(rev, files.iteritems())
1283
1338
1284 def debugcheckstate(ui, repo):
1339 def debugcheckstate(ui, repo):
1285 """validate the correctness of the current dirstate"""
1340 """validate the correctness of the current dirstate"""
1286 parent1, parent2 = repo.dirstate.parents()
1341 parent1, parent2 = repo.dirstate.parents()
1287 repo.dirstate.read()
1342 repo.dirstate.read()
1288 dc = repo.dirstate.map
1343 dc = repo.dirstate.map
1289 keys = dc.keys()
1344 keys = dc.keys()
1290 keys.sort()
1345 keys.sort()
1291 m1n = repo.changelog.read(parent1)[0]
1346 m1n = repo.changelog.read(parent1)[0]
1292 m2n = repo.changelog.read(parent2)[0]
1347 m2n = repo.changelog.read(parent2)[0]
1293 m1 = repo.manifest.read(m1n)
1348 m1 = repo.manifest.read(m1n)
1294 m2 = repo.manifest.read(m2n)
1349 m2 = repo.manifest.read(m2n)
1295 errors = 0
1350 errors = 0
1296 for f in dc:
1351 for f in dc:
1297 state = repo.dirstate.state(f)
1352 state = repo.dirstate.state(f)
1298 if state in "nr" and f not in m1:
1353 if state in "nr" and f not in m1:
1299 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1354 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1300 errors += 1
1355 errors += 1
1301 if state in "a" and f in m1:
1356 if state in "a" and f in m1:
1302 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1357 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1303 errors += 1
1358 errors += 1
1304 if state in "m" and f not in m1 and f not in m2:
1359 if state in "m" and f not in m1 and f not in m2:
1305 ui.warn(_("%s in state %s, but not in either manifest\n") %
1360 ui.warn(_("%s in state %s, but not in either manifest\n") %
1306 (f, state))
1361 (f, state))
1307 errors += 1
1362 errors += 1
1308 for f in m1:
1363 for f in m1:
1309 state = repo.dirstate.state(f)
1364 state = repo.dirstate.state(f)
1310 if state not in "nrm":
1365 if state not in "nrm":
1311 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1366 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1312 errors += 1
1367 errors += 1
1313 if errors:
1368 if errors:
1314 error = _(".hg/dirstate inconsistent with current parent's manifest")
1369 error = _(".hg/dirstate inconsistent with current parent's manifest")
1315 raise util.Abort(error)
1370 raise util.Abort(error)
1316
1371
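# A condensed sketch of the consistency rules debugcheckstate enforces,
# phrased over plain dictionaries.  'states' maps filename -> dirstate state
# character, and 'manifest1'/'manifest2' are sets of tracked names; the real
# command reads these from repo.dirstate and the changelog.
def check_consistency(states, manifest1, manifest2):
    problems = []
    for f, state in states.items():
        if state in "nr" and f not in manifest1:
            problems.append("%s in state %s, but not in manifest1" % (f, state))
        if state == "a" and f in manifest1:
            problems.append("%s in state %s, but also in manifest1" % (f, state))
        if state == "m" and f not in manifest1 and f not in manifest2:
            problems.append("%s in state %s, but not in either manifest" % (f, state))
    for f in manifest1:
        if states.get(f) not in ("n", "r", "m"):
            problems.append("%s in manifest1, but listed as state %s" % (f, states.get(f)))
    return problems

# Example: an 'added' file that already exists in manifest1 is reported twice.
assert check_consistency({"a.txt": "a"}, {"a.txt"}, set()) == [
    "a.txt in state a, but also in manifest1",
    "a.txt in manifest1, but listed as state a",
]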
1317 def debugconfig(ui, repo, *values):
1372 def debugconfig(ui, repo, *values):
1318 """show combined config settings from all hgrc files
1373 """show combined config settings from all hgrc files
1319
1374
1320 With no args, print names and values of all config items.
1375 With no args, print names and values of all config items.
1321
1376
1322 With one arg of the form section.name, print just the value of
1377 With one arg of the form section.name, print just the value of
1323 that config item.
1378 that config item.
1324
1379
1325 With multiple args, print names and values of all config items
1380 With multiple args, print names and values of all config items
1326 with matching section names."""
1381 with matching section names."""
1327
1382
1328 if values:
1383 if values:
1329 if len([v for v in values if '.' in v]) > 1:
1384 if len([v for v in values if '.' in v]) > 1:
1330 raise util.Abort(_('only one config item permitted'))
1385 raise util.Abort(_('only one config item permitted'))
1331 for section, name, value in ui.walkconfig():
1386 for section, name, value in ui.walkconfig():
1332 sectname = section + '.' + name
1387 sectname = section + '.' + name
1333 if values:
1388 if values:
1334 for v in values:
1389 for v in values:
1335 if v == section:
1390 if v == section:
1336 ui.write('%s=%s\n' % (sectname, value))
1391 ui.write('%s=%s\n' % (sectname, value))
1337 elif v == sectname:
1392 elif v == sectname:
1338 ui.write(value, '\n')
1393 ui.write(value, '\n')
1339 else:
1394 else:
1340 ui.write('%s=%s\n' % (sectname, value))
1395 ui.write('%s=%s\n' % (sectname, value))
1341
1396
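# A small sketch of the section / section.name filtering debugconfig performs,
# driven by a flat list of (section, name, value) triples instead of
# ui.walkconfig().  The sample values are illustrative only.
def filter_config(items, selectors):
    for section, name, value in items:
        sectname = "%s.%s" % (section, name)
        if not selectors:
            yield "%s=%s" % (sectname, value)
        elif section in selectors:
            yield "%s=%s" % (sectname, value)
        elif sectname in selectors:
            yield value

items = [("ui", "username", "me"), ("paths", "default", "http://example.com/repo")]
assert list(filter_config(items, ["paths"])) == ["paths.default=http://example.com/repo"]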
1342 def debugsetparents(ui, repo, rev1, rev2=None):
1397 def debugsetparents(ui, repo, rev1, rev2=None):
1343 """manually set the parents of the current working directory
1398 """manually set the parents of the current working directory
1344
1399
1345 This is useful for writing repository conversion tools, but should
1400 This is useful for writing repository conversion tools, but should
1346 be used with care.
1401 be used with care.
1347 """
1402 """
1348
1403
1349 if not rev2:
1404 if not rev2:
1350 rev2 = hex(nullid)
1405 rev2 = hex(nullid)
1351
1406
1352 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1407 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1353
1408
1354 def debugstate(ui, repo):
1409 def debugstate(ui, repo):
1355 """show the contents of the current dirstate"""
1410 """show the contents of the current dirstate"""
1356 repo.dirstate.read()
1411 repo.dirstate.read()
1357 dc = repo.dirstate.map
1412 dc = repo.dirstate.map
1358 keys = dc.keys()
1413 keys = dc.keys()
1359 keys.sort()
1414 keys.sort()
1360 for file_ in keys:
1415 for file_ in keys:
1361 ui.write("%c %3o %10d %s %s\n"
1416 ui.write("%c %3o %10d %s %s\n"
1362 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1417 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1363 time.strftime("%x %X",
1418 time.strftime("%x %X",
1364 time.localtime(dc[file_][3])), file_))
1419 time.localtime(dc[file_][3])), file_))
1365 for f in repo.dirstate.copies:
1420 for f in repo.dirstate.copies:
1366 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1421 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1367
1422
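# A sketch of how one dirstate entry is rendered by the loop above: state
# character, permission bits, size, and mtime.  The (state, mode, size, mtime)
# tuple layout mirrors what dc[file_] holds here.
import time

def format_dirstate_entry(name, entry):
    state, mode, size, mtime = entry
    stamp = time.strftime("%x %X", time.localtime(mtime))
    return "%c %3o %10d %s %s" % (state, mode & 0o777, size, stamp, name)

print(format_dirstate_entry("README", ("n", 0o100644, 1234, 0)))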
1368 def debugdata(ui, file_, rev):
1423 def debugdata(ui, file_, rev):
1369 """dump the contents of a data file revision"""
1424 """dump the contents of a data file revision"""
1370 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1425 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1371 file_[:-2] + ".i", file_, 0)
1426 file_[:-2] + ".i", file_, 0)
1372 try:
1427 try:
1373 ui.write(r.revision(r.lookup(rev)))
1428 ui.write(r.revision(r.lookup(rev)))
1374 except KeyError:
1429 except KeyError:
1375 raise util.Abort(_('invalid revision identifier %s') % rev)
1430 raise util.Abort(_('invalid revision identifier %s') % rev)
1376
1431
1377 def debugindex(ui, file_):
1432 def debugindex(ui, file_):
1378 """dump the contents of an index file"""
1433 """dump the contents of an index file"""
1379 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1434 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1380 ui.write(" rev offset length base linkrev" +
1435 ui.write(" rev offset length base linkrev" +
1381 " nodeid p1 p2\n")
1436 " nodeid p1 p2\n")
1382 for i in range(r.count()):
1437 for i in range(r.count()):
1383 node = r.node(i)
1438 node = r.node(i)
1384 pp = r.parents(node)
1439 pp = r.parents(node)
1385 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1440 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1386 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1441 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1387 short(node), short(pp[0]), short(pp[1])))
1442 short(node), short(pp[0]), short(pp[1])))
1388
1443
1389 def debugindexdot(ui, file_):
1444 def debugindexdot(ui, file_):
1390 """dump an index DAG as a .dot file"""
1445 """dump an index DAG as a .dot file"""
1391 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1446 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1392 ui.write("digraph G {\n")
1447 ui.write("digraph G {\n")
1393 for i in range(r.count()):
1448 for i in range(r.count()):
1394 node = r.node(i)
1449 node = r.node(i)
1395 pp = r.parents(node)
1450 pp = r.parents(node)
1396 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1451 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1397 if pp[1] != nullid:
1452 if pp[1] != nullid:
1398 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1453 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1399 ui.write("}\n")
1454 ui.write("}\n")
1400
1455
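# A self-contained sketch of the DAG-to-dot conversion debugindexdot performs,
# driven by a plain {rev: (p1, p2)} map with -1 for the null revision.  The
# real command always prints the first-parent edge (so roots show "-1 -> 0");
# this sketch simply skips null parents.
def dag_to_dot(parents):
    lines = ["digraph G {"]
    for rev in sorted(parents):
        p1, p2 = parents[rev]
        if p1 != -1:
            lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# Two roots merged at revision 2:
print(dag_to_dot({0: (-1, -1), 1: (-1, -1), 2: (0, 1)}))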
1401 def debugrename(ui, repo, file, rev=None):
1456 def debugrename(ui, repo, file, rev=None):
1402 """dump rename information"""
1457 """dump rename information"""
1403 r = repo.file(relpath(repo, [file])[0])
1458 r = repo.file(relpath(repo, [file])[0])
1404 if rev:
1459 if rev:
1405 try:
1460 try:
1406 # assume all revision numbers are for changesets
1461 # assume all revision numbers are for changesets
1407 n = repo.lookup(rev)
1462 n = repo.lookup(rev)
1408 change = repo.changelog.read(n)
1463 change = repo.changelog.read(n)
1409 m = repo.manifest.read(change[0])
1464 m = repo.manifest.read(change[0])
1410 n = m[relpath(repo, [file])[0]]
1465 n = m[relpath(repo, [file])[0]]
1411 except (hg.RepoError, KeyError):
1466 except (hg.RepoError, KeyError):
1412 n = r.lookup(rev)
1467 n = r.lookup(rev)
1413 else:
1468 else:
1414 n = r.tip()
1469 n = r.tip()
1415 m = r.renamed(n)
1470 m = r.renamed(n)
1416 if m:
1471 if m:
1417 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1472 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1418 else:
1473 else:
1419 ui.write(_("not renamed\n"))
1474 ui.write(_("not renamed\n"))
1420
1475
1421 def debugwalk(ui, repo, *pats, **opts):
1476 def debugwalk(ui, repo, *pats, **opts):
1422 """show how files match on given patterns"""
1477 """show how files match on given patterns"""
1423 items = list(walk(repo, pats, opts))
1478 items = list(walk(repo, pats, opts))
1424 if not items:
1479 if not items:
1425 return
1480 return
1426 fmt = '%%s %%-%ds %%-%ds %%s' % (
1481 fmt = '%%s %%-%ds %%-%ds %%s' % (
1427 max([len(abs) for (src, abs, rel, exact) in items]),
1482 max([len(abs) for (src, abs, rel, exact) in items]),
1428 max([len(rel) for (src, abs, rel, exact) in items]))
1483 max([len(rel) for (src, abs, rel, exact) in items]))
1429 for src, abs, rel, exact in items:
1484 for src, abs, rel, exact in items:
1430 line = fmt % (src, abs, rel, exact and 'exact' or '')
1485 line = fmt % (src, abs, rel, exact and 'exact' or '')
1431 ui.write("%s\n" % line.rstrip())
1486 ui.write("%s\n" % line.rstrip())
1432
1487
1433 def diff(ui, repo, *pats, **opts):
1488 def diff(ui, repo, *pats, **opts):
1434 """diff repository (or selected files)
1489 """diff repository (or selected files)
1435
1490
1436 Show differences between revisions for the specified files.
1491 Show differences between revisions for the specified files.
1437
1492
1438 Differences between files are shown using the unified diff format.
1493 Differences between files are shown using the unified diff format.
1439
1494
1440 When two revision arguments are given, then changes are shown
1495 When two revision arguments are given, then changes are shown
1441 between those revisions. If only one revision is specified then
1496 between those revisions. If only one revision is specified then
1442 that revision is compared to the working directory, and, when no
1497 that revision is compared to the working directory, and, when no
1443 revisions are specified, the working directory files are compared
1498 revisions are specified, the working directory files are compared
1444 to its parent.
1499 to its parent.
1445
1500
1446 Without the -a option, diff will avoid generating diffs of files
1501 Without the -a option, diff will avoid generating diffs of files
1447 it detects as binary. With -a, diff will generate a diff anyway,
1502 it detects as binary. With -a, diff will generate a diff anyway,
1448 probably with undesirable results.
1503 probably with undesirable results.
1449 """
1504 """
1450 node1, node2 = revpair(ui, repo, opts['rev'])
1505 node1, node2 = revpair(ui, repo, opts['rev'])
1451
1506
1452 fns, matchfn, anypats = matchpats(repo, pats, opts)
1507 fns, matchfn, anypats = matchpats(repo, pats, opts)
1453
1508
1454 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1509 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1455 text=opts['text'], opts=opts)
1510 text=opts['text'], opts=opts)
1456
1511
1457 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1512 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1458 node = repo.lookup(changeset)
1513 node = repo.lookup(changeset)
1459 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1514 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1460 if opts['switch_parent']:
1515 if opts['switch_parent']:
1461 parents.reverse()
1516 parents.reverse()
1462 prev = (parents and parents[0]) or nullid
1517 prev = (parents and parents[0]) or nullid
1463 change = repo.changelog.read(node)
1518 change = repo.changelog.read(node)
1464
1519
1465 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1520 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1466 revwidth=revwidth)
1521 revwidth=revwidth)
1467 if fp != sys.stdout:
1522 if fp != sys.stdout:
1468 ui.note("%s\n" % fp.name)
1523 ui.note("%s\n" % fp.name)
1469
1524
1470 fp.write("# HG changeset patch\n")
1525 fp.write("# HG changeset patch\n")
1471 fp.write("# User %s\n" % change[1])
1526 fp.write("# User %s\n" % change[1])
1472 fp.write("# Date %d %d\n" % change[2])
1527 fp.write("# Date %d %d\n" % change[2])
1473 fp.write("# Node ID %s\n" % hex(node))
1528 fp.write("# Node ID %s\n" % hex(node))
1474 fp.write("# Parent %s\n" % hex(prev))
1529 fp.write("# Parent %s\n" % hex(prev))
1475 if len(parents) > 1:
1530 if len(parents) > 1:
1476 fp.write("# Parent %s\n" % hex(parents[1]))
1531 fp.write("# Parent %s\n" % hex(parents[1]))
1477 fp.write(change[4].rstrip())
1532 fp.write(change[4].rstrip())
1478 fp.write("\n\n")
1533 fp.write("\n\n")
1479
1534
1480 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1535 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1481 if fp != sys.stdout:
1536 if fp != sys.stdout:
1482 fp.close()
1537 fp.close()
1483
1538
1484 def export(ui, repo, *changesets, **opts):
1539 def export(ui, repo, *changesets, **opts):
1485 """dump the header and diffs for one or more changesets
1540 """dump the header and diffs for one or more changesets
1486
1541
1487 Print the changeset header and diffs for one or more revisions.
1542 Print the changeset header and diffs for one or more revisions.
1488
1543
1489 The information shown in the changeset header is: author,
1544 The information shown in the changeset header is: author,
1490 changeset hash, parent and commit comment.
1545 changeset hash, parent and commit comment.
1491
1546
1492 Output may be to a file, in which case the name of the file is
1547 Output may be to a file, in which case the name of the file is
1493 given using a format string. The formatting rules are as follows:
1548 given using a format string. The formatting rules are as follows:
1494
1549
1495 %% literal "%" character
1550 %% literal "%" character
1496 %H changeset hash (40 hexadecimal digits)
1551 %H changeset hash (40 hexadecimal digits)
1497 %N number of patches being generated
1552 %N number of patches being generated
1498 %R changeset revision number
1553 %R changeset revision number
1499 %b basename of the exporting repository
1554 %b basename of the exporting repository
1500 %h short-form changeset hash (12 hexadecimal digits)
1555 %h short-form changeset hash (12 hexadecimal digits)
1501 %n zero-padded sequence number, starting at 1
1556 %n zero-padded sequence number, starting at 1
1502 %r zero-padded changeset revision number
1557 %r zero-padded changeset revision number
1503
1558
1504 Without the -a option, export will avoid generating diffs of files
1559 Without the -a option, export will avoid generating diffs of files
1505 it detects as binary. With -a, export will generate a diff anyway,
1560 it detects as binary. With -a, export will generate a diff anyway,
1506 probably with undesirable results.
1561 probably with undesirable results.
1507
1562
1508 With the --switch-parent option, the diff will be against the second
1563 With the --switch-parent option, the diff will be against the second
1509 parent. This can be useful for reviewing a merge.
1564 parent. This can be useful for reviewing a merge.
1510 """
1565 """
1511 if not changesets:
1566 if not changesets:
1512 raise util.Abort(_("export requires at least one changeset"))
1567 raise util.Abort(_("export requires at least one changeset"))
1513 seqno = 0
1568 seqno = 0
1514 revs = list(revrange(ui, repo, changesets))
1569 revs = list(revrange(ui, repo, changesets))
1515 total = len(revs)
1570 total = len(revs)
1516 revwidth = max(map(len, revs))
1571 revwidth = max(map(len, revs))
1517 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1572 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1518 ui.note(msg)
1573 ui.note(msg)
1519 for cset in revs:
1574 for cset in revs:
1520 seqno += 1
1575 seqno += 1
1521 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1576 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1522
1577
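# A rough sketch of how the %-escapes documented in the export help text could
# be expanded into an output filename.  'make_export_name' is an illustrative
# helper, not the make_file() used by doexport; 'node' is a 40-character hex
# changeset hash.
def make_export_name(pattern, repo_basename, node, rev, seqno, total, revwidth):
    expand = {
        '%': '%',
        'H': node,
        'N': str(total),
        'R': str(rev),
        'b': repo_basename,
        'h': node[:12],
        'n': str(seqno).zfill(len(str(total))),
        'r': str(rev).zfill(revwidth),
    }
    out, i = [], 0
    while i < len(pattern):
        c = pattern[i]
        if c == '%' and i + 1 < len(pattern):
            out.append(expand.get(pattern[i + 1], '%' + pattern[i + 1]))
            i += 2
        else:
            out.append(c)
            i += 1
    return ''.join(out)

# e.g. "%b-r%R-%h.patch" -> "myrepo-r42-0123456789ab.patch"
print(make_export_name("%b-r%R-%h.patch", "myrepo",
                       "0123456789ab" + "0" * 28, 42, 1, 1, 2))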
1523 def forget(ui, repo, *pats, **opts):
1578 def forget(ui, repo, *pats, **opts):
1524 """don't add the specified files on the next commit (DEPRECATED)
1579 """don't add the specified files on the next commit (DEPRECATED)
1525
1580
1526 (DEPRECATED)
1581 (DEPRECATED)
1527 Undo an 'hg add' scheduled for the next commit.
1582 Undo an 'hg add' scheduled for the next commit.
1528
1583
1529 This command is now deprecated and will be removed in a future
1584 This command is now deprecated and will be removed in a future
1530 release. Please use revert instead.
1585 release. Please use revert instead.
1531 """
1586 """
1532 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1587 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1533 forget = []
1588 forget = []
1534 for src, abs, rel, exact in walk(repo, pats, opts):
1589 for src, abs, rel, exact in walk(repo, pats, opts):
1535 if repo.dirstate.state(abs) == 'a':
1590 if repo.dirstate.state(abs) == 'a':
1536 forget.append(abs)
1591 forget.append(abs)
1537 if ui.verbose or not exact:
1592 if ui.verbose or not exact:
1538 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1593 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1539 repo.forget(forget)
1594 repo.forget(forget)
1540
1595
1541 def grep(ui, repo, pattern, *pats, **opts):
1596 def grep(ui, repo, pattern, *pats, **opts):
1542 """search for a pattern in specified files and revisions
1597 """search for a pattern in specified files and revisions
1543
1598
1544 Search revisions of files for a regular expression.
1599 Search revisions of files for a regular expression.
1545
1600
1546 This command behaves differently than Unix grep. It only accepts
1601 This command behaves differently than Unix grep. It only accepts
1547 Python/Perl regexps. It searches repository history, not the
1602 Python/Perl regexps. It searches repository history, not the
1548 working directory. It always prints the revision number in which
1603 working directory. It always prints the revision number in which
1549 a match appears.
1604 a match appears.
1550
1605
1551 By default, grep only prints output for the first revision of a
1606 By default, grep only prints output for the first revision of a
1552 file in which it finds a match. To get it to print every revision
1607 file in which it finds a match. To get it to print every revision
1553 that contains a change in match status ("-" for a match that
1608 that contains a change in match status ("-" for a match that
1554 becomes a non-match, or "+" for a non-match that becomes a match),
1609 becomes a non-match, or "+" for a non-match that becomes a match),
1555 use the --all flag.
1610 use the --all flag.
1556 """
1611 """
1557 reflags = 0
1612 reflags = 0
1558 if opts['ignore_case']:
1613 if opts['ignore_case']:
1559 reflags |= re.I
1614 reflags |= re.I
1560 regexp = re.compile(pattern, reflags)
1615 regexp = re.compile(pattern, reflags)
1561 sep, eol = ':', '\n'
1616 sep, eol = ':', '\n'
1562 if opts['print0']:
1617 if opts['print0']:
1563 sep = eol = '\0'
1618 sep = eol = '\0'
1564
1619
1565 fcache = {}
1620 fcache = {}
1566 def getfile(fn):
1621 def getfile(fn):
1567 if fn not in fcache:
1622 if fn not in fcache:
1568 fcache[fn] = repo.file(fn)
1623 fcache[fn] = repo.file(fn)
1569 return fcache[fn]
1624 return fcache[fn]
1570
1625
1571 def matchlines(body):
1626 def matchlines(body):
1572 begin = 0
1627 begin = 0
1573 linenum = 0
1628 linenum = 0
1574 while True:
1629 while True:
1575 match = regexp.search(body, begin)
1630 match = regexp.search(body, begin)
1576 if not match:
1631 if not match:
1577 break
1632 break
1578 mstart, mend = match.span()
1633 mstart, mend = match.span()
1579 linenum += body.count('\n', begin, mstart) + 1
1634 linenum += body.count('\n', begin, mstart) + 1
1580 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1635 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1581 lend = body.find('\n', mend)
1636 lend = body.find('\n', mend)
1582 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1637 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1583 begin = lend + 1
1638 begin = lend + 1
1584
1639
1585 class linestate(object):
1640 class linestate(object):
1586 def __init__(self, line, linenum, colstart, colend):
1641 def __init__(self, line, linenum, colstart, colend):
1587 self.line = line
1642 self.line = line
1588 self.linenum = linenum
1643 self.linenum = linenum
1589 self.colstart = colstart
1644 self.colstart = colstart
1590 self.colend = colend
1645 self.colend = colend
1591 def __eq__(self, other):
1646 def __eq__(self, other):
1592 return self.line == other.line
1647 return self.line == other.line
1593 def __hash__(self):
1648 def __hash__(self):
1594 return hash(self.line)
1649 return hash(self.line)
1595
1650
1596 matches = {}
1651 matches = {}
1597 def grepbody(fn, rev, body):
1652 def grepbody(fn, rev, body):
1598 matches[rev].setdefault(fn, {})
1653 matches[rev].setdefault(fn, {})
1599 m = matches[rev][fn]
1654 m = matches[rev][fn]
1600 for lnum, cstart, cend, line in matchlines(body):
1655 for lnum, cstart, cend, line in matchlines(body):
1601 s = linestate(line, lnum, cstart, cend)
1656 s = linestate(line, lnum, cstart, cend)
1602 m[s] = s
1657 m[s] = s
1603
1658
1604 # FIXME: prev isn't used, why?
1659 # FIXME: prev isn't used, why?
1605 prev = {}
1660 prev = {}
1606 ucache = {}
1661 ucache = {}
1607 def display(fn, rev, states, prevstates):
1662 def display(fn, rev, states, prevstates):
1608 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1663 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1609 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1664 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1610 counts = {'-': 0, '+': 0}
1665 counts = {'-': 0, '+': 0}
1611 filerevmatches = {}
1666 filerevmatches = {}
1612 for l in diff:
1667 for l in diff:
1613 if incrementing or not opts['all']:
1668 if incrementing or not opts['all']:
1614 change = ((l in prevstates) and '-') or '+'
1669 change = ((l in prevstates) and '-') or '+'
1615 r = rev
1670 r = rev
1616 else:
1671 else:
1617 change = ((l in states) and '-') or '+'
1672 change = ((l in states) and '-') or '+'
1618 r = prev[fn]
1673 r = prev[fn]
1619 cols = [fn, str(rev)]
1674 cols = [fn, str(rev)]
1620 if opts['line_number']:
1675 if opts['line_number']:
1621 cols.append(str(l.linenum))
1676 cols.append(str(l.linenum))
1622 if opts['all']:
1677 if opts['all']:
1623 cols.append(change)
1678 cols.append(change)
1624 if opts['user']:
1679 if opts['user']:
1625 cols.append(trimuser(ui, getchange(rev)[1], rev,
1680 cols.append(trimuser(ui, getchange(rev)[1], rev,
1626 ucache))
1681 ucache))
1627 if opts['files_with_matches']:
1682 if opts['files_with_matches']:
1628 c = (fn, rev)
1683 c = (fn, rev)
1629 if c in filerevmatches:
1684 if c in filerevmatches:
1630 continue
1685 continue
1631 filerevmatches[c] = 1
1686 filerevmatches[c] = 1
1632 else:
1687 else:
1633 cols.append(l.line)
1688 cols.append(l.line)
1634 ui.write(sep.join(cols), eol)
1689 ui.write(sep.join(cols), eol)
1635 counts[change] += 1
1690 counts[change] += 1
1636 return counts['+'], counts['-']
1691 return counts['+'], counts['-']
1637
1692
1638 fstate = {}
1693 fstate = {}
1639 skip = {}
1694 skip = {}
1640 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1695 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1641 count = 0
1696 count = 0
1642 incrementing = False
1697 incrementing = False
1643 for st, rev, fns in changeiter:
1698 for st, rev, fns in changeiter:
1644 if st == 'window':
1699 if st == 'window':
1645 incrementing = rev
1700 incrementing = rev
1646 matches.clear()
1701 matches.clear()
1647 elif st == 'add':
1702 elif st == 'add':
1648 change = repo.changelog.read(repo.lookup(str(rev)))
1703 change = repo.changelog.read(repo.lookup(str(rev)))
1649 mf = repo.manifest.read(change[0])
1704 mf = repo.manifest.read(change[0])
1650 matches[rev] = {}
1705 matches[rev] = {}
1651 for fn in fns:
1706 for fn in fns:
1652 if fn in skip:
1707 if fn in skip:
1653 continue
1708 continue
1654 fstate.setdefault(fn, {})
1709 fstate.setdefault(fn, {})
1655 try:
1710 try:
1656 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1711 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1657 except KeyError:
1712 except KeyError:
1658 pass
1713 pass
1659 elif st == 'iter':
1714 elif st == 'iter':
1660 states = matches[rev].items()
1715 states = matches[rev].items()
1661 states.sort()
1716 states.sort()
1662 for fn, m in states:
1717 for fn, m in states:
1663 if fn in skip:
1718 if fn in skip:
1664 continue
1719 continue
1665 if incrementing or not opts['all'] or fstate[fn]:
1720 if incrementing or not opts['all'] or fstate[fn]:
1666 pos, neg = display(fn, rev, m, fstate[fn])
1721 pos, neg = display(fn, rev, m, fstate[fn])
1667 count += pos + neg
1722 count += pos + neg
1668 if pos and not opts['all']:
1723 if pos and not opts['all']:
1669 skip[fn] = True
1724 skip[fn] = True
1670 fstate[fn] = m
1725 fstate[fn] = m
1671 prev[fn] = rev
1726 prev[fn] = rev
1672
1727
1673 if not incrementing:
1728 if not incrementing:
1674 fstate = fstate.items()
1729 fstate = fstate.items()
1675 fstate.sort()
1730 fstate.sort()
1676 for fn, state in fstate:
1731 for fn, state in fstate:
1677 if fn in skip:
1732 if fn in skip:
1678 continue
1733 continue
1679 display(fn, rev, {}, state)
1734 display(fn, rev, {}, state)
1680 return (count == 0 and 1) or 0
1735 return (count == 0 and 1) or 0
1681
1736
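# A standalone sketch of the matchlines() helper used by grep above: scan a
# buffer with a compiled regexp and yield the 1-based line number, the column
# span of the match, and the text of the matching line.
import re

def match_lines(regexp, body):
    begin = 0
    linenum = 0
    while True:
        m = regexp.search(body, begin)
        if not m:
            break
        mstart, mend = m.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1
        lend = body.find('\n', mend)
        if lend == -1:
            lend = len(body)
        yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
        begin = lend + 1

body = "alpha\nbeta gamma\ndelta\n"
assert list(match_lines(re.compile("gamma"), body)) == [(2, 5, 10, "beta gamma")]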
1682 def heads(ui, repo, **opts):
1737 def heads(ui, repo, **opts):
1683 """show current repository heads
1738 """show current repository heads
1684
1739
1685 Show all repository head changesets.
1740 Show all repository head changesets.
1686
1741
1687 Repository "heads" are changesets that don't have child
1742 Repository "heads" are changesets that don't have child
1688 changesets. They are where development generally takes place and
1743 changesets. They are where development generally takes place and
1689 are the usual targets for update and merge operations.
1744 are the usual targets for update and merge operations.
1690 """
1745 """
1691 if opts['rev']:
1746 if opts['rev']:
1692 heads = repo.heads(repo.lookup(opts['rev']))
1747 heads = repo.heads(repo.lookup(opts['rev']))
1693 else:
1748 else:
1694 heads = repo.heads()
1749 heads = repo.heads()
1695 br = None
1750 br = None
1696 if opts['branches']:
1751 if opts['branches']:
1697 br = repo.branchlookup(heads)
1752 br = repo.branchlookup(heads)
1698 displayer = show_changeset(ui, repo, opts)
1753 displayer = show_changeset(ui, repo, opts)
1699 for n in heads:
1754 for n in heads:
1700 displayer.show(changenode=n, brinfo=br)
1755 displayer.show(changenode=n, brinfo=br)
1701
1756
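# An illustrative computation of "heads" in the sense described in the
# docstring above: changesets that are not recorded as a parent of any other
# changeset.  The real command asks the changelog; this uses a plain
# {rev: (p1, p2)} map with -1 for the null revision.
def find_heads(parents):
    non_heads = set()
    for p1, p2 in parents.values():
        non_heads.update(p for p in (p1, p2) if p != -1)
    return sorted(set(parents) - non_heads)

# Linear history 0-1-2 plus a branch 1-3: revisions 2 and 3 are heads.
assert find_heads({0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}) == [2, 3]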
1702 def identify(ui, repo):
1757 def identify(ui, repo):
1703 """print information about the working copy
1758 """print information about the working copy
1704
1759
1705 Print a short summary of the current state of the repo.
1760 Print a short summary of the current state of the repo.
1706
1761
1707 This summary identifies the repository state using one or two parent
1762 This summary identifies the repository state using one or two parent
1708 hash identifiers, followed by a "+" if there are uncommitted changes
1763 hash identifiers, followed by a "+" if there are uncommitted changes
1709 in the working directory, followed by a list of tags for this revision.
1764 in the working directory, followed by a list of tags for this revision.
1710 """
1765 """
1711 parents = [p for p in repo.dirstate.parents() if p != nullid]
1766 parents = [p for p in repo.dirstate.parents() if p != nullid]
1712 if not parents:
1767 if not parents:
1713 ui.write(_("unknown\n"))
1768 ui.write(_("unknown\n"))
1714 return
1769 return
1715
1770
1716 hexfunc = ui.verbose and hex or short
1771 hexfunc = ui.verbose and hex or short
1717 modified, added, removed, deleted, unknown = repo.changes()
1772 modified, added, removed, deleted, unknown = repo.changes()
1718 output = ["%s%s" %
1773 output = ["%s%s" %
1719 ('+'.join([hexfunc(parent) for parent in parents]),
1774 ('+'.join([hexfunc(parent) for parent in parents]),
1720 (modified or added or removed or deleted) and "+" or "")]
1775 (modified or added or removed or deleted) and "+" or "")]
1721
1776
1722 if not ui.quiet:
1777 if not ui.quiet:
1723 # multiple tags for a single parent separated by '/'
1778 # multiple tags for a single parent separated by '/'
1724 parenttags = ['/'.join(tags)
1779 parenttags = ['/'.join(tags)
1725 for tags in map(repo.nodetags, parents) if tags]
1780 for tags in map(repo.nodetags, parents) if tags]
1726 # tags for multiple parents separated by ' + '
1781 # tags for multiple parents separated by ' + '
1727 if parenttags:
1782 if parenttags:
1728 output.append(' + '.join(parenttags))
1783 output.append(' + '.join(parenttags))
1729
1784
1730 ui.write("%s\n" % ' '.join(output))
1785 ui.write("%s\n" % ' '.join(output))
1731
1786
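# A compact sketch of how the identify output above is assembled: short parent
# hashes joined by '+', a trailing '+' when the working directory has
# uncommitted changes, then any tags.  All inputs here are plain illustrative
# values rather than repository state.
def identify_line(parent_hashes, dirty, tags):
    ids = '+'.join(h[:12] for h in parent_hashes) + ('+' if dirty else '')
    return ' '.join([ids] + (['/'.join(tags)] if tags else []))

assert identify_line(["0123456789abcdef" * 2 + "01234567"], True, ["tip"]) == \
    "0123456789ab+ tip"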
1732 def import_(ui, repo, patch1, *patches, **opts):
1787 def import_(ui, repo, patch1, *patches, **opts):
1733 """import an ordered set of patches
1788 """import an ordered set of patches
1734
1789
1735 Import a list of patches and commit them individually.
1790 Import a list of patches and commit them individually.
1736
1791
1737 If there are outstanding changes in the working directory, import
1792 If there are outstanding changes in the working directory, import
1738 will abort unless given the -f flag.
1793 will abort unless given the -f flag.
1739
1794
1740 You can import a patch straight from a mail message. Even patches
1795 You can import a patch straight from a mail message. Even patches
1741 as attachments work (body part must be type text/plain or
1796 as attachments work (body part must be type text/plain or
1742 text/x-patch to be used). The From and Subject headers of the
1797 text/x-patch to be used). The From and Subject headers of the
1743 email message are used as the default committer and commit message.
1798 email message are used as the default committer and commit message.
1744 All text/plain body parts before the first diff are added to the
1799 All text/plain body parts before the first diff are added to the
1745 commit message.
1800 commit message.
1746
1801
1747 If the imported patch was generated by hg export, the user and
1802 If the imported patch was generated by hg export, the user and
1748 description from the patch override values from the message headers
1803 description from the patch override values from the message headers
1749 and body. Values given on the command line with -m and -u override these.
1804 and body. Values given on the command line with -m and -u override these.
1750
1805
1751 To read a patch from standard input, use patch name "-".
1806 To read a patch from standard input, use patch name "-".
1752 """
1807 """
1753 patches = (patch1,) + patches
1808 patches = (patch1,) + patches
1754
1809
1755 if not opts['force']:
1810 if not opts['force']:
1756 bail_if_changed(repo)
1811 bail_if_changed(repo)
1757
1812
1758 d = opts["base"]
1813 d = opts["base"]
1759 strip = opts["strip"]
1814 strip = opts["strip"]
1760
1815
1761 mailre = re.compile(r'(?:From |[\w-]+:)')
1816 mailre = re.compile(r'(?:From |[\w-]+:)')
1762
1817
1763 # attempt to detect the start of a patch
1818 # attempt to detect the start of a patch
1764 # (this heuristic is borrowed from quilt)
1819 # (this heuristic is borrowed from quilt)
1765 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1820 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1766 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1821 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1767 '(---|\*\*\*)[ \t])', re.MULTILINE)
1822 '(---|\*\*\*)[ \t])', re.MULTILINE)
1768
1823
1769 for patch in patches:
1824 for patch in patches:
1770 pf = os.path.join(d, patch)
1825 pf = os.path.join(d, patch)
1771
1826
1772 message = None
1827 message = None
1773 user = None
1828 user = None
1774 date = None
1829 date = None
1775 hgpatch = False
1830 hgpatch = False
1776
1831
1777 p = email.Parser.Parser()
1832 p = email.Parser.Parser()
1778 if pf == '-':
1833 if pf == '-':
1779 msg = p.parse(sys.stdin)
1834 msg = p.parse(sys.stdin)
1780 ui.status(_("applying patch from stdin\n"))
1835 ui.status(_("applying patch from stdin\n"))
1781 else:
1836 else:
1782 msg = p.parse(file(pf))
1837 msg = p.parse(file(pf))
1783 ui.status(_("applying %s\n") % patch)
1838 ui.status(_("applying %s\n") % patch)
1784
1839
1785 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1840 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1786 tmpfp = os.fdopen(fd, 'w')
1841 tmpfp = os.fdopen(fd, 'w')
1787 try:
1842 try:
1788 message = msg['Subject']
1843 message = msg['Subject']
1789 if message:
1844 if message:
1790 message = message.replace('\n\t', ' ')
1845 message = message.replace('\n\t', ' ')
1791 ui.debug('Subject: %s\n' % message)
1846 ui.debug('Subject: %s\n' % message)
1792 user = msg['From']
1847 user = msg['From']
1793 if user:
1848 if user:
1794 ui.debug('From: %s\n' % user)
1849 ui.debug('From: %s\n' % user)
1795 diffs_seen = 0
1850 diffs_seen = 0
1796 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1851 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1797 for part in msg.walk():
1852 for part in msg.walk():
1798 content_type = part.get_content_type()
1853 content_type = part.get_content_type()
1799 ui.debug('Content-Type: %s\n' % content_type)
1854 ui.debug('Content-Type: %s\n' % content_type)
1800 if content_type not in ok_types:
1855 if content_type not in ok_types:
1801 continue
1856 continue
1802 payload = part.get_payload(decode=True)
1857 payload = part.get_payload(decode=True)
1803 m = diffre.search(payload)
1858 m = diffre.search(payload)
1804 if m:
1859 if m:
1805 ui.debug(_('found patch at byte %d\n') % m.start(0))
1860 ui.debug(_('found patch at byte %d\n') % m.start(0))
1806 diffs_seen += 1
1861 diffs_seen += 1
1807 hgpatch = False
1862 hgpatch = False
1808 fp = cStringIO.StringIO()
1863 fp = cStringIO.StringIO()
1809 if message:
1864 if message:
1810 fp.write(message)
1865 fp.write(message)
1811 fp.write('\n')
1866 fp.write('\n')
1812 for line in payload[:m.start(0)].splitlines():
1867 for line in payload[:m.start(0)].splitlines():
1813 if line.startswith('# HG changeset patch'):
1868 if line.startswith('# HG changeset patch'):
1814 ui.debug(_('patch generated by hg export\n'))
1869 ui.debug(_('patch generated by hg export\n'))
1815 hgpatch = True
1870 hgpatch = True
1816 # drop earlier commit message content
1871 # drop earlier commit message content
1817 fp.seek(0)
1872 fp.seek(0)
1818 fp.truncate()
1873 fp.truncate()
1819 elif hgpatch:
1874 elif hgpatch:
1820 if line.startswith('# User '):
1875 if line.startswith('# User '):
1821 user = line[7:]
1876 user = line[7:]
1822 ui.debug('From: %s\n' % user)
1877 ui.debug('From: %s\n' % user)
1823 elif line.startswith("# Date "):
1878 elif line.startswith("# Date "):
1824 date = line[7:]
1879 date = line[7:]
1825 if not line.startswith('# '):
1880 if not line.startswith('# '):
1826 fp.write(line)
1881 fp.write(line)
1827 fp.write('\n')
1882 fp.write('\n')
1828 message = fp.getvalue()
1883 message = fp.getvalue()
1829 if tmpfp:
1884 if tmpfp:
1830 tmpfp.write(payload)
1885 tmpfp.write(payload)
1831 if not payload.endswith('\n'):
1886 if not payload.endswith('\n'):
1832 tmpfp.write('\n')
1887 tmpfp.write('\n')
1833 elif not diffs_seen and message and content_type == 'text/plain':
1888 elif not diffs_seen and message and content_type == 'text/plain':
1834 message += '\n' + payload
1889 message += '\n' + payload
1835
1890
1836 if opts['message']:
1891 if opts['message']:
1837 # pickup the cmdline msg
1892 # pickup the cmdline msg
1838 message = opts['message']
1893 message = opts['message']
1839 elif message:
1894 elif message:
1840 # pickup the patch msg
1895 # pickup the patch msg
1841 message = message.strip()
1896 message = message.strip()
1842 else:
1897 else:
1843 # launch the editor
1898 # launch the editor
1844 message = None
1899 message = None
1845 ui.debug(_('message:\n%s\n') % message)
1900 ui.debug(_('message:\n%s\n') % message)
1846
1901
1847 tmpfp.close()
1902 tmpfp.close()
1848 if not diffs_seen:
1903 if not diffs_seen:
1849 raise util.Abort(_('no diffs found'))
1904 raise util.Abort(_('no diffs found'))
1850
1905
1851 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1906 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1852 if len(files) > 0:
1907 if len(files) > 0:
1853 cfiles = files
1908 cfiles = files
1854 cwd = repo.getcwd()
1909 cwd = repo.getcwd()
1855 if cwd:
1910 if cwd:
1856 cfiles = [util.pathto(cwd, f) for f in files]
1911 cfiles = [util.pathto(cwd, f) for f in files]
1857 addremove_lock(ui, repo, cfiles, {})
1912 addremove_lock(ui, repo, cfiles, {})
1858 repo.commit(files, message, user, date)
1913 repo.commit(files, message, user, date)
1859 finally:
1914 finally:
1860 os.unlink(tmpname)
1915 os.unlink(tmpname)
1861
1916
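# A trimmed-down sketch of the mail handling above: parse a message, take the
# Subject as a candidate commit message, and pull the patch out of the first
# text part that contains something diff-like.  DIFF_START is a simplified
# stand-in for the quilt-derived heuristic used by import_.
import email
import re

DIFF_START = re.compile(r'^(diff |--- |Index: )', re.MULTILINE)

def split_mail_patch(text):
    msg = email.message_from_string(text)
    subject = (msg['Subject'] or '').replace('\n\t', ' ')
    for part in msg.walk():
        if part.get_content_type() != 'text/plain':
            continue
        payload = part.get_payload(decode=True).decode('utf-8', 'replace')
        m = DIFF_START.search(payload)
        if m:
            return subject, payload[m.start():]
    return subject, None

mail = "Subject: fix typo\n\nSome description.\n--- a/f\n+++ b/f\n"
subject, patch = split_mail_patch(mail)
assert subject == "fix typo" and patch.startswith("--- a/f")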
1862 def incoming(ui, repo, source="default", **opts):
1917 def incoming(ui, repo, source="default", **opts):
1863 """show new changesets found in source
1918 """show new changesets found in source
1864
1919
1865 Show new changesets found in the specified path/URL or the default
1920 Show new changesets found in the specified path/URL or the default
1866 pull location. These are the changesets that would be pulled if a pull
1921 pull location. These are the changesets that would be pulled if a pull
1867 was requested.
1922 was requested.
1868
1923
1869 For a remote repository, using --bundle avoids downloading the
1924 For a remote repository, using --bundle avoids downloading the
1870 changesets twice if incoming is followed by a pull.
1925 changesets twice if incoming is followed by a pull.
1871
1926
1872 See pull for valid source format details.
1927 See pull for valid source format details.
1873 """
1928 """
1874 source = ui.expandpath(source)
1929 source = ui.expandpath(source)
1875 setremoteconfig(ui, opts)
1930 setremoteconfig(ui, opts)
1876
1931
1877 other = hg.repository(ui, source)
1932 other = hg.repository(ui, source)
1878 incoming = repo.findincoming(other, force=opts["force"])
1933 incoming = repo.findincoming(other, force=opts["force"])
1879 if not incoming:
1934 if not incoming:
1880 ui.status(_("no changes found\n"))
1935 ui.status(_("no changes found\n"))
1881 return
1936 return
1882
1937
1883 cleanup = None
1938 cleanup = None
1884 try:
1939 try:
1885 fname = opts["bundle"]
1940 fname = opts["bundle"]
1886 if fname or not other.local():
1941 if fname or not other.local():
1887 # create a bundle (uncompressed if other repo is not local)
1942 # create a bundle (uncompressed if other repo is not local)
1888 cg = other.changegroup(incoming, "incoming")
1943 cg = other.changegroup(incoming, "incoming")
1889 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1944 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1890 # keep written bundle?
1945 # keep written bundle?
1891 if opts["bundle"]:
1946 if opts["bundle"]:
1892 cleanup = None
1947 cleanup = None
1893 if not other.local():
1948 if not other.local():
1894 # use the created uncompressed bundlerepo
1949 # use the created uncompressed bundlerepo
1895 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1950 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1896
1951
1897 revs = None
1952 revs = None
1898 if opts['rev']:
1953 if opts['rev']:
1899 revs = [other.lookup(rev) for rev in opts['rev']]
1954 revs = [other.lookup(rev) for rev in opts['rev']]
1900 o = other.changelog.nodesbetween(incoming, revs)[0]
1955 o = other.changelog.nodesbetween(incoming, revs)[0]
1901 if opts['newest_first']:
1956 if opts['newest_first']:
1902 o.reverse()
1957 o.reverse()
1903 displayer = show_changeset(ui, other, opts)
1958 displayer = show_changeset(ui, other, opts)
1904 for n in o:
1959 for n in o:
1905 parents = [p for p in other.changelog.parents(n) if p != nullid]
1960 parents = [p for p in other.changelog.parents(n) if p != nullid]
1906 if opts['no_merges'] and len(parents) == 2:
1961 if opts['no_merges'] and len(parents) == 2:
1907 continue
1962 continue
1908 displayer.show(changenode=n)
1963 displayer.show(changenode=n)
1909 if opts['patch']:
1964 if opts['patch']:
1910 prev = (parents and parents[0]) or nullid
1965 prev = (parents and parents[0]) or nullid
1911 dodiff(ui, ui, other, prev, n)
1966 dodiff(ui, ui, other, prev, n)
1912 ui.write("\n")
1967 ui.write("\n")
1913 finally:
1968 finally:
1914 if hasattr(other, 'close'):
1969 if hasattr(other, 'close'):
1915 other.close()
1970 other.close()
1916 if cleanup:
1971 if cleanup:
1917 os.unlink(cleanup)
1972 os.unlink(cleanup)
1918
1973
1919 def init(ui, dest=".", **opts):
1974 def init(ui, dest=".", **opts):
1920 """create a new repository in the given directory
1975 """create a new repository in the given directory
1921
1976
1922 Initialize a new repository in the given directory. If the given
1977 Initialize a new repository in the given directory. If the given
1923 directory does not exist, it is created.
1978 directory does not exist, it is created.
1924
1979
1925 If no directory is given, the current directory is used.
1980 If no directory is given, the current directory is used.
1926
1981
1927 It is possible to specify an ssh:// URL as the destination.
1982 It is possible to specify an ssh:// URL as the destination.
1928 Look at the help text for the pull command for important details
1983 Look at the help text for the pull command for important details
1929 about ssh:// URLs.
1984 about ssh:// URLs.
1930 """
1985 """
1931 setremoteconfig(ui, opts)
1986 setremoteconfig(ui, opts)
1932 hg.repository(ui, dest, create=1)
1987 hg.repository(ui, dest, create=1)
1933
1988
1934 def locate(ui, repo, *pats, **opts):
1989 def locate(ui, repo, *pats, **opts):
1935 """locate files matching specific patterns
1990 """locate files matching specific patterns
1936
1991
1937 Print all files under Mercurial control whose names match the
1992 Print all files under Mercurial control whose names match the
1938 given patterns.
1993 given patterns.
1939
1994
1940 This command searches the current directory and its
1995 This command searches the current directory and its
1941 subdirectories. To search an entire repository, move to the root
1996 subdirectories. To search an entire repository, move to the root
1942 of the repository.
1997 of the repository.
1943
1998
1944 If no patterns are given to match, this command prints all file
1999 If no patterns are given to match, this command prints all file
1945 names.
2000 names.
1946
2001
1947 If you want to feed the output of this command into the "xargs"
2002 If you want to feed the output of this command into the "xargs"
1948 command, use the "-0" option to both this command and "xargs".
2003 command, use the "-0" option to both this command and "xargs".
1949 This will avoid the problem of "xargs" treating single filenames
2004 This will avoid the problem of "xargs" treating single filenames
1950 that contain white space as multiple filenames.
2005 that contain white space as multiple filenames.
1951 """
2006 """
1952 end = opts['print0'] and '\0' or '\n'
2007 end = opts['print0'] and '\0' or '\n'
1953 rev = opts['rev']
2008 rev = opts['rev']
1954 if rev:
2009 if rev:
1955 node = repo.lookup(rev)
2010 node = repo.lookup(rev)
1956 else:
2011 else:
1957 node = None
2012 node = None
1958
2013
1959 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2014 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1960 head='(?:.*/|)'):
2015 head='(?:.*/|)'):
1961 if not node and repo.dirstate.state(abs) == '?':
2016 if not node and repo.dirstate.state(abs) == '?':
1962 continue
2017 continue
1963 if opts['fullpath']:
2018 if opts['fullpath']:
1964 ui.write(os.path.join(repo.root, abs), end)
2019 ui.write(os.path.join(repo.root, abs), end)
1965 else:
2020 else:
1966 ui.write(((pats and rel) or abs), end)
2021 ui.write(((pats and rel) or abs), end)
1967
2022
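# A tiny illustration of why the -0/--print0 convention mentioned above is
# robust: filenames are joined with NUL bytes, which cannot appear in a path,
# so names containing spaces or newlines survive the round trip.
names = ["plain.txt", "with space.txt", "odd\nname.txt"]
stream = "\0".join(names) + "\0"          # what `hg locate -0` would emit
recovered = stream.split("\0")[:-1]       # what `xargs -0` effectively sees
assert recovered == names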
1968 def log(ui, repo, *pats, **opts):
2023 def log(ui, repo, *pats, **opts):
1969 """show revision history of entire repository or files
2024 """show revision history of entire repository or files
1970
2025
1971 Print the revision history of the specified files or the entire
2026 Print the revision history of the specified files or the entire
1972 project.
2027 project.
1973
2028
1974 File history is shown without following rename or copy history of
2029 File history is shown without following rename or copy history of
1975 files. Use -f/--follow to follow history across renames and
2030 files. Use -f/--follow with a file name to follow history across
1976 copies.
2031 renames and copies. --follow without a file name will only show
2032 ancestors or descendants of the starting revision. --follow-first
2033 only follows the first parent of merge revisions.
2034
2035 If no revision range is specified, the default is tip:0 unless
2036 --follow is set, in which case the working directory parent is
2037 used as the starting revision.
1977
2038
1978 By default this command outputs: changeset id and hash, tags,
2039 By default this command outputs: changeset id and hash, tags,
1979 non-trivial parents, user, date and time, and a summary for each
2040 non-trivial parents, user, date and time, and a summary for each
1980 commit. When the -v/--verbose switch is used, the list of changed
2041 commit. When the -v/--verbose switch is used, the list of changed
1981 files and full commit message is shown.
2042 files and full commit message is shown.
1982 """
2043 """
1983 class dui(object):
2044 class dui(object):
1984 # Implement and delegate some ui protocol. Save hunks of
2045 # Implement and delegate some ui protocol. Save hunks of
1985 # output for later display in the desired order.
2046 # output for later display in the desired order.
1986 def __init__(self, ui):
2047 def __init__(self, ui):
1987 self.ui = ui
2048 self.ui = ui
1988 self.hunk = {}
2049 self.hunk = {}
1989 self.header = {}
2050 self.header = {}
1990 def bump(self, rev):
2051 def bump(self, rev):
1991 self.rev = rev
2052 self.rev = rev
1992 self.hunk[rev] = []
2053 self.hunk[rev] = []
1993 self.header[rev] = []
2054 self.header[rev] = []
1994 def note(self, *args):
2055 def note(self, *args):
1995 if self.verbose:
2056 if self.verbose:
1996 self.write(*args)
2057 self.write(*args)
1997 def status(self, *args):
2058 def status(self, *args):
1998 if not self.quiet:
2059 if not self.quiet:
1999 self.write(*args)
2060 self.write(*args)
2000 def write(self, *args):
2061 def write(self, *args):
2001 self.hunk[self.rev].append(args)
2062 self.hunk[self.rev].append(args)
2002 def write_header(self, *args):
2063 def write_header(self, *args):
2003 self.header[self.rev].append(args)
2064 self.header[self.rev].append(args)
2004 def debug(self, *args):
2065 def debug(self, *args):
2005 if self.debugflag:
2066 if self.debugflag:
2006 self.write(*args)
2067 self.write(*args)
2007 def __getattr__(self, key):
2068 def __getattr__(self, key):
2008 return getattr(self.ui, key)
2069 return getattr(self.ui, key)
2009
2070
2010 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2071 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2011
2072
2012 if opts['limit']:
2073 if opts['limit']:
2013 try:
2074 try:
2014 limit = int(opts['limit'])
2075 limit = int(opts['limit'])
2015 except ValueError:
2076 except ValueError:
2016 raise util.Abort(_('limit must be a positive integer'))
2077 raise util.Abort(_('limit must be a positive integer'))
2017 if limit <= 0: raise util.Abort(_('limit must be positive'))
2078 if limit <= 0: raise util.Abort(_('limit must be positive'))
2018 else:
2079 else:
2019 limit = sys.maxint
2080 limit = sys.maxint
2020 count = 0
2081 count = 0
2021
2082
2022 displayer = show_changeset(ui, repo, opts)
2083 displayer = show_changeset(ui, repo, opts)
2023 for st, rev, fns in changeiter:
2084 for st, rev, fns in changeiter:
2024 if st == 'window':
2085 if st == 'window':
2025 du = dui(ui)
2086 du = dui(ui)
2026 displayer.ui = du
2087 displayer.ui = du
2027 elif st == 'add':
2088 elif st == 'add':
2028 du.bump(rev)
2089 du.bump(rev)
2029 changenode = repo.changelog.node(rev)
2090 changenode = repo.changelog.node(rev)
2030 parents = [p for p in repo.changelog.parents(changenode)
2091 parents = [p for p in repo.changelog.parents(changenode)
2031 if p != nullid]
2092 if p != nullid]
2032 if opts['no_merges'] and len(parents) == 2:
2093 if opts['no_merges'] and len(parents) == 2:
2033 continue
2094 continue
2034 if opts['only_merges'] and len(parents) != 2:
2095 if opts['only_merges'] and len(parents) != 2:
2035 continue
2096 continue
2036
2097
2037 if opts['keyword']:
2098 if opts['keyword']:
2038 changes = getchange(rev)
2099 changes = getchange(rev)
2039 miss = 0
2100 miss = 0
2040 for k in [kw.lower() for kw in opts['keyword']]:
2101 for k in [kw.lower() for kw in opts['keyword']]:
2041 if not (k in changes[1].lower() or
2102 if not (k in changes[1].lower() or
2042 k in changes[4].lower() or
2103 k in changes[4].lower() or
2043 k in " ".join(changes[3][:20]).lower()):
2104 k in " ".join(changes[3][:20]).lower()):
2044 miss = 1
2105 miss = 1
2045 break
2106 break
2046 if miss:
2107 if miss:
2047 continue
2108 continue
2048
2109
2049 br = None
2110 br = None
2050 if opts['branches']:
2111 if opts['branches']:
2051 br = repo.branchlookup([repo.changelog.node(rev)])
2112 br = repo.branchlookup([repo.changelog.node(rev)])
2052
2113
2053 displayer.show(rev, brinfo=br)
2114 displayer.show(rev, brinfo=br)
2054 if opts['patch']:
2115 if opts['patch']:
2055 prev = (parents and parents[0]) or nullid
2116 prev = (parents and parents[0]) or nullid
2056 dodiff(du, du, repo, prev, changenode, match=matchfn)
2117 dodiff(du, du, repo, prev, changenode, match=matchfn)
2057 du.write("\n\n")
2118 du.write("\n\n")
2058 elif st == 'iter':
2119 elif st == 'iter':
2059 if count == limit: break
2120 if count == limit: break
2060 if du.header[rev]:
2121 if du.header[rev]:
2061 for args in du.header[rev]:
2122 for args in du.header[rev]:
2062 ui.write_header(*args)
2123 ui.write_header(*args)
2063 if du.hunk[rev]:
2124 if du.hunk[rev]:
2064 count += 1
2125 count += 1
2065 for args in du.hunk[rev]:
2126 for args in du.hunk[rev]:
2066 ui.write(*args)
2127 ui.write(*args)
2067
2128
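# A minimal stand-alone version of the buffering idea the dui class above
# uses: wrap a "ui"-like object, capture writes per revision, and replay them
# later in the desired order.  Only the parts needed for the illustration are
# modeled here.
class BufferingUI(object):
    def __init__(self, real_ui):
        self.ui = real_ui
        self.hunk = {}
        self.rev = None
    def bump(self, rev):
        self.rev = rev
        self.hunk[rev] = []
    def write(self, *args):
        self.hunk[self.rev].append(args)
    def __getattr__(self, name):          # delegate everything else
        return getattr(self.ui, name)

class ListUI(object):
    def __init__(self):
        self.out = []
    def write(self, *args):
        self.out.extend(args)

real = ListUI()
buffered = BufferingUI(real)
for rev in (2, 1):
    buffered.bump(rev)
    buffered.write("changeset %d\n" % rev)
for rev in sorted(buffered.hunk):          # replay oldest-first
    for args in buffered.hunk[rev]:
        real.write(*args)
assert real.out == ["changeset 1\n", "changeset 2\n"]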
2068 def manifest(ui, repo, rev=None):
2129 def manifest(ui, repo, rev=None):
2069 """output the latest or given revision of the project manifest
2130 """output the latest or given revision of the project manifest
2070
2131
2071 Print a list of version controlled files for the given revision.
2132 Print a list of version controlled files for the given revision.
2072
2133
2073 The manifest is the list of files being version controlled. If no revision
2134 The manifest is the list of files being version controlled. If no revision
2074 is given then the tip is used.
2135 is given then the tip is used.
2075 """
2136 """
2076 if rev:
2137 if rev:
2077 try:
2138 try:
2078 # assume all revision numbers are for changesets
2139 # assume all revision numbers are for changesets
2079 n = repo.lookup(rev)
2140 n = repo.lookup(rev)
2080 change = repo.changelog.read(n)
2141 change = repo.changelog.read(n)
2081 n = change[0]
2142 n = change[0]
2082 except hg.RepoError:
2143 except hg.RepoError:
2083 n = repo.manifest.lookup(rev)
2144 n = repo.manifest.lookup(rev)
2084 else:
2145 else:
2085 n = repo.manifest.tip()
2146 n = repo.manifest.tip()
2086 m = repo.manifest.read(n)
2147 m = repo.manifest.read(n)
2087 mf = repo.manifest.readflags(n)
2148 mf = repo.manifest.readflags(n)
2088 files = m.keys()
2149 files = m.keys()
2089 files.sort()
2150 files.sort()
2090
2151
2091 for f in files:
2152 for f in files:
2092 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2153 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2093
2154
2094 def merge(ui, repo, node=None, **opts):
2155 def merge(ui, repo, node=None, **opts):
2095 """Merge working directory with another revision
2156 """Merge working directory with another revision
2096
2157
2097 Merge the contents of the current working directory and the
2158 Merge the contents of the current working directory and the
2098 requested revision. Files that changed between either parent are
2159 requested revision. Files that changed between either parent are
2099 marked as changed for the next commit and a commit must be
2160 marked as changed for the next commit and a commit must be
2100 performed before any further updates are allowed.
2161 performed before any further updates are allowed.
2101 """
2162 """
2102 return doupdate(ui, repo, node=node, merge=True, **opts)
2163 return doupdate(ui, repo, node=node, merge=True, **opts)
2103
2164
2104 def outgoing(ui, repo, dest=None, **opts):
2165 def outgoing(ui, repo, dest=None, **opts):
2105 """show changesets not found in destination
2166 """show changesets not found in destination
2106
2167
2107 Show changesets not found in the specified destination repository or
2168 Show changesets not found in the specified destination repository or
2108 the default push location. These are the changesets that would be pushed
2169 the default push location. These are the changesets that would be pushed
2109 if a push was requested.
2170 if a push was requested.
2110
2171
2111 See pull for valid destination format details.
2172 See pull for valid destination format details.
2112 """
2173 """
2113 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2174 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2114 setremoteconfig(ui, opts)
2175 setremoteconfig(ui, opts)
2115 revs = None
2176 revs = None
2116 if opts['rev']:
2177 if opts['rev']:
2117 revs = [repo.lookup(rev) for rev in opts['rev']]
2178 revs = [repo.lookup(rev) for rev in opts['rev']]
2118
2179
2119 other = hg.repository(ui, dest)
2180 other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)
        if opts['patch']:
            prev = (parents and parents[0]) or nullid
            dodiff(ui, ui, repo, prev, n)
            ui.write("\n")

def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    # legacy
    if file_ and not rev:
        try:
            rev = repo.lookup(file_)
            file_ = None
        except hg.RepoError:
            pass
        else:
            ui.warn(_("'hg parent REV' is deprecated, "
                      "please use 'hg parents -r REV' instead\n"))

    if rev:
        if file_:
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        p = [cp.node() for cp in ctx.parents()]
    else:
        p = repo.dirstate.parents()

    br = None
    if branches is not None:
        br = repo.branchlookup(p)
    displayer = show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n, brinfo=br)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))

def postincoming(ui, repo, modheads, optupdate):
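    # modheads reports what the incoming changesets did to the heads:
    # 0 means nothing was added, 1 means no new heads were created (so an
    # automatic update is safe), and a larger value means new heads
    # appeared and the user is asked to merge them manually.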
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return doupdate(ui, repo)
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/ssh_config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression off
        Host *
          Compression on
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
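    # --rev is resolved with other.lookup(), which only works when the
    # source repository is local; pulling specific revisions from a
    # remote repository is therefore rejected below.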
    if opts['rev'] and not other.local():
        raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
    elif opts['rev']:
        revs = [other.lookup(rev) for rev in opts['rev']]
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])

def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is possible, too, if this
    feature is enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0

def rawcommit(ui, repo, *flist, **rc):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = rc['message']
    if not message and rc['logfile']:
        try:
            message = open(rc['logfile']).read()
        except IOError:
            pass
    if not message and not rc['logfile']:
        raise util.Abort(_("missing commit message"))

    files = relpath(repo, list(flist))
    if rc['files']:
        files += open(rc['files']).read().splitlines()

    rc['parent'] = map(repo.lookup, rc['parent'])

    try:
        repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
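    # classify the working directory once up front; the bucket a file
    # falls into decides below whether it may be removed, must be
    # forced, or is skipped with a warning.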
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record renamed files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs

def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state. If the working directory has two parents, you must
    explicitly specify the revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to
    their contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    parent, p2 = repo.dirstate.parents()
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    elif p2 != nullid:
        raise util.Abort(_('working dir has two parents; '
                           'you must specify the revision to revert to'))
    else:
        node = parent
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.changes(match=names.has_key, wlock=wlock)
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}
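    # each action above pairs a list of files with the message printed
    # for them; update collects the files whose contents hg.update()
    # must refresh from the target revision at the end.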

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    shutil.copyfile(rel, bakname)
                    shutil.copymode(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n' % rel))
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n' % rel))
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.update(repo, node, False, True, update.has_key, False,
                      wlock=wlock, show_stats=False)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r

def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_('no repo found'))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_('no repo found'))

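    # --daemon: re-exec ourselves with --daemon-pipefds and wait on the
    # pipe until the child signals that the server socket is ready
    # before letting the parent exit.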
    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = hgweb.server.create_server(ui, repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

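    # --daemon-pipefds: we are the detached child. Tell the waiting
    # parent we are up by writing one byte to the pipe, then point
    # stdin/stdout/stderr at the null device.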
    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored are
    not listed unless -c (clean), -i (ignored) or -A is given.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']

    files, matchfn, anypats = matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.status(files=files, match=matchfn,
                             list_ignored=all or opts['ignored'],
                             list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

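    # build one output format per status code: with --no-status only the
    # file name is printed, otherwise the status letter is prepended, and
    # --print0 swaps the newline terminator for a NUL byte.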
    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % f)
            if ((all or opts.get('copies')) and not opts.get('no_status')
                and opt == 'added' and repo.dirstate.copies.has_key(f)):
                ui.write(' %s%s' % (repo.dirstate.copies[f], end))

def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if rev_:
        r = hex(repo.lookup(rev_))
    else:
        p1, p2 = repo.dirstate.parents()
        if p1 == nullid:
            raise util.Abort(_('no revision to tag'))
        if p2 != nullid:
            raise util.Abort(_('outstanding uncommitted merges'))
        r = hex(p1)

    repo.tag(name, r, opts['local'], opts['message'], opts['user'],
             opts['date'])

def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    for t, n in l:
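        # a tag may point at a node that is not present locally (for
        # example after a partial pull); fall back to "?:?" instead of
        # aborting on the KeyError from changelog.rev().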
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            r = " ?:?"
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            ui.write("%-30s %s\n" % (t, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    n = repo.changelog.tip()
    br = None
    if opts['branches']:
        br = repo.branchlookup([n])
    show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
    if opts['patch']:
        dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

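    # a bundle starts with a six-byte header: "HG10" plus a two-letter
    # compression marker. "BZ" means the rest of the stream is bzip2
    # compressed, "UN" means it is uncompressed; anything else is rejected.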
    header = f.read(6)
    if not header.startswith("HG"):
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
    elif not header.startswith("HG10"):
        raise util.Abort(_("%s: unknown bundle version") % fname)
    elif header == "HG10BZ":
        def generator(f):
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")
            for chunk in f:
                yield zd.decompress(chunk)
    elif header == "HG10UN":
        def generator(f):
            for chunk in f:
                yield chunk
    else:
        raise util.Abort(_("%s: unknown bundle compression type")
                         % fname)
    gen = generator(util.filechunkiter(f, 4096))
    modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
                                   'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])

def undo(ui, repo):
    """undo the last commit or pull (DEPRECATED)

    (DEPRECATED)
    This command is now deprecated and will be removed in a future
    release. Please use the rollback command instead. For usage
    instructions, see the rollback command.
    """
    ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
    repo.rollback()

def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if merge:
        ui.warn(_('(the -m/--merge option is deprecated; '
                  'use the merge command instead)\n'))
    return doupdate(ui, repo, node, merge, clean, force, branch)

def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
             branch=None):
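    # -b/--branch: branchlookup() maps head nodes to the branch names
    # they carry; update to the unique head carrying the requested name,
    # or complain if there are several (or none).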
2851 if branch:
2912 if branch:
2852 br = repo.branchlookup(branch=branch)
2913 br = repo.branchlookup(branch=branch)
2853 found = []
2914 found = []
2854 for x in br:
2915 for x in br:
2855 if branch in br[x]:
2916 if branch in br[x]:
2856 found.append(x)
2917 found.append(x)
2857 if len(found) > 1:
2918 if len(found) > 1:
2858 ui.warn(_("Found multiple heads for %s\n") % branch)
2919 ui.warn(_("Found multiple heads for %s\n") % branch)
2859 for x in found:
2920 for x in found:
2860 show_changeset(ui, repo, {}).show(changenode=x, brinfo=br)
2921 show_changeset(ui, repo, {}).show(changenode=x, brinfo=br)
2861 return 1
2922 return 1
2862 if len(found) == 1:
2923 if len(found) == 1:
2863 node = found[0]
2924 node = found[0]
2864 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2925 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2865 else:
2926 else:
2866 ui.warn(_("branch %s not found\n") % (branch))
2927 ui.warn(_("branch %s not found\n") % (branch))
2867 return 1
2928 return 1
2868 else:
2929 else:
2869 node = node and repo.lookup(node) or repo.changelog.tip()
2930 node = node and repo.lookup(node) or repo.changelog.tip()
2870 return hg.update(repo, node, allow=merge, force=clean, forcemerge=force)
2931 return hg.update(repo, node, allow=merge, force=clean, forcemerge=force)
2871
2932
2872 def verify(ui, repo):
2933 def verify(ui, repo):
2873 """verify the integrity of the repository
2934 """verify the integrity of the repository
2874
2935
2875 Verify the integrity of the current repository.
2936 Verify the integrity of the current repository.
2876
2937
2877 This will perform an extensive check of the repository's
2938 This will perform an extensive check of the repository's
2878 integrity, validating the hashes and checksums of each entry in
2939 integrity, validating the hashes and checksums of each entry in
2879 the changelog, manifest, and tracked files, as well as the
2940 the changelog, manifest, and tracked files, as well as the
2880 integrity of their crosslinks and indices.
2941 integrity of their crosslinks and indices.
2881 """
2942 """
2882 return hg.verify(repo)
2943 return hg.verify(repo)
2883
2944
2884 # Command options and aliases are listed here, alphabetically
2945 # Command options and aliases are listed here, alphabetically
2885
2946
2886 table = {
2947 table = {
2887 "^add":
2948 "^add":
2888 (add,
2949 (add,
2889 [('I', 'include', [], _('include names matching the given patterns')),
2950 [('I', 'include', [], _('include names matching the given patterns')),
2890 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2951 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2891 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2952 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2892 _('hg add [OPTION]... [FILE]...')),
2953 _('hg add [OPTION]... [FILE]...')),
2893 "debugaddremove|addremove":
2954 "debugaddremove|addremove":
2894 (addremove,
2955 (addremove,
2895 [('I', 'include', [], _('include names matching the given patterns')),
2956 [('I', 'include', [], _('include names matching the given patterns')),
2896 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2957 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2897 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2958 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2898 _('hg addremove [OPTION]... [FILE]...')),
2959 _('hg addremove [OPTION]... [FILE]...')),
2899 "^annotate":
2960 "^annotate":
2900 (annotate,
2961 (annotate,
2901 [('r', 'rev', '', _('annotate the specified revision')),
2962 [('r', 'rev', '', _('annotate the specified revision')),
2902 ('a', 'text', None, _('treat all files as text')),
2963 ('a', 'text', None, _('treat all files as text')),
2903 ('u', 'user', None, _('list the author')),
2964 ('u', 'user', None, _('list the author')),
2904 ('d', 'date', None, _('list the date')),
2965 ('d', 'date', None, _('list the date')),
2905 ('n', 'number', None, _('list the revision number (default)')),
2966 ('n', 'number', None, _('list the revision number (default)')),
2906 ('c', 'changeset', None, _('list the changeset')),
2967 ('c', 'changeset', None, _('list the changeset')),
2907 ('I', 'include', [], _('include names matching the given patterns')),
2968 ('I', 'include', [], _('include names matching the given patterns')),
2908 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2969 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2909 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2970 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2910 "archive":
2971 "archive":
2911 (archive,
2972 (archive,
2912 [('', 'no-decode', None, _('do not pass files through decoders')),
2973 [('', 'no-decode', None, _('do not pass files through decoders')),
2913 ('p', 'prefix', '', _('directory prefix for files in archive')),
2974 ('p', 'prefix', '', _('directory prefix for files in archive')),
2914 ('r', 'rev', '', _('revision to distribute')),
2975 ('r', 'rev', '', _('revision to distribute')),
2915 ('t', 'type', '', _('type of distribution to create')),
2976 ('t', 'type', '', _('type of distribution to create')),
2916 ('I', 'include', [], _('include names matching the given patterns')),
2977 ('I', 'include', [], _('include names matching the given patterns')),
2917 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2978 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2918 _('hg archive [OPTION]... DEST')),
2979 _('hg archive [OPTION]... DEST')),
2919 "backout":
2980 "backout":
2920 (backout,
2981 (backout,
2921 [('', 'merge', None,
2982 [('', 'merge', None,
2922 _('merge with old dirstate parent after backout')),
2983 _('merge with old dirstate parent after backout')),
2923 ('m', 'message', '', _('use <text> as commit message')),
2984 ('m', 'message', '', _('use <text> as commit message')),
2924 ('l', 'logfile', '', _('read commit message from <file>')),
2985 ('l', 'logfile', '', _('read commit message from <file>')),
2925 ('d', 'date', '', _('record datecode as commit date')),
2986 ('d', 'date', '', _('record datecode as commit date')),
2926 ('', 'parent', '', _('parent to choose when backing out merge')),
2987 ('', 'parent', '', _('parent to choose when backing out merge')),
2927 ('u', 'user', '', _('record user as committer')),
2988 ('u', 'user', '', _('record user as committer')),
2928 ('I', 'include', [], _('include names matching the given patterns')),
2989 ('I', 'include', [], _('include names matching the given patterns')),
2929 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2990 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2930 _('hg backout [OPTION]... REV')),
2991 _('hg backout [OPTION]... REV')),
2931 "bundle":
2992 "bundle":
2932 (bundle,
2993 (bundle,
2933 [('f', 'force', None,
2994 [('f', 'force', None,
2934 _('run even when remote repository is unrelated'))],
2995 _('run even when remote repository is unrelated'))],
2935 _('hg bundle FILE DEST')),
2996 _('hg bundle FILE DEST')),
2936 "cat":
2997 "cat":
2937 (cat,
2998 (cat,
2938 [('o', 'output', '', _('print output to file with formatted name')),
2999 [('o', 'output', '', _('print output to file with formatted name')),
2939 ('r', 'rev', '', _('print the given revision')),
3000 ('r', 'rev', '', _('print the given revision')),
2940 ('I', 'include', [], _('include names matching the given patterns')),
3001 ('I', 'include', [], _('include names matching the given patterns')),
2941 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3002 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2942 _('hg cat [OPTION]... FILE...')),
3003 _('hg cat [OPTION]... FILE...')),
2943 "^clone":
3004 "^clone":
2944 (clone,
3005 (clone,
2945 [('U', 'noupdate', None, _('do not update the new working directory')),
3006 [('U', 'noupdate', None, _('do not update the new working directory')),
2946 ('r', 'rev', [],
3007 ('r', 'rev', [],
2947 _('a changeset you would like to have after cloning')),
3008 _('a changeset you would like to have after cloning')),
2948 ('', 'pull', None, _('use pull protocol to copy metadata')),
3009 ('', 'pull', None, _('use pull protocol to copy metadata')),
2949 ('', 'uncompressed', None,
3010 ('', 'uncompressed', None,
2950 _('use uncompressed transfer (fast over LAN)')),
3011 _('use uncompressed transfer (fast over LAN)')),
2951 ('e', 'ssh', '', _('specify ssh command to use')),
3012 ('e', 'ssh', '', _('specify ssh command to use')),
2952 ('', 'remotecmd', '',
3013 ('', 'remotecmd', '',
2953 _('specify hg command to run on the remote side'))],
3014 _('specify hg command to run on the remote side'))],
2954 _('hg clone [OPTION]... SOURCE [DEST]')),
3015 _('hg clone [OPTION]... SOURCE [DEST]')),
2955 "^commit|ci":
3016 "^commit|ci":
2956 (commit,
3017 (commit,
2957 [('A', 'addremove', None,
3018 [('A', 'addremove', None,
2958 _('mark new/missing files as added/removed before committing')),
3019 _('mark new/missing files as added/removed before committing')),
2959 ('m', 'message', '', _('use <text> as commit message')),
3020 ('m', 'message', '', _('use <text> as commit message')),
2960 ('l', 'logfile', '', _('read the commit message from <file>')),
3021 ('l', 'logfile', '', _('read the commit message from <file>')),
2961 ('d', 'date', '', _('record datecode as commit date')),
3022 ('d', 'date', '', _('record datecode as commit date')),
2962 ('u', 'user', '', _('record user as committer')),
3023 ('u', 'user', '', _('record user as committer')),
2963 ('I', 'include', [], _('include names matching the given patterns')),
3024 ('I', 'include', [], _('include names matching the given patterns')),
2964 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3025 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2965 _('hg commit [OPTION]... [FILE]...')),
3026 _('hg commit [OPTION]... [FILE]...')),
2966 "copy|cp":
3027 "copy|cp":
2967 (copy,
3028 (copy,
2968 [('A', 'after', None, _('record a copy that has already occurred')),
3029 [('A', 'after', None, _('record a copy that has already occurred')),
2969 ('f', 'force', None,
3030 ('f', 'force', None,
2970 _('forcibly copy over an existing managed file')),
3031 _('forcibly copy over an existing managed file')),
2971 ('I', 'include', [], _('include names matching the given patterns')),
3032 ('I', 'include', [], _('include names matching the given patterns')),
2972 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3033 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2973 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3034 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2974 _('hg copy [OPTION]... [SOURCE]... DEST')),
3035 _('hg copy [OPTION]... [SOURCE]... DEST')),
2975 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
3036 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2976 "debugcomplete":
3037 "debugcomplete":
2977 (debugcomplete,
3038 (debugcomplete,
2978 [('o', 'options', None, _('show the command options'))],
3039 [('o', 'options', None, _('show the command options'))],
2979 _('debugcomplete [-o] CMD')),
3040 _('debugcomplete [-o] CMD')),
2980 "debugrebuildstate":
3041 "debugrebuildstate":
2981 (debugrebuildstate,
3042 (debugrebuildstate,
2982 [('r', 'rev', '', _('revision to rebuild to'))],
3043 [('r', 'rev', '', _('revision to rebuild to'))],
2983 _('debugrebuildstate [-r REV] [REV]')),
3044 _('debugrebuildstate [-r REV] [REV]')),
2984 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3045 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2985 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
3046 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2986 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3047 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2987 "debugstate": (debugstate, [], _('debugstate')),
3048 "debugstate": (debugstate, [], _('debugstate')),
2988 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3049 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2989 "debugindex": (debugindex, [], _('debugindex FILE')),
3050 "debugindex": (debugindex, [], _('debugindex FILE')),
2990 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3051 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2991 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3052 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2992 "debugwalk":
3053 "debugwalk":
2993 (debugwalk,
3054 (debugwalk,
2994 [('I', 'include', [], _('include names matching the given patterns')),
3055 [('I', 'include', [], _('include names matching the given patterns')),
2995 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3056 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2996 _('debugwalk [OPTION]... [FILE]...')),
3057 _('debugwalk [OPTION]... [FILE]...')),
2997 "^diff":
3058 "^diff":
2998 (diff,
3059 (diff,
2999 [('r', 'rev', [], _('revision')),
3060 [('r', 'rev', [], _('revision')),
3000 ('a', 'text', None, _('treat all files as text')),
3061 ('a', 'text', None, _('treat all files as text')),
3001 ('p', 'show-function', None,
3062 ('p', 'show-function', None,
3002 _('show which function each change is in')),
3063 _('show which function each change is in')),
3003 ('w', 'ignore-all-space', None,
3064 ('w', 'ignore-all-space', None,
3004 _('ignore white space when comparing lines')),
3065 _('ignore white space when comparing lines')),
3005 ('b', 'ignore-space-change', None,
3066 ('b', 'ignore-space-change', None,
3006 _('ignore changes in the amount of white space')),
3067 _('ignore changes in the amount of white space')),
3007 ('B', 'ignore-blank-lines', None,
3068 ('B', 'ignore-blank-lines', None,
3008 _('ignore changes whose lines are all blank')),
3069 _('ignore changes whose lines are all blank')),
3009 ('I', 'include', [], _('include names matching the given patterns')),
3070 ('I', 'include', [], _('include names matching the given patterns')),
3010 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3071 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3011 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3072 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3012 "^export":
3073 "^export":
3013 (export,
3074 (export,
3014 [('o', 'output', '', _('print output to file with formatted name')),
3075 [('o', 'output', '', _('print output to file with formatted name')),
3015 ('a', 'text', None, _('treat all files as text')),
3076 ('a', 'text', None, _('treat all files as text')),
3016 ('', 'switch-parent', None, _('diff against the second parent'))],
3077 ('', 'switch-parent', None, _('diff against the second parent'))],
3017 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3078 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3018 "debugforget|forget":
3079 "debugforget|forget":
3019 (forget,
3080 (forget,
3020 [('I', 'include', [], _('include names matching the given patterns')),
3081 [('I', 'include', [], _('include names matching the given patterns')),
3021 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3082 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3022 _('hg forget [OPTION]... FILE...')),
3083 _('hg forget [OPTION]... FILE...')),
3023 "grep":
3084 "grep":
3024 (grep,
3085 (grep,
3025 [('0', 'print0', None, _('end fields with NUL')),
3086 [('0', 'print0', None, _('end fields with NUL')),
3026 ('', 'all', None, _('print all revisions that match')),
3087 ('', 'all', None, _('print all revisions that match')),
3027 ('i', 'ignore-case', None, _('ignore case when matching')),
3088 ('i', 'ignore-case', None, _('ignore case when matching')),
3028 ('l', 'files-with-matches', None,
3089 ('l', 'files-with-matches', None,
3029 _('print only filenames and revs that match')),
3090 _('print only filenames and revs that match')),
3030 ('n', 'line-number', None, _('print matching line numbers')),
3091 ('n', 'line-number', None, _('print matching line numbers')),
3031 ('r', 'rev', [], _('search in given revision range')),
3092 ('r', 'rev', [], _('search in given revision range')),
3032 ('u', 'user', None, _('print user who committed change')),
3093 ('u', 'user', None, _('print user who committed change')),
3033 ('I', 'include', [], _('include names matching the given patterns')),
3094 ('I', 'include', [], _('include names matching the given patterns')),
3034 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3095 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3035 _('hg grep [OPTION]... PATTERN [FILE]...')),
3096 _('hg grep [OPTION]... PATTERN [FILE]...')),
3036 "heads":
3097 "heads":
3037 (heads,
3098 (heads,
3038 [('b', 'branches', None, _('show branches')),
3099 [('b', 'branches', None, _('show branches')),
3039 ('', 'style', '', _('display using template map file')),
3100 ('', 'style', '', _('display using template map file')),
3040 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3101 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3041 ('', 'template', '', _('display with template'))],
3102 ('', 'template', '', _('display with template'))],
3042 _('hg heads [-b] [-r <rev>]')),
3103 _('hg heads [-b] [-r <rev>]')),
3043 "help": (help_, [], _('hg help [COMMAND]')),
3104 "help": (help_, [], _('hg help [COMMAND]')),
3044 "identify|id": (identify, [], _('hg identify')),
3105 "identify|id": (identify, [], _('hg identify')),
3045 "import|patch":
3106 "import|patch":
3046 (import_,
3107 (import_,
3047 [('p', 'strip', 1,
3108 [('p', 'strip', 1,
3048 _('directory strip option for patch. This has the same\n'
3109 _('directory strip option for patch. This has the same\n'
3049 'meaning as the corresponding patch option')),
3110 'meaning as the corresponding patch option')),
3050 ('m', 'message', '', _('use <text> as commit message')),
3111 ('m', 'message', '', _('use <text> as commit message')),
3051 ('b', 'base', '', _('base path')),
3112 ('b', 'base', '', _('base path')),
3052 ('f', 'force', None,
3113 ('f', 'force', None,
3053 _('skip check for outstanding uncommitted changes'))],
3114 _('skip check for outstanding uncommitted changes'))],
3054 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3115 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3055 "incoming|in": (incoming,
3116 "incoming|in": (incoming,
3056 [('M', 'no-merges', None, _('do not show merges')),
3117 [('M', 'no-merges', None, _('do not show merges')),
3057 ('f', 'force', None,
3118 ('f', 'force', None,
3058 _('run even when remote repository is unrelated')),
3119 _('run even when remote repository is unrelated')),
3059 ('', 'style', '', _('display using template map file')),
3120 ('', 'style', '', _('display using template map file')),
3060 ('n', 'newest-first', None, _('show newest record first')),
3121 ('n', 'newest-first', None, _('show newest record first')),
3061 ('', 'bundle', '', _('file to store the bundles into')),
3122 ('', 'bundle', '', _('file to store the bundles into')),
3062 ('p', 'patch', None, _('show patch')),
3123 ('p', 'patch', None, _('show patch')),
3063 ('r', 'rev', [], _('a specific revision you would like to pull')),
3124 ('r', 'rev', [], _('a specific revision you would like to pull')),
3064 ('', 'template', '', _('display with template')),
3125 ('', 'template', '', _('display with template')),
3065 ('e', 'ssh', '', _('specify ssh command to use')),
3126 ('e', 'ssh', '', _('specify ssh command to use')),
3066 ('', 'remotecmd', '',
3127 ('', 'remotecmd', '',
3067 _('specify hg command to run on the remote side'))],
3128 _('specify hg command to run on the remote side'))],
3068 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3129 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3069 ' [--bundle FILENAME] [SOURCE]')),
3130 ' [--bundle FILENAME] [SOURCE]')),
3070 "^init":
3131 "^init":
3071 (init,
3132 (init,
3072 [('e', 'ssh', '', _('specify ssh command to use')),
3133 [('e', 'ssh', '', _('specify ssh command to use')),
3073 ('', 'remotecmd', '',
3134 ('', 'remotecmd', '',
3074 _('specify hg command to run on the remote side'))],
3135 _('specify hg command to run on the remote side'))],
3075 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3136 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3076 "locate":
3137 "locate":
3077 (locate,
3138 (locate,
3078 [('r', 'rev', '', _('search the repository as it stood at rev')),
3139 [('r', 'rev', '', _('search the repository as it stood at rev')),
3079 ('0', 'print0', None,
3140 ('0', 'print0', None,
3080 _('end filenames with NUL, for use with xargs')),
3141 _('end filenames with NUL, for use with xargs')),
3081 ('f', 'fullpath', None,
3142 ('f', 'fullpath', None,
3082 _('print complete paths from the filesystem root')),
3143 _('print complete paths from the filesystem root')),
3083 ('I', 'include', [], _('include names matching the given patterns')),
3144 ('I', 'include', [], _('include names matching the given patterns')),
3084 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3145 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3085 _('hg locate [OPTION]... [PATTERN]...')),
3146 _('hg locate [OPTION]... [PATTERN]...')),
3086 "^log|history":
3147 "^log|history":
3087 (log,
3148 (log,
3088 [('b', 'branches', None, _('show branches')),
3149 [('b', 'branches', None, _('show branches')),
3089 ('f', 'follow', None,
3150 ('f', 'follow', None,
3090 _('follow file history across copies and renames')),
3151 _('follow changeset history, or file history across copies and renames')),
3152 ('', 'follow-first', None,
3153 _('only follow the first parent of merge changesets')),
3091 ('k', 'keyword', [], _('search for a keyword')),
3154 ('k', 'keyword', [], _('search for a keyword')),
3092 ('l', 'limit', '', _('limit number of changes displayed')),
3155 ('l', 'limit', '', _('limit number of changes displayed')),
3093 ('r', 'rev', [], _('show the specified revision or range')),
3156 ('r', 'rev', [], _('show the specified revision or range')),
3094 ('M', 'no-merges', None, _('do not show merges')),
3157 ('M', 'no-merges', None, _('do not show merges')),
3095 ('', 'style', '', _('display using template map file')),
3158 ('', 'style', '', _('display using template map file')),
3096 ('m', 'only-merges', None, _('show only merges')),
3159 ('m', 'only-merges', None, _('show only merges')),
3097 ('p', 'patch', None, _('show patch')),
3160 ('p', 'patch', None, _('show patch')),
3098 ('', 'template', '', _('display with template')),
3161 ('', 'template', '', _('display with template')),
3099 ('I', 'include', [], _('include names matching the given patterns')),
3162 ('I', 'include', [], _('include names matching the given patterns')),
3100 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3163 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3101 _('hg log [OPTION]... [FILE]')),
3164 _('hg log [OPTION]... [FILE]')),
3102 "manifest": (manifest, [], _('hg manifest [REV]')),
3165 "manifest": (manifest, [], _('hg manifest [REV]')),
3103 "merge":
3166 "merge":
3104 (merge,
3167 (merge,
3105 [('b', 'branch', '', _('merge with head of a specific branch')),
3168 [('b', 'branch', '', _('merge with head of a specific branch')),
3106 ('f', 'force', None, _('force a merge with outstanding changes'))],
3169 ('f', 'force', None, _('force a merge with outstanding changes'))],
3107 _('hg merge [-b TAG] [-f] [REV]')),
3170 _('hg merge [-b TAG] [-f] [REV]')),
3108 "outgoing|out": (outgoing,
3171 "outgoing|out": (outgoing,
3109 [('M', 'no-merges', None, _('do not show merges')),
3172 [('M', 'no-merges', None, _('do not show merges')),
3110 ('f', 'force', None,
3173 ('f', 'force', None,
3111 _('run even when remote repository is unrelated')),
3174 _('run even when remote repository is unrelated')),
3112 ('p', 'patch', None, _('show patch')),
3175 ('p', 'patch', None, _('show patch')),
3113 ('', 'style', '', _('display using template map file')),
3176 ('', 'style', '', _('display using template map file')),
3114 ('r', 'rev', [], _('a specific revision you would like to push')),
3177 ('r', 'rev', [], _('a specific revision you would like to push')),
3115 ('n', 'newest-first', None, _('show newest record first')),
3178 ('n', 'newest-first', None, _('show newest record first')),
3116 ('', 'template', '', _('display with template')),
3179 ('', 'template', '', _('display with template')),
3117 ('e', 'ssh', '', _('specify ssh command to use')),
3180 ('e', 'ssh', '', _('specify ssh command to use')),
3118 ('', 'remotecmd', '',
3181 ('', 'remotecmd', '',
3119 _('specify hg command to run on the remote side'))],
3182 _('specify hg command to run on the remote side'))],
3120 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3183 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3121 "^parents":
3184 "^parents":
3122 (parents,
3185 (parents,
3123 [('b', 'branches', None, _('show branches')),
3186 [('b', 'branches', None, _('show branches')),
3124 ('r', 'rev', '', _('show parents from the specified rev')),
3187 ('r', 'rev', '', _('show parents from the specified rev')),
3125 ('', 'style', '', _('display using template map file')),
3188 ('', 'style', '', _('display using template map file')),
3126 ('', 'template', '', _('display with template'))],
3189 ('', 'template', '', _('display with template'))],
3127 _('hg parents [-b] [-r REV] [FILE]')),
3190 _('hg parents [-b] [-r REV] [FILE]')),
3128 "paths": (paths, [], _('hg paths [NAME]')),
3191 "paths": (paths, [], _('hg paths [NAME]')),
3129 "^pull":
3192 "^pull":
3130 (pull,
3193 (pull,
3131 [('u', 'update', None,
3194 [('u', 'update', None,
3132 _('update the working directory to tip after pull')),
3195 _('update the working directory to tip after pull')),
3133 ('e', 'ssh', '', _('specify ssh command to use')),
3196 ('e', 'ssh', '', _('specify ssh command to use')),
3134 ('f', 'force', None,
3197 ('f', 'force', None,
3135 _('run even when remote repository is unrelated')),
3198 _('run even when remote repository is unrelated')),
3136 ('r', 'rev', [], _('a specific revision you would like to pull')),
3199 ('r', 'rev', [], _('a specific revision you would like to pull')),
3137 ('', 'remotecmd', '',
3200 ('', 'remotecmd', '',
3138 _('specify hg command to run on the remote side'))],
3201 _('specify hg command to run on the remote side'))],
3139 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3202 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3140 "^push":
3203 "^push":
3141 (push,
3204 (push,
3142 [('f', 'force', None, _('force push')),
3205 [('f', 'force', None, _('force push')),
3143 ('e', 'ssh', '', _('specify ssh command to use')),
3206 ('e', 'ssh', '', _('specify ssh command to use')),
3144 ('r', 'rev', [], _('a specific revision you would like to push')),
3207 ('r', 'rev', [], _('a specific revision you would like to push')),
3145 ('', 'remotecmd', '',
3208 ('', 'remotecmd', '',
3146 _('specify hg command to run on the remote side'))],
3209 _('specify hg command to run on the remote side'))],
3147 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3210 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3148 "debugrawcommit|rawcommit":
3211 "debugrawcommit|rawcommit":
3149 (rawcommit,
3212 (rawcommit,
3150 [('p', 'parent', [], _('parent')),
3213 [('p', 'parent', [], _('parent')),
3151 ('d', 'date', '', _('date code')),
3214 ('d', 'date', '', _('date code')),
3152 ('u', 'user', '', _('user')),
3215 ('u', 'user', '', _('user')),
3153 ('F', 'files', '', _('file list')),
3216 ('F', 'files', '', _('file list')),
3154 ('m', 'message', '', _('commit message')),
3217 ('m', 'message', '', _('commit message')),
3155 ('l', 'logfile', '', _('commit message file'))],
3218 ('l', 'logfile', '', _('commit message file'))],
3156 _('hg debugrawcommit [OPTION]... [FILE]...')),
3219 _('hg debugrawcommit [OPTION]... [FILE]...')),
3157 "recover": (recover, [], _('hg recover')),
3220 "recover": (recover, [], _('hg recover')),
3158 "^remove|rm":
3221 "^remove|rm":
3159 (remove,
3222 (remove,
3160 [('A', 'after', None, _('record remove that has already occurred')),
3223 [('A', 'after', None, _('record remove that has already occurred')),
3161 ('f', 'force', None, _('remove file even if modified')),
3224 ('f', 'force', None, _('remove file even if modified')),
3162 ('I', 'include', [], _('include names matching the given patterns')),
3225 ('I', 'include', [], _('include names matching the given patterns')),
3163 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3226 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3164 _('hg remove [OPTION]... FILE...')),
3227 _('hg remove [OPTION]... FILE...')),
3165 "rename|mv":
3228 "rename|mv":
3166 (rename,
3229 (rename,
3167 [('A', 'after', None, _('record a rename that has already occurred')),
3230 [('A', 'after', None, _('record a rename that has already occurred')),
3168 ('f', 'force', None,
3231 ('f', 'force', None,
3169 _('forcibly copy over an existing managed file')),
3232 _('forcibly copy over an existing managed file')),
3170 ('I', 'include', [], _('include names matching the given patterns')),
3233 ('I', 'include', [], _('include names matching the given patterns')),
3171 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3234 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3172 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3235 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3173 _('hg rename [OPTION]... SOURCE... DEST')),
3236 _('hg rename [OPTION]... SOURCE... DEST')),
3174 "^revert":
3237 "^revert":
3175 (revert,
3238 (revert,
3176 [('r', 'rev', '', _('revision to revert to')),
3239 [('r', 'rev', '', _('revision to revert to')),
3177 ('', 'no-backup', None, _('do not save backup copies of files')),
3240 ('', 'no-backup', None, _('do not save backup copies of files')),
3178 ('I', 'include', [], _('include names matching the given patterns')),
3241 ('I', 'include', [], _('include names matching the given patterns')),
3179 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3242 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3180 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3243 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3181 _('hg revert [-r REV] [NAME]...')),
3244 _('hg revert [-r REV] [NAME]...')),
3182 "rollback": (rollback, [], _('hg rollback')),
3245 "rollback": (rollback, [], _('hg rollback')),
3183 "root": (root, [], _('hg root')),
3246 "root": (root, [], _('hg root')),
3184 "^serve":
3247 "^serve":
3185 (serve,
3248 (serve,
3186 [('A', 'accesslog', '', _('name of access log file to write to')),
3249 [('A', 'accesslog', '', _('name of access log file to write to')),
3187 ('d', 'daemon', None, _('run server in background')),
3250 ('d', 'daemon', None, _('run server in background')),
3188 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3251 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3189 ('E', 'errorlog', '', _('name of error log file to write to')),
3252 ('E', 'errorlog', '', _('name of error log file to write to')),
3190 ('p', 'port', 0, _('port to use (default: 8000)')),
3253 ('p', 'port', 0, _('port to use (default: 8000)')),
3191 ('a', 'address', '', _('address to use')),
3254 ('a', 'address', '', _('address to use')),
3192 ('n', 'name', '',
3255 ('n', 'name', '',
3193 _('name to show in web pages (default: working dir)')),
3256 _('name to show in web pages (default: working dir)')),
3194 ('', 'webdir-conf', '', _('name of the webdir config file'
3257 ('', 'webdir-conf', '', _('name of the webdir config file'
3195 ' (serve more than one repo)')),
3258 ' (serve more than one repo)')),
3196 ('', 'pid-file', '', _('name of file to write process ID to')),
3259 ('', 'pid-file', '', _('name of file to write process ID to')),
3197 ('', 'stdio', None, _('for remote clients')),
3260 ('', 'stdio', None, _('for remote clients')),
3198 ('t', 'templates', '', _('web templates to use')),
3261 ('t', 'templates', '', _('web templates to use')),
3199 ('', 'style', '', _('template style to use')),
3262 ('', 'style', '', _('template style to use')),
3200 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3263 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3201 _('hg serve [OPTION]...')),
3264 _('hg serve [OPTION]...')),
3202 "^status|st":
3265 "^status|st":
3203 (status,
3266 (status,
3204 [('A', 'all', None, _('show status of all files')),
3267 [('A', 'all', None, _('show status of all files')),
3205 ('m', 'modified', None, _('show only modified files')),
3268 ('m', 'modified', None, _('show only modified files')),
3206 ('a', 'added', None, _('show only added files')),
3269 ('a', 'added', None, _('show only added files')),
3207 ('r', 'removed', None, _('show only removed files')),
3270 ('r', 'removed', None, _('show only removed files')),
3208 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3271 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3209 ('c', 'clean', None, _('show only files without changes')),
3272 ('c', 'clean', None, _('show only files without changes')),
3210 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3273 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3211 ('i', 'ignored', None, _('show ignored files')),
3274 ('i', 'ignored', None, _('show ignored files')),
3212 ('n', 'no-status', None, _('hide status prefix')),
3275 ('n', 'no-status', None, _('hide status prefix')),
3213 ('C', 'copies', None, _('show source of copied files')),
3276 ('C', 'copies', None, _('show source of copied files')),
3214 ('0', 'print0', None,
3277 ('0', 'print0', None,
3215 _('end filenames with NUL, for use with xargs')),
3278 _('end filenames with NUL, for use with xargs')),
3216 ('I', 'include', [], _('include names matching the given patterns')),
3279 ('I', 'include', [], _('include names matching the given patterns')),
3217 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3280 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3218 _('hg status [OPTION]... [FILE]...')),
3281 _('hg status [OPTION]... [FILE]...')),
3219 "tag":
3282 "tag":
3220 (tag,
3283 (tag,
3221 [('l', 'local', None, _('make the tag local')),
3284 [('l', 'local', None, _('make the tag local')),
3222 ('m', 'message', '', _('message for tag commit log entry')),
3285 ('m', 'message', '', _('message for tag commit log entry')),
3223 ('d', 'date', '', _('record datecode as commit date')),
3286 ('d', 'date', '', _('record datecode as commit date')),
3224 ('u', 'user', '', _('record user as committer')),
3287 ('u', 'user', '', _('record user as committer')),
3225 ('r', 'rev', '', _('revision to tag'))],
3288 ('r', 'rev', '', _('revision to tag'))],
3226 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3289 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3227 "tags": (tags, [], _('hg tags')),
3290 "tags": (tags, [], _('hg tags')),
3228 "tip":
3291 "tip":
3229 (tip,
3292 (tip,
3230 [('b', 'branches', None, _('show branches')),
3293 [('b', 'branches', None, _('show branches')),
3231 ('', 'style', '', _('display using template map file')),
3294 ('', 'style', '', _('display using template map file')),
3232 ('p', 'patch', None, _('show patch')),
3295 ('p', 'patch', None, _('show patch')),
3233 ('', 'template', '', _('display with template'))],
3296 ('', 'template', '', _('display with template'))],
3234 _('hg tip [-b] [-p]')),
3297 _('hg tip [-b] [-p]')),
3235 "unbundle":
3298 "unbundle":
3236 (unbundle,
3299 (unbundle,
3237 [('u', 'update', None,
3300 [('u', 'update', None,
3238 _('update the working directory to tip after unbundle'))],
3301 _('update the working directory to tip after unbundle'))],
3239 _('hg unbundle [-u] FILE')),
3302 _('hg unbundle [-u] FILE')),
3240 "debugundo|undo": (undo, [], _('hg undo')),
3303 "debugundo|undo": (undo, [], _('hg undo')),
3241 "^update|up|checkout|co":
3304 "^update|up|checkout|co":
3242 (update,
3305 (update,
3243 [('b', 'branch', '', _('checkout the head of a specific branch')),
3306 [('b', 'branch', '', _('checkout the head of a specific branch')),
3244 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3307 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3245 ('C', 'clean', None, _('overwrite locally modified files')),
3308 ('C', 'clean', None, _('overwrite locally modified files')),
3246 ('f', 'force', None, _('force a merge with outstanding changes'))],
3309 ('f', 'force', None, _('force a merge with outstanding changes'))],
3247 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3310 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3248 "verify": (verify, [], _('hg verify')),
3311 "verify": (verify, [], _('hg verify')),
3249 "version": (show_version, [], _('hg version')),
3312 "version": (show_version, [], _('hg version')),
3250 }
3313 }
3251
3314
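# Each entry in the command table maps a "name|alias|..." key (a
# leading "^" marks the command for the short help list) to a tuple of
# (function, option list, usage synopsis).  Every option is a
# (short flag, long name, default, help text) tuple consumed by
# fancyopts.  A purely illustrative sketch of the shape -- "frob" is
# not a command in this table:
#
#   "frob|fr":
#   (frob,
#    [('n', 'dry-run', None, _('do not perform actions'))],
#    _('hg frob [OPTION]...')),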
3252 globalopts = [
3315 globalopts = [
3253 ('R', 'repository', '',
3316 ('R', 'repository', '',
3254 _('repository root directory or symbolic path name')),
3317 _('repository root directory or symbolic path name')),
3255 ('', 'cwd', '', _('change working directory')),
3318 ('', 'cwd', '', _('change working directory')),
3256 ('y', 'noninteractive', None,
3319 ('y', 'noninteractive', None,
3257 _('do not prompt, assume \'yes\' for any required answers')),
3320 _('do not prompt, assume \'yes\' for any required answers')),
3258 ('q', 'quiet', None, _('suppress output')),
3321 ('q', 'quiet', None, _('suppress output')),
3259 ('v', 'verbose', None, _('enable additional output')),
3322 ('v', 'verbose', None, _('enable additional output')),
3260 ('', 'config', [], _('set/override config option')),
3323 ('', 'config', [], _('set/override config option')),
3261 ('', 'debug', None, _('enable debugging output')),
3324 ('', 'debug', None, _('enable debugging output')),
3262 ('', 'debugger', None, _('start debugger')),
3325 ('', 'debugger', None, _('start debugger')),
3263 ('', 'lsprof', None, _('print improved command execution profile')),
3326 ('', 'lsprof', None, _('print improved command execution profile')),
3264 ('', 'traceback', None, _('print traceback on exception')),
3327 ('', 'traceback', None, _('print traceback on exception')),
3265 ('', 'time', None, _('time how long the command takes')),
3328 ('', 'time', None, _('time how long the command takes')),
3266 ('', 'profile', None, _('print command execution profile')),
3329 ('', 'profile', None, _('print command execution profile')),
3267 ('', 'version', None, _('output version information and exit')),
3330 ('', 'version', None, _('output version information and exit')),
3268 ('h', 'help', None, _('display help and exit')),
3331 ('h', 'help', None, _('display help and exit')),
3269 ]
3332 ]
3270
3333
3271 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3334 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3272 " debugindex debugindexdot")
3335 " debugindex debugindexdot")
3273 optionalrepo = ("paths serve debugconfig")
3336 optionalrepo = ("paths serve debugconfig")
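# norepo lists the commands that run without creating a repository
# object; optionalrepo lists those that take a repository only when one
# can be found.  dispatch() below splits these space-separated strings
# to decide whether to instantiate hg.repository before calling the
# command function.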
3274
3337
3275 def findpossible(cmd):
3338 def findpossible(cmd):
3276 """
3339 """
3277 Return cmd -> (aliases, command table entry)
3340 Return cmd -> (aliases, command table entry)
3278 for each matching command.
3341 for each matching command.
3279 Return debug commands (or their aliases) only if no normal command matches.
3342 Return debug commands (or their aliases) only if no normal command matches.
3280 """
3343 """
3281 choice = {}
3344 choice = {}
3282 debugchoice = {}
3345 debugchoice = {}
3283 for e in table.keys():
3346 for e in table.keys():
3284 aliases = e.lstrip("^").split("|")
3347 aliases = e.lstrip("^").split("|")
3285 found = None
3348 found = None
3286 if cmd in aliases:
3349 if cmd in aliases:
3287 found = cmd
3350 found = cmd
3288 else:
3351 else:
3289 for a in aliases:
3352 for a in aliases:
3290 if a.startswith(cmd):
3353 if a.startswith(cmd):
3291 found = a
3354 found = a
3292 break
3355 break
3293 if found is not None:
3356 if found is not None:
3294 if aliases[0].startswith("debug"):
3357 if aliases[0].startswith("debug"):
3295 debugchoice[found] = (aliases, table[e])
3358 debugchoice[found] = (aliases, table[e])
3296 else:
3359 else:
3297 choice[found] = (aliases, table[e])
3360 choice[found] = (aliases, table[e])
3298
3361
3299 if not choice and debugchoice:
3362 if not choice and debugchoice:
3300 choice = debugchoice
3363 choice = debugchoice
3301
3364
3302 return choice
3365 return choice
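# Rough illustration of the matching above (hypothetical call; exact
# results depend on the table contents): a unique prefix such as "stat"
# resolves to the "status" entry, while a prefix that only matches
# debug commands falls back to the debugchoice dict.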
3303
3366
3304 def findcmd(cmd):
3367 def findcmd(cmd):
3305 """Return (aliases, command table entry) for command string."""
3368 """Return (aliases, command table entry) for command string."""
3306 choice = findpossible(cmd)
3369 choice = findpossible(cmd)
3307
3370
3308 if choice.has_key(cmd):
3371 if choice.has_key(cmd):
3309 return choice[cmd]
3372 return choice[cmd]
3310
3373
3311 if len(choice) > 1:
3374 if len(choice) > 1:
3312 clist = choice.keys()
3375 clist = choice.keys()
3313 clist.sort()
3376 clist.sort()
3314 raise AmbiguousCommand(cmd, clist)
3377 raise AmbiguousCommand(cmd, clist)
3315
3378
3316 if choice:
3379 if choice:
3317 return choice.values()[0]
3380 return choice.values()[0]
3318
3381
3319 raise UnknownCommand(cmd)
3382 raise UnknownCommand(cmd)
3320
3383
3321 def catchterm(*args):
3384 def catchterm(*args):
3322 raise util.SignalInterrupt
3385 raise util.SignalInterrupt
3323
3386
3324 def run():
3387 def run():
3325 sys.exit(dispatch(sys.argv[1:]))
3388 sys.exit(dispatch(sys.argv[1:]))
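# run() is the top-level entry point: it hands the command line to
# dispatch() and exits the process with whatever status dispatch()
# returns.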
3326
3389
3327 class ParseError(Exception):
3390 class ParseError(Exception):
3328 """Exception raised on errors in parsing the command line."""
3391 """Exception raised on errors in parsing the command line."""
3329
3392
3330 def parse(ui, args):
3393 def parse(ui, args):
3331 options = {}
3394 options = {}
3332 cmdoptions = {}
3395 cmdoptions = {}
3333
3396
3334 try:
3397 try:
3335 args = fancyopts.fancyopts(args, globalopts, options)
3398 args = fancyopts.fancyopts(args, globalopts, options)
3336 except fancyopts.getopt.GetoptError, inst:
3399 except fancyopts.getopt.GetoptError, inst:
3337 raise ParseError(None, inst)
3400 raise ParseError(None, inst)
3338
3401
3339 if args:
3402 if args:
3340 cmd, args = args[0], args[1:]
3403 cmd, args = args[0], args[1:]
3341 aliases, i = findcmd(cmd)
3404 aliases, i = findcmd(cmd)
3342 cmd = aliases[0]
3405 cmd = aliases[0]
3343 defaults = ui.config("defaults", cmd)
3406 defaults = ui.config("defaults", cmd)
3344 if defaults:
3407 if defaults:
3345 args = defaults.split() + args
3408 args = defaults.split() + args
3346 c = list(i[1])
3409 c = list(i[1])
3347 else:
3410 else:
3348 cmd = None
3411 cmd = None
3349 c = []
3412 c = []
3350
3413
3351 # combine global options into local
3414 # combine global options into local
3352 for o in globalopts:
3415 for o in globalopts:
3353 c.append((o[0], o[1], options[o[1]], o[3]))
3416 c.append((o[0], o[1], options[o[1]], o[3]))
3354
3417
3355 try:
3418 try:
3356 args = fancyopts.fancyopts(args, c, cmdoptions)
3419 args = fancyopts.fancyopts(args, c, cmdoptions)
3357 except fancyopts.getopt.GetoptError, inst:
3420 except fancyopts.getopt.GetoptError, inst:
3358 raise ParseError(cmd, inst)
3421 raise ParseError(cmd, inst)
3359
3422
3360 # separate global options back out
3423 # separate global options back out
3361 for o in globalopts:
3424 for o in globalopts:
3362 n = o[1]
3425 n = o[1]
3363 options[n] = cmdoptions[n]
3426 options[n] = cmdoptions[n]
3364 del cmdoptions[n]
3427 del cmdoptions[n]
3365
3428
3366 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3429 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
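# parse() returns (canonical command name, command function or None,
# remaining arguments, global options, command options).  Global
# options are temporarily folded into the per-command option list so
# that they are also accepted after the command name, then separated
# back out before returning.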
3367
3430
3368 external = {}
3431 external = {}
3369
3432
3370 def findext(name):
3433 def findext(name):
3371 '''return module with given extension name'''
3434 '''return module with given extension name'''
3372 try:
3435 try:
3373 return sys.modules[external[name]]
3436 return sys.modules[external[name]]
3374 except KeyError:
3437 except KeyError:
3375 for k, v in external.iteritems():
3438 for k, v in external.iteritems():
3376 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3439 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3377 return sys.modules[v]
3440 return sys.modules[v]
3378 raise KeyError(name)
3441 raise KeyError(name)
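# external maps the configured extension name to the name of the loaded
# module; findext() also accepts shortened forms (the last component of
# a dotted or path-style name) and raises KeyError when nothing matches.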
3379
3442
3380 def dispatch(args):
3443 def dispatch(args):
3381 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3444 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3382 num = getattr(signal, name, None)
3445 num = getattr(signal, name, None)
3383 if num: signal.signal(num, catchterm)
3446 if num: signal.signal(num, catchterm)
3384
3447
3385 try:
3448 try:
3386 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3449 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3387 except util.Abort, inst:
3450 except util.Abort, inst:
3388 sys.stderr.write(_("abort: %s\n") % inst)
3451 sys.stderr.write(_("abort: %s\n") % inst)
3389 return -1
3452 return -1
3390
3453
3391 for ext_name, load_from_name in u.extensions():
3454 for ext_name, load_from_name in u.extensions():
3392 try:
3455 try:
3393 if load_from_name:
3456 if load_from_name:
3394 # the module will be loaded in sys.modules
3457 # the module will be loaded in sys.modules
3395 # choose a unique name so that it doesn't
3458 # choose a unique name so that it doesn't
3396 # conflict with other modules
3459 # conflict with other modules
3397 module_name = "hgext_%s" % ext_name.replace('.', '_')
3460 module_name = "hgext_%s" % ext_name.replace('.', '_')
3398 mod = imp.load_source(module_name, load_from_name)
3461 mod = imp.load_source(module_name, load_from_name)
3399 else:
3462 else:
3400 def importh(name):
3463 def importh(name):
3401 mod = __import__(name)
3464 mod = __import__(name)
3402 components = name.split('.')
3465 components = name.split('.')
3403 for comp in components[1:]:
3466 for comp in components[1:]:
3404 mod = getattr(mod, comp)
3467 mod = getattr(mod, comp)
3405 return mod
3468 return mod
3406 try:
3469 try:
3407 mod = importh("hgext.%s" % ext_name)
3470 mod = importh("hgext.%s" % ext_name)
3408 except ImportError:
3471 except ImportError:
3409 mod = importh(ext_name)
3472 mod = importh(ext_name)
3410 external[ext_name] = mod.__name__
3473 external[ext_name] = mod.__name__
3411 except (util.SignalInterrupt, KeyboardInterrupt):
3474 except (util.SignalInterrupt, KeyboardInterrupt):
3412 raise
3475 raise
3413 except Exception, inst:
3476 except Exception, inst:
3414 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3477 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3415 if u.print_exc():
3478 if u.print_exc():
3416 return 1
3479 return 1
3417
3480
3418 for name in external.itervalues():
3481 for name in external.itervalues():
3419 mod = sys.modules[name]
3482 mod = sys.modules[name]
3420 uisetup = getattr(mod, 'uisetup', None)
3483 uisetup = getattr(mod, 'uisetup', None)
3421 if uisetup:
3484 if uisetup:
3422 uisetup(u)
3485 uisetup(u)
3423 cmdtable = getattr(mod, 'cmdtable', {})
3486 cmdtable = getattr(mod, 'cmdtable', {})
3424 for t in cmdtable:
3487 for t in cmdtable:
3425 if t in table:
3488 if t in table:
3426 u.warn(_("module %s overrides %s\n") % (name, t))
3489 u.warn(_("module %s overrides %s\n") % (name, t))
3427 table.update(cmdtable)
3490 table.update(cmdtable)
3428
3491
3429 try:
3492 try:
3430 cmd, func, args, options, cmdoptions = parse(u, args)
3493 cmd, func, args, options, cmdoptions = parse(u, args)
3431 if options["time"]:
3494 if options["time"]:
3432 def get_times():
3495 def get_times():
3433 t = os.times()
3496 t = os.times()
3434 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3497 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3435 t = (t[0], t[1], t[2], t[3], time.clock())
3498 t = (t[0], t[1], t[2], t[3], time.clock())
3436 return t
3499 return t
3437 s = get_times()
3500 s = get_times()
3438 def print_time():
3501 def print_time():
3439 t = get_times()
3502 t = get_times()
3440 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3503 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3441 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3504 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3442 atexit.register(print_time)
3505 atexit.register(print_time)
3443
3506
3444 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3507 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3445 not options["noninteractive"], options["traceback"],
3508 not options["noninteractive"], options["traceback"],
3446 options["config"])
3509 options["config"])
3447
3510
3448 # enter the debugger before command execution
3511 # enter the debugger before command execution
3449 if options['debugger']:
3512 if options['debugger']:
3450 pdb.set_trace()
3513 pdb.set_trace()
3451
3514
3452 try:
3515 try:
3453 if options['cwd']:
3516 if options['cwd']:
3454 try:
3517 try:
3455 os.chdir(options['cwd'])
3518 os.chdir(options['cwd'])
3456 except OSError, inst:
3519 except OSError, inst:
3457 raise util.Abort('%s: %s' %
3520 raise util.Abort('%s: %s' %
3458 (options['cwd'], inst.strerror))
3521 (options['cwd'], inst.strerror))
3459
3522
3460 path = u.expandpath(options["repository"]) or ""
3523 path = u.expandpath(options["repository"]) or ""
3461 repo = path and hg.repository(u, path=path) or None
3524 repo = path and hg.repository(u, path=path) or None
3462
3525
3463 if options['help']:
3526 if options['help']:
3464 return help_(u, cmd, options['version'])
3527 return help_(u, cmd, options['version'])
3465 elif options['version']:
3528 elif options['version']:
3466 return show_version(u)
3529 return show_version(u)
3467 elif not cmd:
3530 elif not cmd:
3468 return help_(u, 'shortlist')
3531 return help_(u, 'shortlist')
3469
3532
3470 if cmd not in norepo.split():
3533 if cmd not in norepo.split():
3471 try:
3534 try:
3472 if not repo:
3535 if not repo:
3473 repo = hg.repository(u, path=path)
3536 repo = hg.repository(u, path=path)
3474 u = repo.ui
3537 u = repo.ui
3475 for name in external.itervalues():
3538 for name in external.itervalues():
3476 mod = sys.modules[name]
3539 mod = sys.modules[name]
3477 if hasattr(mod, 'reposetup'):
3540 if hasattr(mod, 'reposetup'):
3478 mod.reposetup(u, repo)
3541 mod.reposetup(u, repo)
3479 except hg.RepoError:
3542 except hg.RepoError:
3480 if cmd not in optionalrepo.split():
3543 if cmd not in optionalrepo.split():
3481 raise
3544 raise
3482 d = lambda: func(u, repo, *args, **cmdoptions)
3545 d = lambda: func(u, repo, *args, **cmdoptions)
3483 else:
3546 else:
3484 d = lambda: func(u, *args, **cmdoptions)
3547 d = lambda: func(u, *args, **cmdoptions)
3485
3548
3486 # reupdate the options, repo/.hg/hgrc may have changed them
3549 # reupdate the options, repo/.hg/hgrc may have changed them
3487 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3550 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3488 not options["noninteractive"], options["traceback"],
3551 not options["noninteractive"], options["traceback"],
3489 options["config"])
3552 options["config"])
3490
3553
3491 try:
3554 try:
3492 if options['profile']:
3555 if options['profile']:
3493 import hotshot, hotshot.stats
3556 import hotshot, hotshot.stats
3494 prof = hotshot.Profile("hg.prof")
3557 prof = hotshot.Profile("hg.prof")
3495 try:
3558 try:
3496 try:
3559 try:
3497 return prof.runcall(d)
3560 return prof.runcall(d)
3498 except:
3561 except:
3499 try:
3562 try:
3500 u.warn(_('exception raised - generating '
3563 u.warn(_('exception raised - generating '
3501 'profile anyway\n'))
3564 'profile anyway\n'))
3502 except:
3565 except:
3503 pass
3566 pass
3504 raise
3567 raise
3505 finally:
3568 finally:
3506 prof.close()
3569 prof.close()
3507 stats = hotshot.stats.load("hg.prof")
3570 stats = hotshot.stats.load("hg.prof")
3508 stats.strip_dirs()
3571 stats.strip_dirs()
3509 stats.sort_stats('time', 'calls')
3572 stats.sort_stats('time', 'calls')
3510 stats.print_stats(40)
3573 stats.print_stats(40)
3511 elif options['lsprof']:
3574 elif options['lsprof']:
3512 try:
3575 try:
3513 from mercurial import lsprof
3576 from mercurial import lsprof
3514 except ImportError:
3577 except ImportError:
3515 raise util.Abort(_(
3578 raise util.Abort(_(
3516 'lsprof not available - install from '
3579 'lsprof not available - install from '
3517 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3580 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3518 p = lsprof.Profiler()
3581 p = lsprof.Profiler()
3519 p.enable(subcalls=True)
3582 p.enable(subcalls=True)
3520 try:
3583 try:
3521 return d()
3584 return d()
3522 finally:
3585 finally:
3523 p.disable()
3586 p.disable()
3524 stats = lsprof.Stats(p.getstats())
3587 stats = lsprof.Stats(p.getstats())
3525 stats.sort()
3588 stats.sort()
3526 stats.pprint(top=10, file=sys.stderr, climit=5)
3589 stats.pprint(top=10, file=sys.stderr, climit=5)
3527 else:
3590 else:
3528 return d()
3591 return d()
3529 finally:
3592 finally:
3530 u.flush()
3593 u.flush()
3531 except:
3594 except:
3532 # enter the debugger when we hit an exception
3595 # enter the debugger when we hit an exception
3533 if options['debugger']:
3596 if options['debugger']:
3534 pdb.post_mortem(sys.exc_info()[2])
3597 pdb.post_mortem(sys.exc_info()[2])
3535 u.print_exc()
3598 u.print_exc()
3536 raise
3599 raise
3537 except ParseError, inst:
3600 except ParseError, inst:
3538 if inst.args[0]:
3601 if inst.args[0]:
3539 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3602 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3540 help_(u, inst.args[0])
3603 help_(u, inst.args[0])
3541 else:
3604 else:
3542 u.warn(_("hg: %s\n") % inst.args[1])
3605 u.warn(_("hg: %s\n") % inst.args[1])
3543 help_(u, 'shortlist')
3606 help_(u, 'shortlist')
3544 except AmbiguousCommand, inst:
3607 except AmbiguousCommand, inst:
3545 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3608 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3546 (inst.args[0], " ".join(inst.args[1])))
3609 (inst.args[0], " ".join(inst.args[1])))
3547 except UnknownCommand, inst:
3610 except UnknownCommand, inst:
3548 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3611 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3549 help_(u, 'shortlist')
3612 help_(u, 'shortlist')
3550 except hg.RepoError, inst:
3613 except hg.RepoError, inst:
3551 u.warn(_("abort: %s!\n") % inst)
3614 u.warn(_("abort: %s!\n") % inst)
3552 except lock.LockHeld, inst:
3615 except lock.LockHeld, inst:
3553 if inst.errno == errno.ETIMEDOUT:
3616 if inst.errno == errno.ETIMEDOUT:
3554 reason = _('timed out waiting for lock held by %s') % inst.locker
3617 reason = _('timed out waiting for lock held by %s') % inst.locker
3555 else:
3618 else:
3556 reason = _('lock held by %s') % inst.locker
3619 reason = _('lock held by %s') % inst.locker
3557 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3620 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3558 except lock.LockUnavailable, inst:
3621 except lock.LockUnavailable, inst:
3559 u.warn(_("abort: could not lock %s: %s\n") %
3622 u.warn(_("abort: could not lock %s: %s\n") %
3560 (inst.desc or inst.filename, inst.strerror))
3623 (inst.desc or inst.filename, inst.strerror))
3561 except revlog.RevlogError, inst:
3624 except revlog.RevlogError, inst:
3562 u.warn(_("abort: "), inst, "!\n")
3625 u.warn(_("abort: "), inst, "!\n")
3563 except util.SignalInterrupt:
3626 except util.SignalInterrupt:
3564 u.warn(_("killed!\n"))
3627 u.warn(_("killed!\n"))
3565 except KeyboardInterrupt:
3628 except KeyboardInterrupt:
3566 try:
3629 try:
3567 u.warn(_("interrupted!\n"))
3630 u.warn(_("interrupted!\n"))
3568 except IOError, inst:
3631 except IOError, inst:
3569 if inst.errno == errno.EPIPE:
3632 if inst.errno == errno.EPIPE:
3570 if u.debugflag:
3633 if u.debugflag:
3571 u.warn(_("\nbroken pipe\n"))
3634 u.warn(_("\nbroken pipe\n"))
3572 else:
3635 else:
3573 raise
3636 raise
3574 except IOError, inst:
3637 except IOError, inst:
3575 if hasattr(inst, "code"):
3638 if hasattr(inst, "code"):
3576 u.warn(_("abort: %s\n") % inst)
3639 u.warn(_("abort: %s\n") % inst)
3577 elif hasattr(inst, "reason"):
3640 elif hasattr(inst, "reason"):
3578 u.warn(_("abort: error: %s\n") % inst.reason[1])
3641 u.warn(_("abort: error: %s\n") % inst.reason[1])
3579 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3642 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3580 if u.debugflag:
3643 if u.debugflag:
3581 u.warn(_("broken pipe\n"))
3644 u.warn(_("broken pipe\n"))
3582 elif getattr(inst, "strerror", None):
3645 elif getattr(inst, "strerror", None):
3583 if getattr(inst, "filename", None):
3646 if getattr(inst, "filename", None):
3584 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3647 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3585 else:
3648 else:
3586 u.warn(_("abort: %s\n") % inst.strerror)
3649 u.warn(_("abort: %s\n") % inst.strerror)
3587 else:
3650 else:
3588 raise
3651 raise
3589 except OSError, inst:
3652 except OSError, inst:
3590 if hasattr(inst, "filename"):
3653 if hasattr(inst, "filename"):
3591 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3654 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3592 else:
3655 else:
3593 u.warn(_("abort: %s\n") % inst.strerror)
3656 u.warn(_("abort: %s\n") % inst.strerror)
3594 except util.Abort, inst:
3657 except util.Abort, inst:
3595 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3658 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3596 except TypeError, inst:
3659 except TypeError, inst:
3597 # was this an argument error?
3660 # was this an argument error?
3598 tb = traceback.extract_tb(sys.exc_info()[2])
3661 tb = traceback.extract_tb(sys.exc_info()[2])
3599 if len(tb) > 2: # no
3662 if len(tb) > 2: # no
3600 raise
3663 raise
3601 u.debug(inst, "\n")
3664 u.debug(inst, "\n")
3602 u.warn(_("%s: invalid arguments\n") % cmd)
3665 u.warn(_("%s: invalid arguments\n") % cmd)
3603 help_(u, cmd)
3666 help_(u, cmd)
3604 except SystemExit, inst:
3667 except SystemExit, inst:
3605 # Commands shouldn't sys.exit directly, but give a return code.
3668 # Commands shouldn't sys.exit directly, but give a return code.
3606 # Just in case, catch this and pass the exit code to the caller.
3669 # Just in case, catch this and pass the exit code to the caller.
3607 return inst.code
3670 return inst.code
3608 except:
3671 except:
3609 u.warn(_("** unknown exception encountered, details follow\n"))
3672 u.warn(_("** unknown exception encountered, details follow\n"))
3610 u.warn(_("** report bug details to "
3673 u.warn(_("** report bug details to "
3611 "http://www.selenic.com/mercurial/bts\n"))
3674 "http://www.selenic.com/mercurial/bts\n"))
3612 u.warn(_("** or mercurial@selenic.com\n"))
3675 u.warn(_("** or mercurial@selenic.com\n"))
3613 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3676 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3614 % version.get_version())
3677 % version.get_version())
3615 raise
3678 raise
3616
3679
3617 return -1
3680 return -1
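# Every recognized failure above is reported via u.warn() and dispatch()
# falls through to return -1; unexpected exceptions print the bug-report
# banner and are re-raised so a full traceback reaches the user.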
@@ -1,1753 +1,1758
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("no repo found"))
30 raise repo.RepoError(_("no repo found"))
31 path = p
31 path = p
32 self.path = os.path.join(path, ".hg")
32 self.path = os.path.join(path, ".hg")
33
33
34 if not create and not os.path.isdir(self.path):
34 if not create and not os.path.isdir(self.path):
35 raise repo.RepoError(_("repository %s not found") % path)
35 raise repo.RepoError(_("repository %s not found") % path)
36
36
37 self.root = os.path.abspath(path)
37 self.root = os.path.abspath(path)
38 self.origroot = path
38 self.origroot = path
39 self.ui = ui.ui(parentui=parentui)
39 self.ui = ui.ui(parentui=parentui)
40 self.opener = util.opener(self.path)
40 self.opener = util.opener(self.path)
41 self.wopener = util.opener(self.root)
41 self.wopener = util.opener(self.root)
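# opener creates files relative to .hg/ (repository metadata), while
# wopener creates them relative to the working directory root.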
42
42
43 try:
43 try:
44 self.ui.readconfig(self.join("hgrc"), self.root)
44 self.ui.readconfig(self.join("hgrc"), self.root)
45 except IOError:
45 except IOError:
46 pass
46 pass
47
47
48 v = self.ui.revlogopts
48 v = self.ui.revlogopts
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 fl = v.get('flags', None)
51 fl = v.get('flags', None)
52 flags = 0
52 flags = 0
53 if fl != None:
53 if fl != None:
54 for x in fl.split():
54 for x in fl.split():
55 flags |= revlog.flagstr(x)
55 flags |= revlog.flagstr(x)
56 elif self.revlogv1:
56 elif self.revlogv1:
57 flags = revlog.REVLOG_DEFAULT_FLAGS
57 flags = revlog.REVLOG_DEFAULT_FLAGS
58
58
59 v = self.revlogversion | flags
59 v = self.revlogversion | flags
60 self.manifest = manifest.manifest(self.opener, v)
60 self.manifest = manifest.manifest(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
62
62
63 # the changelog might not have the inline index flag
63 # the changelog might not have the inline index flag
64 # on. If the format of the changelog is the same as found in
64 # on. If the format of the changelog is the same as found in
65 # .hgrc, apply any flags found in the .hgrc as well.
65 # .hgrc, apply any flags found in the .hgrc as well.
66 # Otherwise, just use the version from the changelog
66 # Otherwise, just use the version from the changelog
67 v = self.changelog.version
67 v = self.changelog.version
68 if v == self.revlogversion:
68 if v == self.revlogversion:
69 v |= flags
69 v |= flags
70 self.revlogversion = v
70 self.revlogversion = v
71
71
72 self.tagscache = None
72 self.tagscache = None
73 self.nodetagscache = None
73 self.nodetagscache = None
74 self.encodepats = None
74 self.encodepats = None
75 self.decodepats = None
75 self.decodepats = None
76 self.transhandle = None
76 self.transhandle = None
77
77
78 if create:
78 if create:
79 if not os.path.exists(path):
79 if not os.path.exists(path):
80 os.mkdir(path)
80 os.mkdir(path)
81 os.mkdir(self.path)
81 os.mkdir(self.path)
82 os.mkdir(self.join("data"))
82 os.mkdir(self.join("data"))
83
83
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85
85
86 def url(self):
87 return 'file:' + self.root
88
89 def hook(self, name, throw=False, **args):
90 def callhook(hname, funcname):
91 '''call python hook. hook is callable object, looked up as
92 name in python module. if callable returns "true", hook
93 fails, else passes. if hook raises exception, treated as
94 hook failure. exception propagates if throw is "true".
95
96 reason for "true" meaning "hook failed" is so that
97 unmodified commands (e.g. mercurial.commands.update) can
98 be run as hooks without wrappers to convert return values.'''
99
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
101 d = funcname.rfind('.')
102 if d == -1:
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
104 % (hname, funcname))
105 modname = funcname[:d]
106 try:
107 obj = __import__(modname)
108 except ImportError:
109 try:
110 # extensions are loaded with hgext_ prefix
111 obj = __import__("hgext_%s" % modname)
112 except ImportError:
113 raise util.Abort(_('%s hook is invalid '
114 '(import of "%s" failed)') %
115 (hname, modname))
116 try:
117 for p in funcname.split('.')[1:]:
118 obj = getattr(obj, p)
119 except AttributeError, err:
120 raise util.Abort(_('%s hook is invalid '
121 '("%s" is not defined)') %
122 (hname, funcname))
123 if not callable(obj):
124 raise util.Abort(_('%s hook is invalid '
125 '("%s" is not callable)') %
126 (hname, funcname))
127 try:
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
129 except (KeyboardInterrupt, util.SignalInterrupt):
130 raise
131 except Exception, exc:
132 if isinstance(exc, util.Abort):
133 self.ui.warn(_('error: %s hook failed: %s\n') %
134 (hname, exc.args[0] % exc.args[1:]))
135 else:
136 self.ui.warn(_('error: %s hook raised an exception: '
137 '%s\n') % (hname, exc))
138 if throw:
139 raise
140 self.ui.print_exc()
141 return True
142 if r:
143 if throw:
144 raise util.Abort(_('%s hook failed') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
146 return r
147
148 def runhook(name, cmd):
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
151 r = util.system(cmd, environ=env, cwd=self.root)
152 if r:
153 desc, r = util.explain_exit(r)
154 if throw:
155 raise util.Abort(_('%s hook %s') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
157 return r
158
159 r = False
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
161 if hname.split(".", 1)[0] == name and cmd]
162 hooks.sort()
163 for hname, cmd in hooks:
164 if cmd.startswith('python:'):
165 r = callhook(hname, cmd[7:].strip()) or r
166 else:
167 r = runhook(hname, cmd) or r
168 return r
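# Illustrative hgrc wiring for the dispatcher above (the hook names, module
# path and command are made up):
#
#   [hooks]
#   commit = echo committed $HG_NODE
#   pretxncommit.check = python:mychecks.hook
#
# Shell hooks receive the keyword arguments as HG_* environment variables via
# runhook(); entries starting with "python:" are resolved by callhook()
# through __import__ and called with ui, repo and hooktype.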
169
170 tag_disallowed = ':\r\n'
171
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
173 '''tag a revision with a symbolic name.
174
175 if local is True, the tag is stored in a per-repository file.
176 otherwise, it is stored in the .hgtags file, and a new
177 changeset is committed with the change.
178
179 keyword arguments:
180
181 local: whether to store tag in non-version-controlled file
182 (default False)
183
184 message: commit message to use if committing
185
186 user: name of user to use if committing
187
188 date: date tuple to use if committing'''
189
190 for c in self.tag_disallowed:
191 if c in name:
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
193
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
195
196 if local:
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
198 self.hook('tag', node=node, tag=name, local=local)
199 return
200
201 for x in self.changes():
202 if '.hgtags' in x:
203 raise util.Abort(_('working copy of .hgtags is changed '
204 '(please commit .hgtags manually)'))
205
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
207 if self.dirstate.state('.hgtags') == '?':
208 self.add(['.hgtags'])
209
210 if not message:
211 message = _('Added tag %s for changeset %s') % (name, node)
212
213 self.commit(['.hgtags'], message, user, date)
214 self.hook('tag', node=node, tag=name, local=local)
215
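# For reference, both the localtags file and .hgtags written above hold one
# "<full hex node> <tag name>" entry per line, e.g. (fabricated hash):
#
#   0123456789abcdef0123456789abcdef01234567 release-0.9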
216 def tags(self):
217 '''return a mapping of tag to node'''
218 if not self.tagscache:
219 self.tagscache = {}
220
221 def parsetag(line, context):
222 if not line:
223 return
224 s = line.split(" ", 1)
225 if len(s) != 2:
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 return
228 node, key = s
229 key = key.strip()
230 try:
231 bin_n = bin(node)
232 except TypeError:
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 (context, node))
235 return
236 if bin_n not in self.changelog.nodemap:
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 (context, key))
239 return
240 self.tagscache[key] = bin_n
241
242 # read the tags file from each head, ending with the tip,
243 # and add each tag found to the map, with "newer" ones
244 # taking precedence
245 heads = self.heads()
246 heads.reverse()
247 fl = self.file(".hgtags")
248 for node in heads:
249 change = self.changelog.read(node)
250 rev = self.changelog.rev(node)
251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 if fn is None: continue
253 count = 0
254 for l in fl.read(fn).splitlines():
255 count += 1
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 (rev, short(node), count))
258 try:
259 f = self.opener("localtags")
260 count = 0
261 for l in f:
262 count += 1
263 parsetag(l, _("localtags, line %d") % count)
264 except IOError:
265 pass
266
267 self.tagscache['tip'] = self.changelog.tip()
268
269 return self.tagscache
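# A minimal sketch of how this cache is typically consumed (assuming a
# localrepository instance named repo and the short() helper imported above):
#
#   for name, node in repo.tags().items():
#       repo.ui.write("%s %s\n" % (name, short(node)))
#
# 'tip' is always present because it is injected unconditionally above.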
270
271 def tagslist(self):
272 '''return a list of tags ordered by revision'''
273 l = []
274 for t, n in self.tags().items():
275 try:
276 r = self.changelog.rev(n)
277 except:
278 r = -2 # sort to the beginning of the list if unknown
279 l.append((r, t, n))
280 l.sort()
281 return [(t, n) for r, t, n in l]
282
283 def nodetags(self, node):
284 '''return the tags associated with a node'''
285 if not self.nodetagscache:
286 self.nodetagscache = {}
287 for t, n in self.tags().items():
288 self.nodetagscache.setdefault(n, []).append(t)
289 return self.nodetagscache.get(node, [])
290
291 def lookup(self, key):
292 try:
293 return self.tags()[key]
294 except KeyError:
295 if key == '.':
296 key = self.dirstate.parents()[0]
297 if key == nullid:
298 raise repo.RepoError(_("no revision checked out"))
299 try:
300 return self.changelog.lookup(key)
301 except:
302 raise repo.RepoError(_("unknown revision '%s'") % key)
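# lookup() therefore resolves, in order: a tag name, '.' (the first parent of
# the working directory), and finally whatever changelog.lookup() accepts
# (a local revision number or a full/short hex node). Illustrative calls:
#
#   repo.lookup('tip'); repo.lookup('.'); repo.lookup('0')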
303
304 def dev(self):
305 return os.lstat(self.path).st_dev
306
307 def local(self):
308 return True
309
310 def join(self, f):
311 return os.path.join(self.path, f)
312
313 def wjoin(self, f):
314 return os.path.join(self.root, f)
315
316 def file(self, f):
317 if f[0] == '/':
318 f = f[1:]
319 return filelog.filelog(self.opener, f, self.revlogversion)
320
321 def changectx(self, changeid):
322 return context.changectx(self, changeid)
323
324 def filectx(self, path, changeid=None, fileid=None):
325 """changeid can be a changeset revision, node, or tag.
326 fileid can be a file revision or node."""
327 return context.filectx(self, path, changeid, fileid)
328
329 def getcwd(self):
330 return self.dirstate.getcwd()
331
332 def wfile(self, f, mode='r'):
333 return self.wopener(f, mode)
334
335 def wread(self, filename):
336 if self.encodepats == None:
337 l = []
338 for pat, cmd in self.ui.configitems("encode"):
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
340 l.append((mf, cmd))
341 self.encodepats = l
342
343 data = self.wopener(filename, 'r').read()
344
345 for mf, cmd in self.encodepats:
346 if mf(filename):
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
348 data = util.filter(data, cmd)
349 break
350
351 return data
352
353 def wwrite(self, filename, data, fd=None):
354 if self.decodepats == None:
355 l = []
356 for pat, cmd in self.ui.configitems("decode"):
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
358 l.append((mf, cmd))
359 self.decodepats = l
360
361 for mf, cmd in self.decodepats:
362 if mf(filename):
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
364 data = util.filter(data, cmd)
365 break
366
367 if fd:
368 return fd.write(data)
369 return self.wopener(filename, 'w').write(data)
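# The filters above are configured per repository; a hedged example of the
# hgrc sections they read (pattern and commands are placeholders only):
#
#   [encode]
#   **.txt = tr -d '\r'
#   [decode]
#   **.txt = unix2dos
#
# wread() pipes working-directory data through the matching [encode] command
# with util.filter(); wwrite() applies the matching [decode] command on the
# way back out.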
366
370
367 def transaction(self):
371 def transaction(self):
368 tr = self.transhandle
372 tr = self.transhandle
369 if tr != None and tr.running():
373 if tr != None and tr.running():
370 return tr.nest()
374 return tr.nest()
371
375
372 # save dirstate for rollback
376 # save dirstate for rollback
373 try:
377 try:
374 ds = self.opener("dirstate").read()
378 ds = self.opener("dirstate").read()
375 except IOError:
379 except IOError:
376 ds = ""
380 ds = ""
377 self.opener("journal.dirstate", "w").write(ds)
381 self.opener("journal.dirstate", "w").write(ds)
378
382
379 tr = transaction.transaction(self.ui.warn, self.opener,
383 tr = transaction.transaction(self.ui.warn, self.opener,
380 self.join("journal"),
384 self.join("journal"),
381 aftertrans(self.path))
385 aftertrans(self.path))
382 self.transhandle = tr
386 self.transhandle = tr
383 return tr
387 return tr
384
388
385 def recover(self):
389 def recover(self):
386 l = self.lock()
390 l = self.lock()
387 if os.path.exists(self.join("journal")):
391 if os.path.exists(self.join("journal")):
388 self.ui.status(_("rolling back interrupted transaction\n"))
392 self.ui.status(_("rolling back interrupted transaction\n"))
389 transaction.rollback(self.opener, self.join("journal"))
393 transaction.rollback(self.opener, self.join("journal"))
390 self.reload()
394 self.reload()
391 return True
395 return True
392 else:
396 else:
393 self.ui.warn(_("no interrupted transaction available\n"))
397 self.ui.warn(_("no interrupted transaction available\n"))
394 return False
398 return False
395
399
396 def rollback(self, wlock=None):
400 def rollback(self, wlock=None):
397 if not wlock:
401 if not wlock:
398 wlock = self.wlock()
402 wlock = self.wlock()
399 l = self.lock()
403 l = self.lock()
400 if os.path.exists(self.join("undo")):
404 if os.path.exists(self.join("undo")):
401 self.ui.status(_("rolling back last transaction\n"))
405 self.ui.status(_("rolling back last transaction\n"))
402 transaction.rollback(self.opener, self.join("undo"))
406 transaction.rollback(self.opener, self.join("undo"))
403 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
404 self.reload()
408 self.reload()
405 self.wreload()
409 self.wreload()
406 else:
410 else:
407 self.ui.warn(_("no rollback information available\n"))
411 self.ui.warn(_("no rollback information available\n"))
408
412
409 def wreload(self):
413 def wreload(self):
410 self.dirstate.read()
414 self.dirstate.read()
411
415
412 def reload(self):
416 def reload(self):
413 self.changelog.load()
417 self.changelog.load()
414 self.manifest.load()
418 self.manifest.load()
415 self.tagscache = None
419 self.tagscache = None
416 self.nodetagscache = None
420 self.nodetagscache = None
417
421
418 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
419 desc=None):
423 desc=None):
420 try:
424 try:
421 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
422 except lock.LockHeld, inst:
426 except lock.LockHeld, inst:
423 if not wait:
427 if not wait:
424 raise
428 raise
425 self.ui.warn(_("waiting for lock on %s held by %s\n") %
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
426 (desc, inst.args[0]))
430 (desc, inst.args[0]))
427 # default to 600 seconds timeout
431 # default to 600 seconds timeout
428 l = lock.lock(self.join(lockname),
432 l = lock.lock(self.join(lockname),
429 int(self.ui.config("ui", "timeout") or 600),
433 int(self.ui.config("ui", "timeout") or 600),
430 releasefn, desc=desc)
434 releasefn, desc=desc)
431 if acquirefn:
435 if acquirefn:
432 acquirefn()
436 acquirefn()
433 return l
437 return l
434
438
435 def lock(self, wait=1):
439 def lock(self, wait=1):
436 return self.do_lock("lock", wait, acquirefn=self.reload,
440 return self.do_lock("lock", wait, acquirefn=self.reload,
437 desc=_('repository %s') % self.origroot)
441 desc=_('repository %s') % self.origroot)
438
442
439 def wlock(self, wait=1):
443 def wlock(self, wait=1):
440 return self.do_lock("wlock", wait, self.dirstate.write,
444 return self.do_lock("wlock", wait, self.dirstate.write,
441 self.wreload,
445 self.wreload,
442 desc=_('working directory of %s') % self.origroot)
446 desc=_('working directory of %s') % self.origroot)
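# Lock ordering used throughout this class: the working-directory lock
# (wlock) is taken before the store lock (lock), as commit() and rollback()
# below do. When waiting, do_lock() bounds the wait with the hgrc ui.timeout
# setting (600 seconds by default), e.g. (illustrative):
#
#   [ui]
#   timeout = 30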
443
447
444 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
445 "determine whether a new filenode is needed"
449 "determine whether a new filenode is needed"
446 fp1 = manifest1.get(filename, nullid)
450 fp1 = manifest1.get(filename, nullid)
447 fp2 = manifest2.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
448
452
449 if fp2 != nullid:
453 if fp2 != nullid:
450 # is one parent an ancestor of the other?
454 # is one parent an ancestor of the other?
451 fpa = filelog.ancestor(fp1, fp2)
455 fpa = filelog.ancestor(fp1, fp2)
452 if fpa == fp1:
456 if fpa == fp1:
453 fp1, fp2 = fp2, nullid
457 fp1, fp2 = fp2, nullid
454 elif fpa == fp2:
458 elif fpa == fp2:
455 fp2 = nullid
459 fp2 = nullid
456
460
457 # is the file unmodified from the parent? report existing entry
461 # is the file unmodified from the parent? report existing entry
458 if fp2 == nullid and text == filelog.read(fp1):
462 if fp2 == nullid and text == filelog.read(fp1):
459 return (fp1, None, None)
463 return (fp1, None, None)
460
464
461 return (None, fp1, fp2)
465 return (None, fp1, fp2)
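# Return shapes, for reference: (existing_node, None, None) when the text
# matches the single surviving parent (so the caller reuses that filelog
# entry), otherwise (None, fp1, fp2) and the caller adds a new filelog
# revision with those parents.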
462
466
463 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
464 orig_parent = self.dirstate.parents()[0] or nullid
468 orig_parent = self.dirstate.parents()[0] or nullid
465 p1 = p1 or self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
466 p2 = p2 or self.dirstate.parents()[1] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
467 c1 = self.changelog.read(p1)
471 c1 = self.changelog.read(p1)
468 c2 = self.changelog.read(p2)
472 c2 = self.changelog.read(p2)
469 m1 = self.manifest.read(c1[0])
473 m1 = self.manifest.read(c1[0])
470 mf1 = self.manifest.readflags(c1[0])
474 mf1 = self.manifest.readflags(c1[0])
471 m2 = self.manifest.read(c2[0])
475 m2 = self.manifest.read(c2[0])
472 changed = []
476 changed = []
473
477
474 if orig_parent == p1:
478 if orig_parent == p1:
475 update_dirstate = 1
479 update_dirstate = 1
476 else:
480 else:
477 update_dirstate = 0
481 update_dirstate = 0
478
482
479 if not wlock:
483 if not wlock:
480 wlock = self.wlock()
484 wlock = self.wlock()
481 l = self.lock()
485 l = self.lock()
482 tr = self.transaction()
486 tr = self.transaction()
483 mm = m1.copy()
487 mm = m1.copy()
484 mfm = mf1.copy()
488 mfm = mf1.copy()
485 linkrev = self.changelog.count()
489 linkrev = self.changelog.count()
486 for f in files:
490 for f in files:
487 try:
491 try:
488 t = self.wread(f)
492 t = self.wread(f)
489 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
493 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
490 r = self.file(f)
494 r = self.file(f)
491 mfm[f] = tm
495 mfm[f] = tm
492
496
493 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
497 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
494 if entry:
498 if entry:
495 mm[f] = entry
499 mm[f] = entry
496 continue
500 continue
497
501
498 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
502 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
499 changed.append(f)
503 changed.append(f)
500 if update_dirstate:
504 if update_dirstate:
501 self.dirstate.update([f], "n")
505 self.dirstate.update([f], "n")
502 except IOError:
506 except IOError:
503 try:
507 try:
504 del mm[f]
508 del mm[f]
505 del mfm[f]
509 del mfm[f]
506 if update_dirstate:
510 if update_dirstate:
507 self.dirstate.forget([f])
511 self.dirstate.forget([f])
508 except:
512 except:
509 # deleted from p2?
513 # deleted from p2?
510 pass
514 pass
511
515
512 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
516 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
513 user = user or self.ui.username()
517 user = user or self.ui.username()
514 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
518 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
515 tr.close()
519 tr.close()
516 if update_dirstate:
520 if update_dirstate:
517 self.dirstate.setparents(n, nullid)
521 self.dirstate.setparents(n, nullid)
518
522
519 def commit(self, files=None, text="", user=None, date=None,
523 def commit(self, files=None, text="", user=None, date=None,
520 match=util.always, force=False, lock=None, wlock=None,
524 match=util.always, force=False, lock=None, wlock=None,
521 force_editor=False):
525 force_editor=False):
522 commit = []
526 commit = []
523 remove = []
527 remove = []
524 changed = []
528 changed = []
525
529
526 if files:
530 if files:
527 for f in files:
531 for f in files:
528 s = self.dirstate.state(f)
532 s = self.dirstate.state(f)
529 if s in 'nmai':
533 if s in 'nmai':
530 commit.append(f)
534 commit.append(f)
531 elif s == 'r':
535 elif s == 'r':
532 remove.append(f)
536 remove.append(f)
533 else:
537 else:
534 self.ui.warn(_("%s not tracked!\n") % f)
538 self.ui.warn(_("%s not tracked!\n") % f)
535 else:
539 else:
536 modified, added, removed, deleted, unknown = self.changes(match=match)
540 modified, added, removed, deleted, unknown = self.changes(match=match)
537 commit = modified + added
541 commit = modified + added
538 remove = removed
542 remove = removed
539
543
540 p1, p2 = self.dirstate.parents()
544 p1, p2 = self.dirstate.parents()
541 c1 = self.changelog.read(p1)
545 c1 = self.changelog.read(p1)
542 c2 = self.changelog.read(p2)
546 c2 = self.changelog.read(p2)
543 m1 = self.manifest.read(c1[0])
547 m1 = self.manifest.read(c1[0])
544 mf1 = self.manifest.readflags(c1[0])
548 mf1 = self.manifest.readflags(c1[0])
545 m2 = self.manifest.read(c2[0])
549 m2 = self.manifest.read(c2[0])
546
550
547 if not commit and not remove and not force and p2 == nullid:
551 if not commit and not remove and not force and p2 == nullid:
548 self.ui.status(_("nothing changed\n"))
552 self.ui.status(_("nothing changed\n"))
549 return None
553 return None
550
554
551 xp1 = hex(p1)
555 xp1 = hex(p1)
552 if p2 == nullid: xp2 = ''
556 if p2 == nullid: xp2 = ''
553 else: xp2 = hex(p2)
557 else: xp2 = hex(p2)
554
558
555 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
559 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
556
560
557 if not wlock:
561 if not wlock:
558 wlock = self.wlock()
562 wlock = self.wlock()
559 if not lock:
563 if not lock:
560 lock = self.lock()
564 lock = self.lock()
561 tr = self.transaction()
565 tr = self.transaction()
562
566
563 # check in files
567 # check in files
564 new = {}
568 new = {}
565 linkrev = self.changelog.count()
569 linkrev = self.changelog.count()
566 commit.sort()
570 commit.sort()
567 for f in commit:
571 for f in commit:
568 self.ui.note(f + "\n")
572 self.ui.note(f + "\n")
569 try:
573 try:
570 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
574 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
571 t = self.wread(f)
575 t = self.wread(f)
572 except IOError:
576 except IOError:
573 self.ui.warn(_("trouble committing %s!\n") % f)
577 self.ui.warn(_("trouble committing %s!\n") % f)
574 raise
578 raise
575
579
576 r = self.file(f)
580 r = self.file(f)
577
581
578 meta = {}
582 meta = {}
579 cp = self.dirstate.copied(f)
583 cp = self.dirstate.copied(f)
580 if cp:
584 if cp:
581 meta["copy"] = cp
585 meta["copy"] = cp
582 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
586 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
583 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
587 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
584 fp1, fp2 = nullid, nullid
588 fp1, fp2 = nullid, nullid
585 else:
589 else:
586 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
590 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
587 if entry:
591 if entry:
588 new[f] = entry
592 new[f] = entry
589 continue
593 continue
590
594
591 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
595 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
592 # remember what we've added so that we can later calculate
596 # remember what we've added so that we can later calculate
593 # the files to pull from a set of changesets
597 # the files to pull from a set of changesets
594 changed.append(f)
598 changed.append(f)
595
599
596 # update manifest
600 # update manifest
597 m1 = m1.copy()
601 m1 = m1.copy()
598 m1.update(new)
602 m1.update(new)
599 for f in remove:
603 for f in remove:
600 if f in m1:
604 if f in m1:
601 del m1[f]
605 del m1[f]
602 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
606 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
603 (new, remove))
607 (new, remove))
604
608
605 # add changeset
609 # add changeset
606 new = new.keys()
610 new = new.keys()
607 new.sort()
611 new.sort()
608
612
609 user = user or self.ui.username()
613 user = user or self.ui.username()
610 if not text or force_editor:
614 if not text or force_editor:
611 edittext = []
615 edittext = []
612 if text:
616 if text:
613 edittext.append(text)
617 edittext.append(text)
614 edittext.append("")
618 edittext.append("")
615 if p2 != nullid:
619 if p2 != nullid:
616 edittext.append("HG: branch merge")
620 edittext.append("HG: branch merge")
617 edittext.extend(["HG: changed %s" % f for f in changed])
621 edittext.extend(["HG: changed %s" % f for f in changed])
618 edittext.extend(["HG: removed %s" % f for f in remove])
622 edittext.extend(["HG: removed %s" % f for f in remove])
619 if not changed and not remove:
623 if not changed and not remove:
620 edittext.append("HG: no files changed")
624 edittext.append("HG: no files changed")
621 edittext.append("")
625 edittext.append("")
622 # run editor in the repository root
626 # run editor in the repository root
623 olddir = os.getcwd()
627 olddir = os.getcwd()
624 os.chdir(self.root)
628 os.chdir(self.root)
625 text = self.ui.edit("\n".join(edittext), user)
629 text = self.ui.edit("\n".join(edittext), user)
626 os.chdir(olddir)
630 os.chdir(olddir)
627
631
628 lines = [line.rstrip() for line in text.rstrip().splitlines()]
632 lines = [line.rstrip() for line in text.rstrip().splitlines()]
629 while lines and not lines[0]:
633 while lines and not lines[0]:
630 del lines[0]
634 del lines[0]
631 if not lines:
635 if not lines:
632 return None
636 return None
633 text = '\n'.join(lines)
637 text = '\n'.join(lines)
634 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
638 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
635 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
639 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
636 parent2=xp2)
640 parent2=xp2)
637 tr.close()
641 tr.close()
638
642
639 self.dirstate.setparents(n)
643 self.dirstate.setparents(n)
640 self.dirstate.update(new, "n")
644 self.dirstate.update(new, "n")
641 self.dirstate.forget(remove)
645 self.dirstate.forget(remove)
642
646
643 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
647 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
644 return n
648 return n
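# A minimal usage sketch (the file name and message are hypothetical):
#
#   repo.add(['hello.txt'])
#   node = repo.commit(['hello.txt'], text='add hello',
#                      user='Example <user@example.com>')
#
# With files=None, whatever self.changes(match=match) reports as modified,
# added or removed is committed instead.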
645
649
646 def walk(self, node=None, files=[], match=util.always, badmatch=None):
650 def walk(self, node=None, files=[], match=util.always, badmatch=None):
647 if node:
651 if node:
648 fdict = dict.fromkeys(files)
652 fdict = dict.fromkeys(files)
649 for fn in self.manifest.read(self.changelog.read(node)[0]):
653 for fn in self.manifest.read(self.changelog.read(node)[0]):
650 fdict.pop(fn, None)
654 fdict.pop(fn, None)
651 if match(fn):
655 if match(fn):
652 yield 'm', fn
656 yield 'm', fn
653 for fn in fdict:
657 for fn in fdict:
654 if badmatch and badmatch(fn):
658 if badmatch and badmatch(fn):
655 if match(fn):
659 if match(fn):
656 yield 'b', fn
660 yield 'b', fn
657 else:
661 else:
658 self.ui.warn(_('%s: No such file in rev %s\n') % (
662 self.ui.warn(_('%s: No such file in rev %s\n') % (
659 util.pathto(self.getcwd(), fn), short(node)))
663 util.pathto(self.getcwd(), fn), short(node)))
660 else:
664 else:
661 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
665 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
662 yield src, fn
666 yield src, fn
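# walk() yields (src, filename) pairs: 'm' marks files taken from the
# manifest of the requested node, 'b' marks badmatch hits, and when no node
# is given the pairs come straight from dirstate.walk() with its own source
# codes.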
663
667
664 def status(self, node1=None, node2=None, files=[], match=util.always,
668 def status(self, node1=None, node2=None, files=[], match=util.always,
665 wlock=None, list_ignored=False, list_clean=False):
669 wlock=None, list_ignored=False, list_clean=False):
666 """return status of files between two nodes or node and working directory
670 """return status of files between two nodes or node and working directory
667
671
668 If node1 is None, use the first dirstate parent instead.
672 If node1 is None, use the first dirstate parent instead.
669 If node2 is None, compare node1 with working directory.
673 If node2 is None, compare node1 with working directory.
670 """
674 """
671
675
672 def fcmp(fn, mf):
676 def fcmp(fn, mf):
673 t1 = self.wread(fn)
677 t1 = self.wread(fn)
674 t2 = self.file(fn).read(mf.get(fn, nullid))
678 t2 = self.file(fn).read(mf.get(fn, nullid))
675 return cmp(t1, t2)
679 return cmp(t1, t2)
676
680
677 def mfmatches(node):
681 def mfmatches(node):
678 change = self.changelog.read(node)
682 change = self.changelog.read(node)
679 mf = dict(self.manifest.read(change[0]))
683 mf = dict(self.manifest.read(change[0]))
680 for fn in mf.keys():
684 for fn in mf.keys():
681 if not match(fn):
685 if not match(fn):
682 del mf[fn]
686 del mf[fn]
683 return mf
687 return mf
684
688
685 modified, added, removed, deleted, unknown = [], [], [], [], []
689 modified, added, removed, deleted, unknown = [], [], [], [], []
686 ignored, clean = [], []
690 ignored, clean = [], []
687
691
688 compareworking = False
692 compareworking = False
689 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
693 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
690 compareworking = True
694 compareworking = True
691
695
692 if not compareworking:
696 if not compareworking:
693 # read the manifest from node1 before the manifest from node2,
697 # read the manifest from node1 before the manifest from node2,
694 # so that we'll hit the manifest cache if we're going through
698 # so that we'll hit the manifest cache if we're going through
695 # all the revisions in parent->child order.
699 # all the revisions in parent->child order.
696 mf1 = mfmatches(node1)
700 mf1 = mfmatches(node1)
697
701
698 # are we comparing the working directory?
702 # are we comparing the working directory?
699 if not node2:
703 if not node2:
700 if not wlock:
704 if not wlock:
701 try:
705 try:
702 wlock = self.wlock(wait=0)
706 wlock = self.wlock(wait=0)
703 except lock.LockException:
707 except lock.LockException:
704 wlock = None
708 wlock = None
705 (lookup, modified, added, removed, deleted, unknown,
709 (lookup, modified, added, removed, deleted, unknown,
706 ignored, clean) = self.dirstate.status(files, match,
710 ignored, clean) = self.dirstate.status(files, match,
707 list_ignored, list_clean)
711 list_ignored, list_clean)
708
712
709 # are we comparing working dir against its parent?
713 # are we comparing working dir against its parent?
710 if compareworking:
714 if compareworking:
711 if lookup:
715 if lookup:
712 # do a full compare of any files that might have changed
716 # do a full compare of any files that might have changed
713 mf2 = mfmatches(self.dirstate.parents()[0])
717 mf2 = mfmatches(self.dirstate.parents()[0])
714 for f in lookup:
718 for f in lookup:
715 if fcmp(f, mf2):
719 if fcmp(f, mf2):
716 modified.append(f)
720 modified.append(f)
717 elif wlock is not None:
721 elif wlock is not None:
718 self.dirstate.update([f], "n")
722 self.dirstate.update([f], "n")
719 else:
723 else:
720 # we are comparing working dir against non-parent
724 # we are comparing working dir against non-parent
721 # generate a pseudo-manifest for the working dir
725 # generate a pseudo-manifest for the working dir
722 mf2 = mfmatches(self.dirstate.parents()[0])
726 mf2 = mfmatches(self.dirstate.parents()[0])
723 for f in lookup + modified + added:
727 for f in lookup + modified + added:
724 mf2[f] = ""
728 mf2[f] = ""
725 for f in removed:
729 for f in removed:
726 if f in mf2:
730 if f in mf2:
727 del mf2[f]
731 del mf2[f]
728 else:
732 else:
729 # we are comparing two revisions
733 # we are comparing two revisions
730 mf2 = mfmatches(node2)
734 mf2 = mfmatches(node2)
731
735
732 if not compareworking:
736 if not compareworking:
733 # flush lists from dirstate before comparing manifests
737 # flush lists from dirstate before comparing manifests
734 modified, added, clean = [], [], []
738 modified, added, clean = [], [], []
735
739
736 # make sure to sort the files so we talk to the disk in a
740 # make sure to sort the files so we talk to the disk in a
737 # reasonable order
741 # reasonable order
738 mf2keys = mf2.keys()
742 mf2keys = mf2.keys()
739 mf2keys.sort()
743 mf2keys.sort()
740 for fn in mf2keys:
744 for fn in mf2keys:
741 if mf1.has_key(fn):
745 if mf1.has_key(fn):
742 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
746 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
743 modified.append(fn)
747 modified.append(fn)
744 elif list_clean:
748 elif list_clean:
745 clean.append(fn)
749 clean.append(fn)
746 del mf1[fn]
750 del mf1[fn]
747 else:
751 else:
748 added.append(fn)
752 added.append(fn)
749
753
750 removed = mf1.keys()
754 removed = mf1.keys()
751
755
752 # sort and return results:
756 # sort and return results:
753 for l in modified, added, removed, deleted, unknown, ignored, clean:
757 for l in modified, added, removed, deleted, unknown, ignored, clean:
754 l.sort()
758 l.sort()
755 return (modified, added, removed, deleted, unknown, ignored, clean)
759 return (modified, added, removed, deleted, unknown, ignored, clean)
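# Typical call, comparing the working directory with its first parent and
# unpacking all seven lists (illustrative):
#
#   (modified, added, removed, deleted, unknown,
#    ignored, clean) = repo.status(list_ignored=True, list_clean=True)
#
# ignored and clean stay empty unless the corresponding list_* flag is set.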
756
760
757 def changes(self, node1=None, node2=None, files=[], match=util.always,
761 def changes(self, node1=None, node2=None, files=[], match=util.always,
758 wlock=None, list_ignored=False, list_clean=False):
762 wlock=None, list_ignored=False, list_clean=False):
759 '''DEPRECATED - use status instead'''
763 '''DEPRECATED - use status instead'''
760 marduit = self.status(node1, node2, files, match, wlock,
764 marduit = self.status(node1, node2, files, match, wlock,
761 list_ignored, list_clean)
765 list_ignored, list_clean)
762 if list_ignored:
766 if list_ignored:
763 return marduit[:-1]
767 return marduit[:-1]
764 else:
768 else:
765 return marduit[:-2]
769 return marduit[:-2]
766
770
767 def add(self, list, wlock=None):
771 def add(self, list, wlock=None):
768 if not wlock:
772 if not wlock:
769 wlock = self.wlock()
773 wlock = self.wlock()
770 for f in list:
774 for f in list:
771 p = self.wjoin(f)
775 p = self.wjoin(f)
772 if not os.path.exists(p):
776 if not os.path.exists(p):
773 self.ui.warn(_("%s does not exist!\n") % f)
777 self.ui.warn(_("%s does not exist!\n") % f)
774 elif not os.path.isfile(p):
778 elif not os.path.isfile(p):
775 self.ui.warn(_("%s not added: only files supported currently\n")
779 self.ui.warn(_("%s not added: only files supported currently\n")
776 % f)
780 % f)
777 elif self.dirstate.state(f) in 'an':
781 elif self.dirstate.state(f) in 'an':
778 self.ui.warn(_("%s already tracked!\n") % f)
782 self.ui.warn(_("%s already tracked!\n") % f)
779 else:
783 else:
780 self.dirstate.update([f], "a")
784 self.dirstate.update([f], "a")
781
785
782 def forget(self, list, wlock=None):
786 def forget(self, list, wlock=None):
783 if not wlock:
787 if not wlock:
784 wlock = self.wlock()
788 wlock = self.wlock()
785 for f in list:
789 for f in list:
786 if self.dirstate.state(f) not in 'ai':
790 if self.dirstate.state(f) not in 'ai':
787 self.ui.warn(_("%s not added!\n") % f)
791 self.ui.warn(_("%s not added!\n") % f)
788 else:
792 else:
789 self.dirstate.forget([f])
793 self.dirstate.forget([f])
790
794
791 def remove(self, list, unlink=False, wlock=None):
795 def remove(self, list, unlink=False, wlock=None):
792 if unlink:
796 if unlink:
793 for f in list:
797 for f in list:
794 try:
798 try:
795 util.unlink(self.wjoin(f))
799 util.unlink(self.wjoin(f))
796 except OSError, inst:
800 except OSError, inst:
797 if inst.errno != errno.ENOENT:
801 if inst.errno != errno.ENOENT:
798 raise
802 raise
799 if not wlock:
803 if not wlock:
800 wlock = self.wlock()
804 wlock = self.wlock()
801 for f in list:
805 for f in list:
802 p = self.wjoin(f)
806 p = self.wjoin(f)
803 if os.path.exists(p):
807 if os.path.exists(p):
804 self.ui.warn(_("%s still exists!\n") % f)
808 self.ui.warn(_("%s still exists!\n") % f)
805 elif self.dirstate.state(f) == 'a':
809 elif self.dirstate.state(f) == 'a':
806 self.dirstate.forget([f])
810 self.dirstate.forget([f])
807 elif f not in self.dirstate:
811 elif f not in self.dirstate:
808 self.ui.warn(_("%s not tracked!\n") % f)
812 self.ui.warn(_("%s not tracked!\n") % f)
809 else:
813 else:
810 self.dirstate.update([f], "r")
814 self.dirstate.update([f], "r")
811
815
812 def undelete(self, list, wlock=None):
816 def undelete(self, list, wlock=None):
813 p = self.dirstate.parents()[0]
817 p = self.dirstate.parents()[0]
814 mn = self.changelog.read(p)[0]
818 mn = self.changelog.read(p)[0]
815 mf = self.manifest.readflags(mn)
819 mf = self.manifest.readflags(mn)
816 m = self.manifest.read(mn)
820 m = self.manifest.read(mn)
817 if not wlock:
821 if not wlock:
818 wlock = self.wlock()
822 wlock = self.wlock()
819 for f in list:
823 for f in list:
820 if self.dirstate.state(f) not in "r":
824 if self.dirstate.state(f) not in "r":
821 self.ui.warn("%s not removed!\n" % f)
825 self.ui.warn("%s not removed!\n" % f)
822 else:
826 else:
823 t = self.file(f).read(m[f])
827 t = self.file(f).read(m[f])
824 self.wwrite(f, t)
828 self.wwrite(f, t)
825 util.set_exec(self.wjoin(f), mf[f])
829 util.set_exec(self.wjoin(f), mf[f])
826 self.dirstate.update([f], "n")
830 self.dirstate.update([f], "n")
827
831
828 def copy(self, source, dest, wlock=None):
832 def copy(self, source, dest, wlock=None):
829 p = self.wjoin(dest)
833 p = self.wjoin(dest)
830 if not os.path.exists(p):
834 if not os.path.exists(p):
831 self.ui.warn(_("%s does not exist!\n") % dest)
835 self.ui.warn(_("%s does not exist!\n") % dest)
832 elif not os.path.isfile(p):
836 elif not os.path.isfile(p):
833 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
837 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
834 else:
838 else:
835 if not wlock:
839 if not wlock:
836 wlock = self.wlock()
840 wlock = self.wlock()
837 if self.dirstate.state(dest) == '?':
841 if self.dirstate.state(dest) == '?':
838 self.dirstate.update([dest], "a")
842 self.dirstate.update([dest], "a")
839 self.dirstate.copy(source, dest)
843 self.dirstate.copy(source, dest)
840
844
841 def heads(self, start=None):
845 def heads(self, start=None):
842 heads = self.changelog.heads(start)
846 heads = self.changelog.heads(start)
843 # sort the output in rev descending order
847 # sort the output in rev descending order
844 heads = [(-self.changelog.rev(h), h) for h in heads]
848 heads = [(-self.changelog.rev(h), h) for h in heads]
845 heads.sort()
849 heads.sort()
846 return [n for (r, n) in heads]
850 return [n for (r, n) in heads]
847
851
852 # branchlookup returns a dict giving a list of branches for
853 # each head. A branch is defined as the tag of a node or
854 # the branch of the node's parents. If a node has multiple
855 # branch tags, tags are eliminated if they are visible from other
856 # branch tags.
857 #
858 # So, for this graph:  a->b->c->d->e
859 #                       \         /
860 #                        aa -----/
861 # a has tag 2.6.12
862 # d has tag 2.6.13
863 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
864 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
865 # from the list.
866 #
867 # It is possible that more than one head will have the same branch tag.
868 # callers need to check the result for multiple heads under the same
869 # branch tag if that is a problem for them (i.e. checkout of a specific
870 # branch).
871 #
872 # passing in a specific branch will limit the depth of the search
873 # through the parents. It won't limit the branches returned in the
874 # result though.
871 def branchlookup(self, heads=None, branch=None):
875 def branchlookup(self, heads=None, branch=None):
872 if not heads:
876 if not heads:
873 heads = self.heads()
877 heads = self.heads()
874 headt = [ h for h in heads ]
878 headt = [ h for h in heads ]
875 chlog = self.changelog
879 chlog = self.changelog
876 branches = {}
880 branches = {}
877 merges = []
881 merges = []
878 seenmerge = {}
882 seenmerge = {}
879
883
880 # traverse the tree once for each head, recording in the branches
884 # traverse the tree once for each head, recording in the branches
881 # dict which tags are visible from this head. The branches
885 # dict which tags are visible from this head. The branches
882 # dict also records which tags are visible from each tag
886 # dict also records which tags are visible from each tag
883 # while we traverse.
887 # while we traverse.
884 while headt or merges:
888 while headt or merges:
885 if merges:
889 if merges:
886 n, found = merges.pop()
890 n, found = merges.pop()
887 visit = [n]
891 visit = [n]
888 else:
892 else:
889 h = headt.pop()
893 h = headt.pop()
890 visit = [h]
894 visit = [h]
891 found = [h]
895 found = [h]
892 seen = {}
896 seen = {}
893 while visit:
897 while visit:
894 n = visit.pop()
898 n = visit.pop()
895 if n in seen:
899 if n in seen:
896 continue
900 continue
897 pp = chlog.parents(n)
901 pp = chlog.parents(n)
898 tags = self.nodetags(n)
902 tags = self.nodetags(n)
899 if tags:
903 if tags:
900 for x in tags:
904 for x in tags:
901 if x == 'tip':
905 if x == 'tip':
902 continue
906 continue
903 for f in found:
907 for f in found:
904 branches.setdefault(f, {})[n] = 1
908 branches.setdefault(f, {})[n] = 1
905 branches.setdefault(n, {})[n] = 1
909 branches.setdefault(n, {})[n] = 1
906 break
910 break
907 if n not in found:
911 if n not in found:
908 found.append(n)
912 found.append(n)
909 if branch in tags:
913 if branch in tags:
910 continue
914 continue
911 seen[n] = 1
915 seen[n] = 1
912 if pp[1] != nullid and n not in seenmerge:
916 if pp[1] != nullid and n not in seenmerge:
913 merges.append((pp[1], [x for x in found]))
917 merges.append((pp[1], [x for x in found]))
914 seenmerge[n] = 1
918 seenmerge[n] = 1
915 if pp[0] != nullid:
919 if pp[0] != nullid:
916 visit.append(pp[0])
920 visit.append(pp[0])
917 # traverse the branches dict, eliminating branch tags from each
921 # traverse the branches dict, eliminating branch tags from each
918 # head that are visible from another branch tag for that head.
922 # head that are visible from another branch tag for that head.
919 out = {}
923 out = {}
920 viscache = {}
924 viscache = {}
921 for h in heads:
925 for h in heads:
922 def visible(node):
926 def visible(node):
923 if node in viscache:
927 if node in viscache:
924 return viscache[node]
928 return viscache[node]
925 ret = {}
929 ret = {}
926 visit = [node]
930 visit = [node]
927 while visit:
931 while visit:
928 x = visit.pop()
932 x = visit.pop()
929 if x in viscache:
933 if x in viscache:
930 ret.update(viscache[x])
934 ret.update(viscache[x])
931 elif x not in ret:
935 elif x not in ret:
932 ret[x] = 1
936 ret[x] = 1
933 if x in branches:
937 if x in branches:
934 visit[len(visit):] = branches[x].keys()
938 visit[len(visit):] = branches[x].keys()
935 viscache[node] = ret
939 viscache[node] = ret
936 return ret
940 return ret
937 if h not in branches:
941 if h not in branches:
938 continue
942 continue
939 # O(n^2), but somewhat limited. This only searches the
943 # O(n^2), but somewhat limited. This only searches the
940 # tags visible from a specific head, not all the tags in the
944 # tags visible from a specific head, not all the tags in the
941 # whole repo.
945 # whole repo.
942 for b in branches[h]:
946 for b in branches[h]:
943 vis = False
947 vis = False
944 for bb in branches[h].keys():
948 for bb in branches[h].keys():
945 if b != bb:
949 if b != bb:
946 if b in visible(bb):
950 if b in visible(bb):
947 vis = True
951 vis = True
948 break
952 break
949 if not vis:
953 if not vis:
950 l = out.setdefault(h, [])
954 l = out.setdefault(h, [])
951 l[len(l):] = self.nodetags(b)
955 l[len(l):] = self.nodetags(b)
952 return out
956 return out
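# The result maps each head node to the branch tags that survive the
# visibility pruning, roughly {headnode: [tagname, ...]}; heads whose tags
# were all eliminated simply do not appear in the dict.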
953
957
954 def branches(self, nodes):
958 def branches(self, nodes):
955 if not nodes:
959 if not nodes:
956 nodes = [self.changelog.tip()]
960 nodes = [self.changelog.tip()]
957 b = []
961 b = []
958 for n in nodes:
962 for n in nodes:
959 t = n
963 t = n
960 while 1:
964 while 1:
961 p = self.changelog.parents(n)
965 p = self.changelog.parents(n)
962 if p[1] != nullid or p[0] == nullid:
966 if p[1] != nullid or p[0] == nullid:
963 b.append((t, n, p[0], p[1]))
967 b.append((t, n, p[0], p[1]))
964 break
968 break
965 n = p[0]
969 n = p[0]
966 return b
970 return b
967
971
968 def between(self, pairs):
972 def between(self, pairs):
969 r = []
973 r = []
970
974
971 for top, bottom in pairs:
975 for top, bottom in pairs:
972 n, l, i = top, [], 0
976 n, l, i = top, [], 0
973 f = 1
977 f = 1
974
978
975 while n != bottom:
979 while n != bottom:
976 p = self.changelog.parents(n)[0]
980 p = self.changelog.parents(n)[0]
977 if i == f:
981 if i == f:
978 l.append(n)
982 l.append(n)
979 f = f * 2
983 f = f * 2
980 n = p
984 n = p
981 i += 1
985 i += 1
982
986
983 r.append(l)
987 r.append(l)
984
988
985 return r
989 return r
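# between() samples the first-parent chain from top towards bottom at
# exponentially growing distances (steps 1, 2, 4, 8, ...); findincoming()
# below feeds these samples back into its binary search for the first
# unknown changeset on a branch.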
986
990
991 def findincoming(self, remote, base=None, heads=None, force=False):
992 """Return list of roots of the subsets of missing nodes from remote
993
994 If base dict is specified, assume that these nodes and their parents
995 exist on the remote side and that no child of a node of base exists
996 in both remote and self.
997 Furthermore, base will be updated to include the nodes that exist in
998 self and remote but have no children existing in both self and remote.
999 If a list of heads is specified, return only nodes which are heads
1000 or ancestors of these heads.
1001
1002 All the ancestors of base are in self and in remote.
1003 All the descendants of the list returned are missing in self.
1004 (and so we know that the rest of the nodes are missing in remote, see
1005 outgoing)
1006 """
1003 m = self.changelog.nodemap
1007 m = self.changelog.nodemap
1004 search = []
1008 search = []
1005 fetch = {}
1009 fetch = {}
1006 seen = {}
1010 seen = {}
1007 seenbranch = {}
1011 seenbranch = {}
1008 if base == None:
1012 if base == None:
1009 base = {}
1013 base = {}
1010
1014
1011 if not heads:
1015 if not heads:
1012 heads = remote.heads()
1016 heads = remote.heads()
1013
1017
1014 if self.changelog.tip() == nullid:
1018 if self.changelog.tip() == nullid:
1015 base[nullid] = 1
1019 base[nullid] = 1
1016 if heads != [nullid]:
1020 if heads != [nullid]:
1017 return [nullid]
1021 return [nullid]
1018 return []
1022 return []
1019
1023
1020 # assume we're closer to the tip than the root
1024 # assume we're closer to the tip than the root
1021 # and start by examining the heads
1025 # and start by examining the heads
1022 self.ui.status(_("searching for changes\n"))
1026 self.ui.status(_("searching for changes\n"))
1023
1027
1024 unknown = []
1028 unknown = []
1025 for h in heads:
1029 for h in heads:
1026 if h not in m:
1030 if h not in m:
1027 unknown.append(h)
1031 unknown.append(h)
1028 else:
1032 else:
1029 base[h] = 1
1033 base[h] = 1
1030
1034
1031 if not unknown:
1035 if not unknown:
1032 return []
1036 return []
1033
1037
1034 req = dict.fromkeys(unknown)
1038 req = dict.fromkeys(unknown)
1035 reqcnt = 0
1039 reqcnt = 0
1036
1040
1037 # search through remote branches
1041 # search through remote branches
1038 # a 'branch' here is a linear segment of history, with four parts:
1042 # a 'branch' here is a linear segment of history, with four parts:
1039 # head, root, first parent, second parent
1043 # head, root, first parent, second parent
1040 # (a branch always has two parents (or none) by definition)
1044 # (a branch always has two parents (or none) by definition)
1041 unknown = remote.branches(unknown)
1045 unknown = remote.branches(unknown)
1042 while unknown:
1046 while unknown:
1043 r = []
1047 r = []
1044 while unknown:
1048 while unknown:
1045 n = unknown.pop(0)
1049 n = unknown.pop(0)
1046 if n[0] in seen:
1050 if n[0] in seen:
1047 continue
1051 continue
1048
1052
1049 self.ui.debug(_("examining %s:%s\n")
1053 self.ui.debug(_("examining %s:%s\n")
1050 % (short(n[0]), short(n[1])))
1054 % (short(n[0]), short(n[1])))
1051 if n[0] == nullid: # found the end of the branch
1055 if n[0] == nullid: # found the end of the branch
1052 pass
1056 pass
1053 elif n in seenbranch:
1057 elif n in seenbranch:
1054 self.ui.debug(_("branch already found\n"))
1058 self.ui.debug(_("branch already found\n"))
1055 continue
1059 continue
1056 elif n[1] and n[1] in m: # do we know the base?
1060 elif n[1] and n[1] in m: # do we know the base?
1057 self.ui.debug(_("found incomplete branch %s:%s\n")
1061 self.ui.debug(_("found incomplete branch %s:%s\n")
1058 % (short(n[0]), short(n[1])))
1062 % (short(n[0]), short(n[1])))
1059 search.append(n) # schedule branch range for scanning
1063 search.append(n) # schedule branch range for scanning
1060 seenbranch[n] = 1
1064 seenbranch[n] = 1
1061 else:
1065 else:
1062 if n[1] not in seen and n[1] not in fetch:
1066 if n[1] not in seen and n[1] not in fetch:
1063 if n[2] in m and n[3] in m:
1067 if n[2] in m and n[3] in m:
1064 self.ui.debug(_("found new changeset %s\n") %
1068 self.ui.debug(_("found new changeset %s\n") %
1065 short(n[1]))
1069 short(n[1]))
1066 fetch[n[1]] = 1 # earliest unknown
1070 fetch[n[1]] = 1 # earliest unknown
1067 for p in n[2:4]:
1071 for p in n[2:4]:
1068 if p in m:
1072 if p in m:
1069 base[p] = 1 # latest known
1073 base[p] = 1 # latest known
1070
1074
1071 for p in n[2:4]:
1075 for p in n[2:4]:
1072 if p not in req and p not in m:
1076 if p not in req and p not in m:
1073 r.append(p)
1077 r.append(p)
1074 req[p] = 1
1078 req[p] = 1
1075 seen[n[0]] = 1
1079 seen[n[0]] = 1
1076
1080
1077 if r:
1081 if r:
1078 reqcnt += 1
1082 reqcnt += 1
1079 self.ui.debug(_("request %d: %s\n") %
1083 self.ui.debug(_("request %d: %s\n") %
1080 (reqcnt, " ".join(map(short, r))))
1084 (reqcnt, " ".join(map(short, r))))
1081 for p in range(0, len(r), 10):
1085 for p in range(0, len(r), 10):
1082 for b in remote.branches(r[p:p+10]):
1086 for b in remote.branches(r[p:p+10]):
1083 self.ui.debug(_("received %s:%s\n") %
1087 self.ui.debug(_("received %s:%s\n") %
1084 (short(b[0]), short(b[1])))
1088 (short(b[0]), short(b[1])))
1085 unknown.append(b)
1089 unknown.append(b)
1086
1090
1087 # do binary search on the branches we found
1091 # do binary search on the branches we found
1088 while search:
1092 while search:
1089 n = search.pop(0)
1093 n = search.pop(0)
1090 reqcnt += 1
1094 reqcnt += 1
1091 l = remote.between([(n[0], n[1])])[0]
1095 l = remote.between([(n[0], n[1])])[0]
1092 l.append(n[1])
1096 l.append(n[1])
1093 p = n[0]
1097 p = n[0]
1094 f = 1
1098 f = 1
1095 for i in l:
1099 for i in l:
1096 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1100 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1097 if i in m:
1101 if i in m:
1098 if f <= 2:
1102 if f <= 2:
1099 self.ui.debug(_("found new branch changeset %s\n") %
1103 self.ui.debug(_("found new branch changeset %s\n") %
1100 short(p))
1104 short(p))
1101 fetch[p] = 1
1105 fetch[p] = 1
1102 base[i] = 1
1106 base[i] = 1
1103 else:
1107 else:
1104 self.ui.debug(_("narrowed branch search to %s:%s\n")
1108 self.ui.debug(_("narrowed branch search to %s:%s\n")
1105 % (short(p), short(i)))
1109 % (short(p), short(i)))
1106 search.append((p, i))
1110 search.append((p, i))
1107 break
1111 break
1108 p, f = i, f * 2
1112 p, f = i, f * 2
1109
1113
1110 # sanity check our fetch list
1114 # sanity check our fetch list
1111 for f in fetch.keys():
1115 for f in fetch.keys():
1112 if f in m:
1116 if f in m:
1113 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1117 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1114
1118
1115 if base.keys() == [nullid]:
1119 if base.keys() == [nullid]:
1116 if force:
1120 if force:
1117 self.ui.warn(_("warning: repository is unrelated\n"))
1121 self.ui.warn(_("warning: repository is unrelated\n"))
1118 else:
1122 else:
1119 raise util.Abort(_("repository is unrelated"))
1123 raise util.Abort(_("repository is unrelated"))
1120
1124
1121 self.ui.note(_("found new changesets starting at ") +
1125 self.ui.note(_("found new changesets starting at ") +
1122 " ".join([short(f) for f in fetch]) + "\n")
1126 " ".join([short(f) for f in fetch]) + "\n")
1123
1127
1124 self.ui.debug(_("%d total queries\n") % reqcnt)
1128 self.ui.debug(_("%d total queries\n") % reqcnt)
1125
1129
1126 return fetch.keys()
1130 return fetch.keys()
1127
1131
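The narrowing loop above is easiest to see on a straight line of revisions. Below is a hypothetical sketch, not part of this changeset: plain integers stand in for nodes, `known` stands in for membership in the local nodemap `m`, and `between()` imitates what remote.between() returns for one linear branch segment.

def between(top, bottom):
    # like remote.between() on a linear segment: nodes below top,
    # sampled at exponentially growing distances 1, 2, 4, 8, ...
    out, step = [], 1
    while top - step > bottom:
        out.append(top - step)
        step *= 2
    return out

def narrow(top, bottom, known):
    # top is an unknown head, bottom a known base, and the local side
    # has every revision <= known; find the earliest missing revision
    while True:
        l = between(top, bottom) + [bottom]
        p, f = top, 1
        for i in l:
            if i <= known:              # plays the role of "i in m"
                if f <= 2:              # the gap is closed
                    return p            # p is the earliest unknown revision
                top, bottom = p, i      # otherwise narrow the span to (p, i)
                break
            p, f = i, f * 2

print(narrow(100, 0, known=37))         # -> 38, mirroring fetch[p] = 1

Each round costs one between() query, so the number of requests grows roughly with the logarithm of the segment length, which keeps the "%d total queries" count above small.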
1128 def findoutgoing(self, remote, base=None, heads=None, force=False):
1132 def findoutgoing(self, remote, base=None, heads=None, force=False):
1129 """Return list of nodes that are roots of subsets not in remote
1133 """Return list of nodes that are roots of subsets not in remote
1130
1134
1131 If base dict is specified, assume that these nodes and their parents
1135 If base dict is specified, assume that these nodes and their parents
1132 exist on the remote side.
1136 exist on the remote side.
1133 If a list of heads is specified, return only nodes which are heads
1137 If a list of heads is specified, return only nodes which are heads
1134 or ancestors of these heads, and return a second element which
1138 or ancestors of these heads, and return a second element which
1135 contains all remote heads which get new children.
1139 contains all remote heads which get new children.
1136 """
1140 """
1137 if base == None:
1141 if base == None:
1138 base = {}
1142 base = {}
1139 self.findincoming(remote, base, heads, force=force)
1143 self.findincoming(remote, base, heads, force=force)
1140
1144
1141 self.ui.debug(_("common changesets up to ")
1145 self.ui.debug(_("common changesets up to ")
1142 + " ".join(map(short, base.keys())) + "\n")
1146 + " ".join(map(short, base.keys())) + "\n")
1143
1147
1144 remain = dict.fromkeys(self.changelog.nodemap)
1148 remain = dict.fromkeys(self.changelog.nodemap)
1145
1149
1146 # prune everything remote has from the tree
1150 # prune everything remote has from the tree
1147 del remain[nullid]
1151 del remain[nullid]
1148 remove = base.keys()
1152 remove = base.keys()
1149 while remove:
1153 while remove:
1150 n = remove.pop(0)
1154 n = remove.pop(0)
1151 if n in remain:
1155 if n in remain:
1152 del remain[n]
1156 del remain[n]
1153 for p in self.changelog.parents(n):
1157 for p in self.changelog.parents(n):
1154 remove.append(p)
1158 remove.append(p)
1155
1159
1156 # find every node whose parents have been pruned
1160 # find every node whose parents have been pruned
1157 subset = []
1161 subset = []
1158 # find every remote head that will get new children
1162 # find every remote head that will get new children
1159 updated_heads = {}
1163 updated_heads = {}
1160 for n in remain:
1164 for n in remain:
1161 p1, p2 = self.changelog.parents(n)
1165 p1, p2 = self.changelog.parents(n)
1162 if p1 not in remain and p2 not in remain:
1166 if p1 not in remain and p2 not in remain:
1163 subset.append(n)
1167 subset.append(n)
1164 if heads:
1168 if heads:
1165 if p1 in heads:
1169 if p1 in heads:
1166 updated_heads[p1] = True
1170 updated_heads[p1] = True
1167 if p2 in heads:
1171 if p2 in heads:
1168 updated_heads[p2] = True
1172 updated_heads[p2] = True
1169
1173
1170 # this is the set of all roots we have to push
1174 # this is the set of all roots we have to push
1171 if heads:
1175 if heads:
1172 return subset, updated_heads.keys()
1176 return subset, updated_heads.keys()
1173 else:
1177 else:
1174 return subset
1178 return subset
1175
1179
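findoutgoing() works by elimination: start from everything in the local changelog, walk parents from the common nodes that findincoming() recorded in base and delete them, and whatever remains with both parents deleted is a root of the outgoing set. A toy sketch of those two passes, with made-up node names and a plain dict standing in for the changelog:

# parents of each node in a tiny local history; None plays the role of nullid
parents = {
    'a': (None, None),
    'b': ('a', None),
    'c': ('b', None),   # known to the remote as well
    'd': ('c', None),   # local only
    'e': ('d', None),   # local only
}
base = {'c': 1}                        # what findincoming() filled in

remain = dict.fromkeys(parents)        # every local node (nullid excluded)
remove = list(base)
while remove:                          # prune everything the remote has
    n = remove.pop(0)
    if n in remain:
        del remain[n]
        remove.extend(p for p in parents[n] if p is not None)

# roots of the outgoing subset: nodes whose parents were all pruned
subset = [n for n in remain
          if all(p not in remain for p in parents[n])]
print(subset)                          # -> ['d']; 'e' is a descendant of 'd'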
1176 def pull(self, remote, heads=None, force=False):
1180 def pull(self, remote, heads=None, force=False):
1177 l = self.lock()
1181 l = self.lock()
1178
1182
1179 fetch = self.findincoming(remote, force=force)
1183 fetch = self.findincoming(remote, force=force)
1180 if fetch == [nullid]:
1184 if fetch == [nullid]:
1181 self.ui.status(_("requesting all changes\n"))
1185 self.ui.status(_("requesting all changes\n"))
1182
1186
1183 if not fetch:
1187 if not fetch:
1184 self.ui.status(_("no changes found\n"))
1188 self.ui.status(_("no changes found\n"))
1185 return 0
1189 return 0
1186
1190
1187 if heads is None:
1191 if heads is None:
1188 cg = remote.changegroup(fetch, 'pull')
1192 cg = remote.changegroup(fetch, 'pull')
1189 else:
1193 else:
1190 cg = remote.changegroupsubset(fetch, heads, 'pull')
1194 cg = remote.changegroupsubset(fetch, heads, 'pull')
1191 return self.addchangegroup(cg, 'pull', remote.url())
1195 return self.addchangegroup(cg, 'pull', remote.url())
1192
1196
1193 def push(self, remote, force=False, revs=None):
1197 def push(self, remote, force=False, revs=None):
1194 # there are two ways to push to remote repo:
1198 # there are two ways to push to remote repo:
1195 #
1199 #
1196 # addchangegroup assumes local user can lock remote
1200 # addchangegroup assumes local user can lock remote
1197 # repo (local filesystem, old ssh servers).
1201 # repo (local filesystem, old ssh servers).
1198 #
1202 #
1199 # unbundle assumes local user cannot lock remote repo (new ssh
1203 # unbundle assumes local user cannot lock remote repo (new ssh
1200 # servers, http servers).
1204 # servers, http servers).
1201
1205
1202 if remote.capable('unbundle'):
1206 if remote.capable('unbundle'):
1203 return self.push_unbundle(remote, force, revs)
1207 return self.push_unbundle(remote, force, revs)
1204 return self.push_addchangegroup(remote, force, revs)
1208 return self.push_addchangegroup(remote, force, revs)
1205
1209
1206 def prepush(self, remote, force, revs):
1210 def prepush(self, remote, force, revs):
1207 base = {}
1211 base = {}
1208 remote_heads = remote.heads()
1212 remote_heads = remote.heads()
1209 inc = self.findincoming(remote, base, remote_heads, force=force)
1213 inc = self.findincoming(remote, base, remote_heads, force=force)
1210 if not force and inc:
1214 if not force and inc:
1211 self.ui.warn(_("abort: unsynced remote changes!\n"))
1215 self.ui.warn(_("abort: unsynced remote changes!\n"))
1212 self.ui.status(_("(did you forget to sync?"
1216 self.ui.status(_("(did you forget to sync?"
1213 " use push -f to force)\n"))
1217 " use push -f to force)\n"))
1214 return None, 1
1218 return None, 1
1215
1219
1216 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1220 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1217 if revs is not None:
1221 if revs is not None:
1218 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1222 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1219 else:
1223 else:
1220 bases, heads = update, self.changelog.heads()
1224 bases, heads = update, self.changelog.heads()
1221
1225
1222 if not bases:
1226 if not bases:
1223 self.ui.status(_("no changes found\n"))
1227 self.ui.status(_("no changes found\n"))
1224 return None, 1
1228 return None, 1
1225 elif not force:
1229 elif not force:
1226 # FIXME we don't properly detect creation of new heads
1230 # FIXME we don't properly detect creation of new heads
1227 # in the push -r case, assume the user knows what he's doing
1231 # in the push -r case, assume the user knows what he's doing
1228 if not revs and len(remote_heads) < len(heads) \
1232 if not revs and len(remote_heads) < len(heads) \
1229 and remote_heads != [nullid]:
1233 and remote_heads != [nullid]:
1230 self.ui.warn(_("abort: push creates new remote branches!\n"))
1234 self.ui.warn(_("abort: push creates new remote branches!\n"))
1231 self.ui.status(_("(did you forget to merge?"
1235 self.ui.status(_("(did you forget to merge?"
1232 " use push -f to force)\n"))
1236 " use push -f to force)\n"))
1233 return None, 1
1237 return None, 1
1234
1238
1235 if revs is None:
1239 if revs is None:
1236 cg = self.changegroup(update, 'push')
1240 cg = self.changegroup(update, 'push')
1237 else:
1241 else:
1238 cg = self.changegroupsubset(update, revs, 'push')
1242 cg = self.changegroupsubset(update, revs, 'push')
1239 return cg, remote_heads
1243 return cg, remote_heads
1240
1244
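As the FIXME above admits, the new-head guard is only a head count comparison, not a real topology check. A small hypothetical sketch of the condition that triggers the "push creates new remote branches!" abort:

def refuses(remote_heads, local_heads, revs=None, force=False):
    # mirrors the guard in prepush(); 'null' stands in for nullid here
    return (not force and not revs
            and remote_heads != ['null']
            and len(remote_heads) < len(local_heads))

print(refuses(['r1'], ['l1']))                      # False: head counts match
print(refuses(['r1'], ['l1', 'l2']))                # True:  push would add a head
print(refuses(['r1'], ['l1', 'l2'], force=True))    # False: push -f overrides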
1241 def push_addchangegroup(self, remote, force, revs):
1245 def push_addchangegroup(self, remote, force, revs):
1242 lock = remote.lock()
1246 lock = remote.lock()
1243
1247
1244 ret = self.prepush(remote, force, revs)
1248 ret = self.prepush(remote, force, revs)
1245 if ret[0] is not None:
1249 if ret[0] is not None:
1246 cg, remote_heads = ret
1250 cg, remote_heads = ret
1247 return remote.addchangegroup(cg, 'push', self.url())
1251 return remote.addchangegroup(cg, 'push', self.url())
1248 return ret[1]
1252 return ret[1]
1249
1253
1250 def push_unbundle(self, remote, force, revs):
1254 def push_unbundle(self, remote, force, revs):
1251 # local repo finds heads on server, finds out what revs it
1255 # local repo finds heads on server, finds out what revs it
1252 # must push. once revs transferred, if server finds it has
1256 # must push. once revs transferred, if server finds it has
1253 # different heads (someone else won commit/push race), server
1257 # different heads (someone else won commit/push race), server
1254 # aborts.
1258 # aborts.
1255
1259
1256 ret = self.prepush(remote, force, revs)
1260 ret = self.prepush(remote, force, revs)
1257 if ret[0] is not None:
1261 if ret[0] is not None:
1258 cg, remote_heads = ret
1262 cg, remote_heads = ret
1259 if force: remote_heads = ['force']
1263 if force: remote_heads = ['force']
1260 return remote.unbundle(cg, remote_heads, 'push')
1264 return remote.unbundle(cg, remote_heads, 'push')
1261 return ret[1]
1265 return ret[1]
1262
1266
1263 def changegroupsubset(self, bases, heads, source):
1267 def changegroupsubset(self, bases, heads, source):
1264 """This function generates a changegroup consisting of all the nodes
1268 """This function generates a changegroup consisting of all the nodes
1265 that are descendants of any of the bases, and ancestors of any of
1269 that are descendants of any of the bases, and ancestors of any of
1266 the heads.
1270 the heads.
1267
1271
1268 It is fairly complex as determining which filenodes and which
1272 It is fairly complex as determining which filenodes and which
1269 manifest nodes need to be included for the changeset to be complete
1273 manifest nodes need to be included for the changeset to be complete
1270 is non-trivial.
1274 is non-trivial.
1271
1275
1272 Another wrinkle is doing the reverse, figuring out which changeset in
1276 Another wrinkle is doing the reverse, figuring out which changeset in
1273 the changegroup a particular filenode or manifestnode belongs to."""
1277 the changegroup a particular filenode or manifestnode belongs to."""
1274
1278
1275 self.hook('preoutgoing', throw=True, source=source)
1279 self.hook('preoutgoing', throw=True, source=source)
1276
1280
1277 # Set up some initial variables
1281 # Set up some initial variables
1278 # Make it easy to refer to self.changelog
1282 # Make it easy to refer to self.changelog
1279 cl = self.changelog
1283 cl = self.changelog
1280 # msng is short for missing - compute the list of changesets in this
1284 # msng is short for missing - compute the list of changesets in this
1281 # changegroup.
1285 # changegroup.
1282 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1286 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1283 # Some bases may turn out to be superfluous, and some heads may be
1287 # Some bases may turn out to be superfluous, and some heads may be
1284 # too. nodesbetween will return the minimal set of bases and heads
1288 # too. nodesbetween will return the minimal set of bases and heads
1285 # necessary to re-create the changegroup.
1289 # necessary to re-create the changegroup.
1286
1290
1287 # Known heads are the list of heads that it is assumed the recipient
1291 # Known heads are the list of heads that it is assumed the recipient
1288 # of this changegroup will know about.
1292 # of this changegroup will know about.
1289 knownheads = {}
1293 knownheads = {}
1290 # We assume that all parents of bases are known heads.
1294 # We assume that all parents of bases are known heads.
1291 for n in bases:
1295 for n in bases:
1292 for p in cl.parents(n):
1296 for p in cl.parents(n):
1293 if p != nullid:
1297 if p != nullid:
1294 knownheads[p] = 1
1298 knownheads[p] = 1
1295 knownheads = knownheads.keys()
1299 knownheads = knownheads.keys()
1296 if knownheads:
1300 if knownheads:
1297 # Now that we know what heads are known, we can compute which
1301 # Now that we know what heads are known, we can compute which
1298 # changesets are known. The recipient must know about all
1302 # changesets are known. The recipient must know about all
1299 # changesets required to reach the known heads from the null
1303 # changesets required to reach the known heads from the null
1300 # changeset.
1304 # changeset.
1301 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1305 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1302 junk = None
1306 junk = None
1303 # Transform the list into an ersatz set.
1307 # Transform the list into an ersatz set.
1304 has_cl_set = dict.fromkeys(has_cl_set)
1308 has_cl_set = dict.fromkeys(has_cl_set)
1305 else:
1309 else:
1306 # If there were no known heads, the recipient cannot be assumed to
1310 # If there were no known heads, the recipient cannot be assumed to
1307 # know about any changesets.
1311 # know about any changesets.
1308 has_cl_set = {}
1312 has_cl_set = {}
1309
1313
1310 # Make it easy to refer to self.manifest
1314 # Make it easy to refer to self.manifest
1311 mnfst = self.manifest
1315 mnfst = self.manifest
1312 # We don't know which manifests are missing yet
1316 # We don't know which manifests are missing yet
1313 msng_mnfst_set = {}
1317 msng_mnfst_set = {}
1314 # Nor do we know which filenodes are missing.
1318 # Nor do we know which filenodes are missing.
1315 msng_filenode_set = {}
1319 msng_filenode_set = {}
1316
1320
1317 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1321 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1318 junk = None
1322 junk = None
1319
1323
1320 # A changeset always belongs to itself, so the changenode lookup
1324 # A changeset always belongs to itself, so the changenode lookup
1321 # function for a changenode is identity.
1325 # function for a changenode is identity.
1322 def identity(x):
1326 def identity(x):
1323 return x
1327 return x
1324
1328
1325 # A function generating function. Sets up an environment for the
1329 # A function generating function. Sets up an environment for the
1326 # inner function.
1330 # inner function.
1327 def cmp_by_rev_func(revlog):
1331 def cmp_by_rev_func(revlog):
1328 # Compare two nodes by their revision number in the environment's
1332 # Compare two nodes by their revision number in the environment's
1329 # revision history. Since the revision number both represents the
1333 # revision history. Since the revision number both represents the
1330 # most efficient order to read the nodes in, and represents a
1334 # most efficient order to read the nodes in, and represents a
1331 # topological sorting of the nodes, this function is often useful.
1335 # topological sorting of the nodes, this function is often useful.
1332 def cmp_by_rev(a, b):
1336 def cmp_by_rev(a, b):
1333 return cmp(revlog.rev(a), revlog.rev(b))
1337 return cmp(revlog.rev(a), revlog.rev(b))
1334 return cmp_by_rev
1338 return cmp_by_rev
1335
1339
1336 # If we determine that a particular file or manifest node must be a
1340 # If we determine that a particular file or manifest node must be a
1337 # node that the recipient of the changegroup will already have, we can
1341 # node that the recipient of the changegroup will already have, we can
1338 # also assume the recipient will have all the parents. This function
1342 # also assume the recipient will have all the parents. This function
1339 # prunes them from the set of missing nodes.
1343 # prunes them from the set of missing nodes.
1340 def prune_parents(revlog, hasset, msngset):
1344 def prune_parents(revlog, hasset, msngset):
1341 haslst = hasset.keys()
1345 haslst = hasset.keys()
1342 haslst.sort(cmp_by_rev_func(revlog))
1346 haslst.sort(cmp_by_rev_func(revlog))
1343 for node in haslst:
1347 for node in haslst:
1344 parentlst = [p for p in revlog.parents(node) if p != nullid]
1348 parentlst = [p for p in revlog.parents(node) if p != nullid]
1345 while parentlst:
1349 while parentlst:
1346 n = parentlst.pop()
1350 n = parentlst.pop()
1347 if n not in hasset:
1351 if n not in hasset:
1348 hasset[n] = 1
1352 hasset[n] = 1
1349 p = [p for p in revlog.parents(n) if p != nullid]
1353 p = [p for p in revlog.parents(n) if p != nullid]
1350 parentlst.extend(p)
1354 parentlst.extend(p)
1351 for n in hasset:
1355 for n in hasset:
1352 msngset.pop(n, None)
1356 msngset.pop(n, None)
1353
1357
1354 # This is a function generating function used to set up an environment
1358 # This is a function generating function used to set up an environment
1355 # for the inner function to execute in.
1359 # for the inner function to execute in.
1356 def manifest_and_file_collector(changedfileset):
1360 def manifest_and_file_collector(changedfileset):
1357 # This is an information gathering function that gathers
1361 # This is an information gathering function that gathers
1358 # information from each changeset node that goes out as part of
1362 # information from each changeset node that goes out as part of
1359 # the changegroup. The information gathered is a list of which
1363 # the changegroup. The information gathered is a list of which
1360 # manifest nodes are potentially required (the recipient may
1364 # manifest nodes are potentially required (the recipient may
1361 # already have them) and total list of all files which were
1365 # already have them) and total list of all files which were
1362 # changed in any changeset in the changegroup.
1366 # changed in any changeset in the changegroup.
1363 #
1367 #
1364 # We also remember the first changenode we saw any manifest
1368 # We also remember the first changenode we saw any manifest
1365 # referenced by so we can later determine which changenode 'owns'
1369 # referenced by so we can later determine which changenode 'owns'
1366 # the manifest.
1370 # the manifest.
1367 def collect_manifests_and_files(clnode):
1371 def collect_manifests_and_files(clnode):
1368 c = cl.read(clnode)
1372 c = cl.read(clnode)
1369 for f in c[3]:
1373 for f in c[3]:
1370 # This is to make sure we only have one instance of each
1374 # This is to make sure we only have one instance of each
1371 # filename string for each filename.
1375 # filename string for each filename.
1372 changedfileset.setdefault(f, f)
1376 changedfileset.setdefault(f, f)
1373 msng_mnfst_set.setdefault(c[0], clnode)
1377 msng_mnfst_set.setdefault(c[0], clnode)
1374 return collect_manifests_and_files
1378 return collect_manifests_and_files
1375
1379
1376 # Figure out which manifest nodes (of the ones we think might be part
1380 # Figure out which manifest nodes (of the ones we think might be part
1377 # of the changegroup) the recipient must know about and remove them
1381 # of the changegroup) the recipient must know about and remove them
1378 # from the changegroup.
1382 # from the changegroup.
1379 def prune_manifests():
1383 def prune_manifests():
1380 has_mnfst_set = {}
1384 has_mnfst_set = {}
1381 for n in msng_mnfst_set:
1385 for n in msng_mnfst_set:
1382 # If a 'missing' manifest thinks it belongs to a changenode
1386 # If a 'missing' manifest thinks it belongs to a changenode
1383 # the recipient is assumed to have, obviously the recipient
1387 # the recipient is assumed to have, obviously the recipient
1384 # must have that manifest.
1388 # must have that manifest.
1385 linknode = cl.node(mnfst.linkrev(n))
1389 linknode = cl.node(mnfst.linkrev(n))
1386 if linknode in has_cl_set:
1390 if linknode in has_cl_set:
1387 has_mnfst_set[n] = 1
1391 has_mnfst_set[n] = 1
1388 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1392 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1389
1393
1390 # Use the information collected in collect_manifests_and_files to say
1394 # Use the information collected in collect_manifests_and_files to say
1391 # which changenode any manifestnode belongs to.
1395 # which changenode any manifestnode belongs to.
1392 def lookup_manifest_link(mnfstnode):
1396 def lookup_manifest_link(mnfstnode):
1393 return msng_mnfst_set[mnfstnode]
1397 return msng_mnfst_set[mnfstnode]
1394
1398
1395 # A function generating function that sets up the initial environment
1399 # A function generating function that sets up the initial environment
1396 # the inner function.
1400 # the inner function.
1397 def filenode_collector(changedfiles):
1401 def filenode_collector(changedfiles):
1398 next_rev = [0]
1402 next_rev = [0]
1399 # This gathers information from each manifestnode included in the
1403 # This gathers information from each manifestnode included in the
1400 # changegroup about which filenodes the manifest node references
1404 # changegroup about which filenodes the manifest node references
1401 # so we can include those in the changegroup too.
1405 # so we can include those in the changegroup too.
1402 #
1406 #
1403 # It also remembers which changenode each filenode belongs to. It
1407 # It also remembers which changenode each filenode belongs to. It
1404 # does this by assuming a filenode belongs to the changenode
1408 # does this by assuming a filenode belongs to the changenode
1405 # the first manifest that references it belongs to.
1409 # the first manifest that references it belongs to.
1406 def collect_msng_filenodes(mnfstnode):
1410 def collect_msng_filenodes(mnfstnode):
1407 r = mnfst.rev(mnfstnode)
1411 r = mnfst.rev(mnfstnode)
1408 if r == next_rev[0]:
1412 if r == next_rev[0]:
1409 # If the last rev we looked at was the one just previous,
1413 # If the last rev we looked at was the one just previous,
1410 # we only need to see a diff.
1414 # we only need to see a diff.
1411 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1415 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1412 # For each line in the delta
1416 # For each line in the delta
1413 for dline in delta.splitlines():
1417 for dline in delta.splitlines():
1414 # get the filename and filenode for that line
1418 # get the filename and filenode for that line
1415 f, fnode = dline.split('\0')
1419 f, fnode = dline.split('\0')
1416 fnode = bin(fnode[:40])
1420 fnode = bin(fnode[:40])
1417 f = changedfiles.get(f, None)
1421 f = changedfiles.get(f, None)
1418 # And if the file is in the list of files we care
1422 # And if the file is in the list of files we care
1419 # about.
1423 # about.
1420 if f is not None:
1424 if f is not None:
1421 # Get the changenode this manifest belongs to
1425 # Get the changenode this manifest belongs to
1422 clnode = msng_mnfst_set[mnfstnode]
1426 clnode = msng_mnfst_set[mnfstnode]
1423 # Create the set of filenodes for the file if
1427 # Create the set of filenodes for the file if
1424 # there isn't one already.
1428 # there isn't one already.
1425 ndset = msng_filenode_set.setdefault(f, {})
1429 ndset = msng_filenode_set.setdefault(f, {})
1426 # And set the filenode's changelog node to the
1430 # And set the filenode's changelog node to the
1427 # manifest's if it hasn't been set already.
1431 # manifest's if it hasn't been set already.
1428 ndset.setdefault(fnode, clnode)
1432 ndset.setdefault(fnode, clnode)
1429 else:
1433 else:
1430 # Otherwise we need a full manifest.
1434 # Otherwise we need a full manifest.
1431 m = mnfst.read(mnfstnode)
1435 m = mnfst.read(mnfstnode)
1432 # For every file we care about.
1436 # For every file we care about.
1433 for f in changedfiles:
1437 for f in changedfiles:
1434 fnode = m.get(f, None)
1438 fnode = m.get(f, None)
1435 # If it's in the manifest
1439 # If it's in the manifest
1436 if fnode is not None:
1440 if fnode is not None:
1437 # See comments above.
1441 # See comments above.
1438 clnode = msng_mnfst_set[mnfstnode]
1442 clnode = msng_mnfst_set[mnfstnode]
1439 ndset = msng_filenode_set.setdefault(f, {})
1443 ndset = msng_filenode_set.setdefault(f, {})
1440 ndset.setdefault(fnode, clnode)
1444 ndset.setdefault(fnode, clnode)
1441 # Remember the revision we hope to see next.
1445 # Remember the revision we hope to see next.
1442 next_rev[0] = r + 1
1446 next_rev[0] = r + 1
1443 return collect_msng_filenodes
1447 return collect_msng_filenodes
1444
1448
1445 # We have a list of filenodes we think we need for a file, let's remove
1449 # We have a list of filenodes we think we need for a file, let's remove
1446 # all those we know the recipient must have.
1450 # all those we know the recipient must have.
1447 def prune_filenodes(f, filerevlog):
1451 def prune_filenodes(f, filerevlog):
1448 msngset = msng_filenode_set[f]
1452 msngset = msng_filenode_set[f]
1449 hasset = {}
1453 hasset = {}
1450 # If a 'missing' filenode thinks it belongs to a changenode we
1454 # If a 'missing' filenode thinks it belongs to a changenode we
1451 # assume the recipient must have, then the recipient must have
1455 # assume the recipient must have, then the recipient must have
1452 # that filenode.
1456 # that filenode.
1453 for n in msngset:
1457 for n in msngset:
1454 clnode = cl.node(filerevlog.linkrev(n))
1458 clnode = cl.node(filerevlog.linkrev(n))
1455 if clnode in has_cl_set:
1459 if clnode in has_cl_set:
1456 hasset[n] = 1
1460 hasset[n] = 1
1457 prune_parents(filerevlog, hasset, msngset)
1461 prune_parents(filerevlog, hasset, msngset)
1458
1462
1459 # A function generator function that sets up a context for the
1463 # A function generator function that sets up a context for the
1460 # inner function.
1464 # inner function.
1461 def lookup_filenode_link_func(fname):
1465 def lookup_filenode_link_func(fname):
1462 msngset = msng_filenode_set[fname]
1466 msngset = msng_filenode_set[fname]
1463 # Lookup the changenode the filenode belongs to.
1467 # Lookup the changenode the filenode belongs to.
1464 def lookup_filenode_link(fnode):
1468 def lookup_filenode_link(fnode):
1465 return msngset[fnode]
1469 return msngset[fnode]
1466 return lookup_filenode_link
1470 return lookup_filenode_link
1467
1471
1468 # Now that we have all these utility functions to help out and
1472 # Now that we have all these utility functions to help out and
1469 # logically divide up the task, generate the group.
1473 # logically divide up the task, generate the group.
1470 def gengroup():
1474 def gengroup():
1471 # The set of changed files starts empty.
1475 # The set of changed files starts empty.
1472 changedfiles = {}
1476 changedfiles = {}
1473 # Create a changenode group generator that will call our functions
1477 # Create a changenode group generator that will call our functions
1474 # back to lookup the owning changenode and collect information.
1478 # back to lookup the owning changenode and collect information.
1475 group = cl.group(msng_cl_lst, identity,
1479 group = cl.group(msng_cl_lst, identity,
1476 manifest_and_file_collector(changedfiles))
1480 manifest_and_file_collector(changedfiles))
1477 for chnk in group:
1481 for chnk in group:
1478 yield chnk
1482 yield chnk
1479
1483
1480 # The list of manifests has been collected by the generator
1484 # The list of manifests has been collected by the generator
1481 # calling our functions back.
1485 # calling our functions back.
1482 prune_manifests()
1486 prune_manifests()
1483 msng_mnfst_lst = msng_mnfst_set.keys()
1487 msng_mnfst_lst = msng_mnfst_set.keys()
1484 # Sort the manifestnodes by revision number.
1488 # Sort the manifestnodes by revision number.
1485 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1489 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1486 # Create a generator for the manifestnodes that calls our lookup
1490 # Create a generator for the manifestnodes that calls our lookup
1487 # and data collection functions back.
1491 # and data collection functions back.
1488 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1492 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1489 filenode_collector(changedfiles))
1493 filenode_collector(changedfiles))
1490 for chnk in group:
1494 for chnk in group:
1491 yield chnk
1495 yield chnk
1492
1496
1493 # These are no longer needed, dereference and toss the memory for
1497 # These are no longer needed, dereference and toss the memory for
1494 # them.
1498 # them.
1495 msng_mnfst_lst = None
1499 msng_mnfst_lst = None
1496 msng_mnfst_set.clear()
1500 msng_mnfst_set.clear()
1497
1501
1498 changedfiles = changedfiles.keys()
1502 changedfiles = changedfiles.keys()
1499 changedfiles.sort()
1503 changedfiles.sort()
1500 # Go through all our files in order sorted by name.
1504 # Go through all our files in order sorted by name.
1501 for fname in changedfiles:
1505 for fname in changedfiles:
1502 filerevlog = self.file(fname)
1506 filerevlog = self.file(fname)
1503 # Toss out the filenodes that the recipient isn't really
1507 # Toss out the filenodes that the recipient isn't really
1504 # missing.
1508 # missing.
1505 if msng_filenode_set.has_key(fname):
1509 if msng_filenode_set.has_key(fname):
1506 prune_filenodes(fname, filerevlog)
1510 prune_filenodes(fname, filerevlog)
1507 msng_filenode_lst = msng_filenode_set[fname].keys()
1511 msng_filenode_lst = msng_filenode_set[fname].keys()
1508 else:
1512 else:
1509 msng_filenode_lst = []
1513 msng_filenode_lst = []
1510 # If any filenodes are left, generate the group for them,
1514 # If any filenodes are left, generate the group for them,
1511 # otherwise don't bother.
1515 # otherwise don't bother.
1512 if len(msng_filenode_lst) > 0:
1516 if len(msng_filenode_lst) > 0:
1513 yield changegroup.genchunk(fname)
1517 yield changegroup.genchunk(fname)
1514 # Sort the filenodes by their revision #
1518 # Sort the filenodes by their revision #
1515 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1519 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1516 # Create a group generator and only pass in a changenode
1520 # Create a group generator and only pass in a changenode
1517 # lookup function as we need to collect no information
1521 # lookup function as we need to collect no information
1518 # from filenodes.
1522 # from filenodes.
1519 group = filerevlog.group(msng_filenode_lst,
1523 group = filerevlog.group(msng_filenode_lst,
1520 lookup_filenode_link_func(fname))
1524 lookup_filenode_link_func(fname))
1521 for chnk in group:
1525 for chnk in group:
1522 yield chnk
1526 yield chnk
1523 if msng_filenode_set.has_key(fname):
1527 if msng_filenode_set.has_key(fname):
1524 # Don't need this anymore, toss it to free memory.
1528 # Don't need this anymore, toss it to free memory.
1525 del msng_filenode_set[fname]
1529 del msng_filenode_set[fname]
1526 # Signal that no more groups are left.
1530 # Signal that no more groups are left.
1527 yield changegroup.closechunk()
1531 yield changegroup.closechunk()
1528
1532
1529 if msng_cl_lst:
1533 if msng_cl_lst:
1530 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1534 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1531
1535
1532 return util.chunkbuffer(gengroup())
1536 return util.chunkbuffer(gengroup())
1533
1537
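gengroup() above always emits the pieces in a fixed order -- the changelog group, the manifest group, then one named group per changed file, closed by an empty chunk -- and that is the order addchangegroup() further down reads them in. A rough consumer skeleton under that assumption (the apply_* callbacks are hypothetical):

from mercurial import changegroup

def consume(source, apply_changelog, apply_manifest, apply_file):
    # changelog group, then manifest group
    apply_changelog(changegroup.chunkiter(source))
    apply_manifest(changegroup.chunkiter(source))
    # one (filename, group) pair per changed file, until the empty chunk
    while 1:
        fname = changegroup.getchunk(source)
        if not fname:
            break
        apply_file(fname, changegroup.chunkiter(source))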
1534 def changegroup(self, basenodes, source):
1538 def changegroup(self, basenodes, source):
1535 """Generate a changegroup of all nodes that we have that a recipient
1539 """Generate a changegroup of all nodes that we have that a recipient
1536 doesn't.
1540 doesn't.
1537
1541
1538 This is much easier than the previous function as we can assume that
1542 This is much easier than the previous function as we can assume that
1539 the recipient has any changenode we aren't sending them."""
1543 the recipient has any changenode we aren't sending them."""
1540
1544
1541 self.hook('preoutgoing', throw=True, source=source)
1545 self.hook('preoutgoing', throw=True, source=source)
1542
1546
1543 cl = self.changelog
1547 cl = self.changelog
1544 nodes = cl.nodesbetween(basenodes, None)[0]
1548 nodes = cl.nodesbetween(basenodes, None)[0]
1545 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1549 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1546
1550
1547 def identity(x):
1551 def identity(x):
1548 return x
1552 return x
1549
1553
1550 def gennodelst(revlog):
1554 def gennodelst(revlog):
1551 for r in xrange(0, revlog.count()):
1555 for r in xrange(0, revlog.count()):
1552 n = revlog.node(r)
1556 n = revlog.node(r)
1553 if revlog.linkrev(n) in revset:
1557 if revlog.linkrev(n) in revset:
1554 yield n
1558 yield n
1555
1559
1556 def changed_file_collector(changedfileset):
1560 def changed_file_collector(changedfileset):
1557 def collect_changed_files(clnode):
1561 def collect_changed_files(clnode):
1558 c = cl.read(clnode)
1562 c = cl.read(clnode)
1559 for fname in c[3]:
1563 for fname in c[3]:
1560 changedfileset[fname] = 1
1564 changedfileset[fname] = 1
1561 return collect_changed_files
1565 return collect_changed_files
1562
1566
1563 def lookuprevlink_func(revlog):
1567 def lookuprevlink_func(revlog):
1564 def lookuprevlink(n):
1568 def lookuprevlink(n):
1565 return cl.node(revlog.linkrev(n))
1569 return cl.node(revlog.linkrev(n))
1566 return lookuprevlink
1570 return lookuprevlink
1567
1571
1568 def gengroup():
1572 def gengroup():
1569 # construct a list of all changed files
1573 # construct a list of all changed files
1570 changedfiles = {}
1574 changedfiles = {}
1571
1575
1572 for chnk in cl.group(nodes, identity,
1576 for chnk in cl.group(nodes, identity,
1573 changed_file_collector(changedfiles)):
1577 changed_file_collector(changedfiles)):
1574 yield chnk
1578 yield chnk
1575 changedfiles = changedfiles.keys()
1579 changedfiles = changedfiles.keys()
1576 changedfiles.sort()
1580 changedfiles.sort()
1577
1581
1578 mnfst = self.manifest
1582 mnfst = self.manifest
1579 nodeiter = gennodelst(mnfst)
1583 nodeiter = gennodelst(mnfst)
1580 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1584 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1581 yield chnk
1585 yield chnk
1582
1586
1583 for fname in changedfiles:
1587 for fname in changedfiles:
1584 filerevlog = self.file(fname)
1588 filerevlog = self.file(fname)
1585 nodeiter = gennodelst(filerevlog)
1589 nodeiter = gennodelst(filerevlog)
1586 nodeiter = list(nodeiter)
1590 nodeiter = list(nodeiter)
1587 if nodeiter:
1591 if nodeiter:
1588 yield changegroup.genchunk(fname)
1592 yield changegroup.genchunk(fname)
1589 lookup = lookuprevlink_func(filerevlog)
1593 lookup = lookuprevlink_func(filerevlog)
1590 for chnk in filerevlog.group(nodeiter, lookup):
1594 for chnk in filerevlog.group(nodeiter, lookup):
1591 yield chnk
1595 yield chnk
1592
1596
1593 yield changegroup.closechunk()
1597 yield changegroup.closechunk()
1594
1598
1595 if nodes:
1599 if nodes:
1596 self.hook('outgoing', node=hex(nodes[0]), source=source)
1600 self.hook('outgoing', node=hex(nodes[0]), source=source)
1597
1601
1598 return util.chunkbuffer(gengroup())
1602 return util.chunkbuffer(gengroup())
1599
1603
1600 def addchangegroup(self, source, srctype, url):
1604 def addchangegroup(self, source, srctype, url):
1601 """add changegroup to repo.
1605 """add changegroup to repo.
1602 returns number of heads modified or added + 1."""
1606 returns number of heads modified or added + 1."""
1603
1607
1604 def csmap(x):
1608 def csmap(x):
1605 self.ui.debug(_("add changeset %s\n") % short(x))
1609 self.ui.debug(_("add changeset %s\n") % short(x))
1606 return cl.count()
1610 return cl.count()
1607
1611
1608 def revmap(x):
1612 def revmap(x):
1609 return cl.rev(x)
1613 return cl.rev(x)
1610
1614
1611 if not source:
1615 if not source:
1612 return 0
1616 return 0
1613
1617
1614 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1618 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1615
1619
1616 changesets = files = revisions = 0
1620 changesets = files = revisions = 0
1617
1621
1618 tr = self.transaction()
1622 tr = self.transaction()
1619
1623
1620 # write changelog data to temp files so concurrent readers will not see
1624 # write changelog data to temp files so concurrent readers will not see
1621 # inconsistent view
1625 # inconsistent view
1622 cl = None
1626 cl = None
1623 try:
1627 try:
1624 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1628 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1625
1629
1626 oldheads = len(cl.heads())
1630 oldheads = len(cl.heads())
1627
1631
1628 # pull off the changeset group
1632 # pull off the changeset group
1629 self.ui.status(_("adding changesets\n"))
1633 self.ui.status(_("adding changesets\n"))
1630 cor = cl.count() - 1
1634 cor = cl.count() - 1
1631 chunkiter = changegroup.chunkiter(source)
1635 chunkiter = changegroup.chunkiter(source)
1632 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1636 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1633 raise util.Abort(_("received changelog group is empty"))
1637 raise util.Abort(_("received changelog group is empty"))
1634 cnr = cl.count() - 1
1638 cnr = cl.count() - 1
1635 changesets = cnr - cor
1639 changesets = cnr - cor
1636
1640
1637 # pull off the manifest group
1641 # pull off the manifest group
1638 self.ui.status(_("adding manifests\n"))
1642 self.ui.status(_("adding manifests\n"))
1639 chunkiter = changegroup.chunkiter(source)
1643 chunkiter = changegroup.chunkiter(source)
1640 # no need to check for empty manifest group here:
1644 # no need to check for empty manifest group here:
1641 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1645 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1642 # no new manifest will be created and the manifest group will
1646 # no new manifest will be created and the manifest group will
1643 # be empty during the pull
1647 # be empty during the pull
1644 self.manifest.addgroup(chunkiter, revmap, tr)
1648 self.manifest.addgroup(chunkiter, revmap, tr)
1645
1649
1646 # process the files
1650 # process the files
1647 self.ui.status(_("adding file changes\n"))
1651 self.ui.status(_("adding file changes\n"))
1648 while 1:
1652 while 1:
1649 f = changegroup.getchunk(source)
1653 f = changegroup.getchunk(source)
1650 if not f:
1654 if not f:
1651 break
1655 break
1652 self.ui.debug(_("adding %s revisions\n") % f)
1656 self.ui.debug(_("adding %s revisions\n") % f)
1653 fl = self.file(f)
1657 fl = self.file(f)
1654 o = fl.count()
1658 o = fl.count()
1655 chunkiter = changegroup.chunkiter(source)
1659 chunkiter = changegroup.chunkiter(source)
1656 if fl.addgroup(chunkiter, revmap, tr) is None:
1660 if fl.addgroup(chunkiter, revmap, tr) is None:
1657 raise util.Abort(_("received file revlog group is empty"))
1661 raise util.Abort(_("received file revlog group is empty"))
1658 revisions += fl.count() - o
1662 revisions += fl.count() - o
1659 files += 1
1663 files += 1
1660
1664
1661 cl.writedata()
1665 cl.writedata()
1662 finally:
1666 finally:
1663 if cl:
1667 if cl:
1664 cl.cleanup()
1668 cl.cleanup()
1665
1669
1666 # make changelog see real files again
1670 # make changelog see real files again
1667 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1671 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1668 self.changelog.checkinlinesize(tr)
1672 self.changelog.checkinlinesize(tr)
1669
1673
1670 newheads = len(self.changelog.heads())
1674 newheads = len(self.changelog.heads())
1671 heads = ""
1675 heads = ""
1672 if oldheads and newheads != oldheads:
1676 if oldheads and newheads != oldheads:
1673 heads = _(" (%+d heads)") % (newheads - oldheads)
1677 heads = _(" (%+d heads)") % (newheads - oldheads)
1674
1678
1675 self.ui.status(_("added %d changesets"
1679 self.ui.status(_("added %d changesets"
1676 " with %d changes to %d files%s\n")
1680 " with %d changes to %d files%s\n")
1677 % (changesets, revisions, files, heads))
1681 % (changesets, revisions, files, heads))
1678
1682
1679 if changesets > 0:
1683 if changesets > 0:
1680 self.hook('pretxnchangegroup', throw=True,
1684 self.hook('pretxnchangegroup', throw=True,
1681 node=hex(self.changelog.node(cor+1)), source=srctype,
1685 node=hex(self.changelog.node(cor+1)), source=srctype,
1682 url=url)
1686 url=url)
1683
1687
1684 tr.close()
1688 tr.close()
1685
1689
1686 if changesets > 0:
1690 if changesets > 0:
1687 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1691 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1688 source=srctype, url=url)
1692 source=srctype, url=url)
1689
1693
1690 for i in range(cor + 1, cnr + 1):
1694 for i in range(cor + 1, cnr + 1):
1691 self.hook("incoming", node=hex(self.changelog.node(i)),
1695 self.hook("incoming", node=hex(self.changelog.node(i)),
1692 source=srctype, url=url)
1696 source=srctype, url=url)
1693
1697
1694 return newheads - oldheads + 1
1698 return newheads - oldheads + 1
1695
1699
1700
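Because pull() above returns 0 when there was nothing to fetch, the "heads modified or added + 1" convention gives callers a single integer to act on: 1 means the existing heads simply advanced, 2 or more means the incoming changes created new heads that still need merging. Spelled out with hypothetical head counts:

def modheads(oldheads, newheads):
    # the value addchangegroup() returns after adding a group
    return newheads - oldheads + 1

print(modheads(1, 1))    # 1: same single head, a plain fast-forward pull
print(modheads(1, 2))    # 2: the pull added a head, a merge is pending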
1696 def stream_in(self, remote):
1701 def stream_in(self, remote):
1697 fp = remote.stream_out()
1702 fp = remote.stream_out()
1698 resp = int(fp.readline())
1703 resp = int(fp.readline())
1699 if resp != 0:
1704 if resp != 0:
1700 raise util.Abort(_('operation forbidden by server'))
1705 raise util.Abort(_('operation forbidden by server'))
1701 self.ui.status(_('streaming all changes\n'))
1706 self.ui.status(_('streaming all changes\n'))
1702 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1707 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1703 self.ui.status(_('%d files to transfer, %s of data\n') %
1708 self.ui.status(_('%d files to transfer, %s of data\n') %
1704 (total_files, util.bytecount(total_bytes)))
1709 (total_files, util.bytecount(total_bytes)))
1705 start = time.time()
1710 start = time.time()
1706 for i in xrange(total_files):
1711 for i in xrange(total_files):
1707 name, size = fp.readline().split('\0', 1)
1712 name, size = fp.readline().split('\0', 1)
1708 size = int(size)
1713 size = int(size)
1709 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1714 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1710 ofp = self.opener(name, 'w')
1715 ofp = self.opener(name, 'w')
1711 for chunk in util.filechunkiter(fp, limit=size):
1716 for chunk in util.filechunkiter(fp, limit=size):
1712 ofp.write(chunk)
1717 ofp.write(chunk)
1713 ofp.close()
1718 ofp.close()
1714 elapsed = time.time() - start
1719 elapsed = time.time() - start
1715 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1720 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1716 (util.bytecount(total_bytes), elapsed,
1721 (util.bytecount(total_bytes), elapsed,
1717 util.bytecount(total_bytes / elapsed)))
1722 util.bytecount(total_bytes / elapsed)))
1718 self.reload()
1723 self.reload()
1719 return len(self.heads()) + 1
1724 return len(self.heads()) + 1
1720
1725
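stream_in() expects a very small wire format from stream_out(): a status line, a "file_count total_bytes" line, and then each store file as a header line of the form "name\0size" followed by exactly size raw bytes. A hypothetical sketch of the sending side, just to make that framing concrete (the file name and contents are made up):

def write_stream(out, entries):
    # entries: (name, data) pairs for the store files being sent
    out.write('0\n')                               # 0 == streaming allowed
    total = sum(len(data) for name, data in entries)
    out.write('%d %d\n' % (len(entries), total))
    for name, data in entries:
        out.write('%s\0%d\n' % (name, len(data)))  # header line
        out.write(data)                            # raw file contents

try:                                               # stand-in for the peer pipe
    from cStringIO import StringIO
except ImportError:
    from io import StringIO

buf = StringIO()
write_stream(buf, [('data/foo.i', 'x' * 10)])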
1721 def clone(self, remote, heads=[], stream=False):
1726 def clone(self, remote, heads=[], stream=False):
1722 '''clone remote repository.
1727 '''clone remote repository.
1723
1728
1724 keyword arguments:
1729 keyword arguments:
1725 heads: list of revs to clone (forces use of pull)
1730 heads: list of revs to clone (forces use of pull)
1726 stream: use streaming clone if possible'''
1731 stream: use streaming clone if possible'''
1727
1732
1728 # now, all clients that can request uncompressed clones can
1733 # now, all clients that can request uncompressed clones can
1729 # read repo formats supported by all servers that can serve
1734 # read repo formats supported by all servers that can serve
1730 # them.
1735 # them.
1731
1736
1732 # if revlog format changes, client will have to check version
1737 # if revlog format changes, client will have to check version
1733 # and format flags on "stream" capability, and use
1738 # and format flags on "stream" capability, and use
1734 # uncompressed only if compatible.
1739 # uncompressed only if compatible.
1735
1740
1736 if stream and not heads and remote.capable('stream'):
1741 if stream and not heads and remote.capable('stream'):
1737 return self.stream_in(remote)
1742 return self.stream_in(remote)
1738 return self.pull(remote, heads)
1743 return self.pull(remote, heads)
1739
1744
1740 # used to avoid circular references so destructors work
1745 # used to avoid circular references so destructors work
1741 def aftertrans(base):
1746 def aftertrans(base):
1742 p = base
1747 p = base
1743 def a():
1748 def a():
1744 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1749 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1745 util.rename(os.path.join(p, "journal.dirstate"),
1750 util.rename(os.path.join(p, "journal.dirstate"),
1746 os.path.join(p, "undo.dirstate"))
1751 os.path.join(p, "undo.dirstate"))
1747 return a
1752 return a
1748
1753
1749 def instance(ui, path, create):
1754 def instance(ui, path, create):
1750 return localrepository(ui, util.drop_scheme('file', path), create)
1755 return localrepository(ui, util.drop_scheme('file', path), create)
1751
1756
1752 def islocal(path):
1757 def islocal(path):
1753 return True
1758 return True
@@ -1,348 +1,349
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "util os tempfile")
11 demandload(globals(), "util os tempfile")
12
12
13 def merge3(repo, fn, my, other, p1, p2):
13 def merge3(repo, fn, my, other, p1, p2):
14 """perform a 3-way merge in the working directory"""
14 """perform a 3-way merge in the working directory"""
15
15
16 def temp(prefix, node):
16 def temp(prefix, node):
17 pre = "%s~%s." % (os.path.basename(fn), prefix)
17 pre = "%s~%s." % (os.path.basename(fn), prefix)
18 (fd, name) = tempfile.mkstemp(prefix=pre)
18 (fd, name) = tempfile.mkstemp(prefix=pre)
19 f = os.fdopen(fd, "wb")
19 f = os.fdopen(fd, "wb")
20 repo.wwrite(fn, fl.read(node), f)
20 repo.wwrite(fn, fl.read(node), f)
21 f.close()
21 f.close()
22 return name
22 return name
23
23
24 fl = repo.file(fn)
24 fl = repo.file(fn)
25 base = fl.ancestor(my, other)
25 base = fl.ancestor(my, other)
26 a = repo.wjoin(fn)
26 a = repo.wjoin(fn)
27 b = temp("base", base)
27 b = temp("base", base)
28 c = temp("other", other)
28 c = temp("other", other)
29
29
30 repo.ui.note(_("resolving %s\n") % fn)
30 repo.ui.note(_("resolving %s\n") % fn)
31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
32 (fn, short(my), short(other), short(base)))
32 (fn, short(my), short(other), short(base)))
33
33
34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
35 or "hgmerge")
35 or "hgmerge")
36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
37 environ={'HG_FILE': fn,
37 environ={'HG_FILE': fn,
38 'HG_MY_NODE': p1,
38 'HG_MY_NODE': p1,
39 'HG_OTHER_NODE': p2,
39 'HG_OTHER_NODE': p2,
40 'HG_FILE_MY_NODE': hex(my),
40 'HG_FILE_MY_NODE': hex(my),
41 'HG_FILE_OTHER_NODE': hex(other),
41 'HG_FILE_OTHER_NODE': hex(other),
42 'HG_FILE_BASE_NODE': hex(base)})
42 'HG_FILE_BASE_NODE': hex(base)})
43 if r:
43 if r:
44 repo.ui.warn(_("merging %s failed!\n") % fn)
44 repo.ui.warn(_("merging %s failed!\n") % fn)
45
45
46 os.unlink(b)
46 os.unlink(b)
47 os.unlink(c)
47 os.unlink(c)
48 return r
48 return r
49
49
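The update() function below merges the executable bit with mode = ((a^b) | (a^c)) ^ a, glossed in its comment as "if we changed or they changed, change in merge". A quick exhaustive check of that rule, with a the ancestor bit, b the working-copy bit and c the remote bit:

for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            mode = ((a ^ b) | (a ^ c)) ^ a
            # whoever differs from the ancestor wins; if nobody changed, keep a
            expected = b if b != a else c
            assert mode == expected
            print('%d %d %d -> %d' % (a, b, c, mode))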
50 def update(repo, node, allow=False, force=False, choose=None,
50 def update(repo, node, allow=False, force=False, choose=None,
51 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
51 moddirstate=True, forcemerge=False, wlock=None, show_stats=True,
52 remind=True):
52 pl = repo.dirstate.parents()
53 pl = repo.dirstate.parents()
53 if not force and pl[1] != nullid:
54 if not force and pl[1] != nullid:
54 raise util.Abort(_("outstanding uncommitted merges"))
55 raise util.Abort(_("outstanding uncommitted merges"))
55
56
56 err = False
57 err = False
57
58
58 p1, p2 = pl[0], node
59 p1, p2 = pl[0], node
59 pa = repo.changelog.ancestor(p1, p2)
60 pa = repo.changelog.ancestor(p1, p2)
60 m1n = repo.changelog.read(p1)[0]
61 m1n = repo.changelog.read(p1)[0]
61 m2n = repo.changelog.read(p2)[0]
62 m2n = repo.changelog.read(p2)[0]
62 man = repo.manifest.ancestor(m1n, m2n)
63 man = repo.manifest.ancestor(m1n, m2n)
63 m1 = repo.manifest.read(m1n)
64 m1 = repo.manifest.read(m1n)
64 mf1 = repo.manifest.readflags(m1n)
65 mf1 = repo.manifest.readflags(m1n)
65 m2 = repo.manifest.read(m2n).copy()
66 m2 = repo.manifest.read(m2n).copy()
66 mf2 = repo.manifest.readflags(m2n)
67 mf2 = repo.manifest.readflags(m2n)
67 ma = repo.manifest.read(man)
68 ma = repo.manifest.read(man)
68 mfa = repo.manifest.readflags(man)
69 mfa = repo.manifest.readflags(man)
69
70
70 modified, added, removed, deleted, unknown = repo.changes()
71 modified, added, removed, deleted, unknown = repo.changes()
71
72
72 # is this a jump, or a merge? i.e. is there a linear path
73 # is this a jump, or a merge? i.e. is there a linear path
73 # from p1 to p2?
74 # from p1 to p2?
74 linear_path = (pa == p1 or pa == p2)
75 linear_path = (pa == p1 or pa == p2)
75
76
76 if allow and linear_path:
77 if allow and linear_path:
77 raise util.Abort(_("there is nothing to merge, just use "
78 raise util.Abort(_("there is nothing to merge, just use "
78 "'hg update' or look at 'hg heads'"))
79 "'hg update' or look at 'hg heads'"))
79 if allow and not forcemerge:
80 if allow and not forcemerge:
80 if modified or added or removed:
81 if modified or added or removed:
81 raise util.Abort(_("outstanding uncommitted changes"))
82 raise util.Abort(_("outstanding uncommitted changes"))
82
83
83 if not forcemerge and not force:
84 if not forcemerge and not force:
84 for f in unknown:
85 for f in unknown:
85 if f in m2:
86 if f in m2:
86 t1 = repo.wread(f)
87 t1 = repo.wread(f)
87 t2 = repo.file(f).read(m2[f])
88 t2 = repo.file(f).read(m2[f])
88 if cmp(t1, t2) != 0:
89 if cmp(t1, t2) != 0:
89 raise util.Abort(_("'%s' already exists in the working"
90 raise util.Abort(_("'%s' already exists in the working"
90 " dir and differs from remote") % f)
91 " dir and differs from remote") % f)
91
92
92 # resolve the manifest to determine which files
93 # resolve the manifest to determine which files
93 # we care about merging
94 # we care about merging
94 repo.ui.note(_("resolving manifests\n"))
95 repo.ui.note(_("resolving manifests\n"))
95 repo.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
96 repo.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
96 (force, allow, moddirstate, linear_path))
97 (force, allow, moddirstate, linear_path))
97 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
98 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
98 (short(man), short(m1n), short(m2n)))
99 (short(man), short(m1n), short(m2n)))
99
100
100 merge = {}
101 merge = {}
101 get = {}
102 get = {}
102 remove = []
103 remove = []
103
104
104 # construct a working dir manifest
105 # construct a working dir manifest
105 mw = m1.copy()
106 mw = m1.copy()
106 mfw = mf1.copy()
107 mfw = mf1.copy()
107 umap = dict.fromkeys(unknown)
108 umap = dict.fromkeys(unknown)
108
109
109 for f in added + modified + unknown:
110 for f in added + modified + unknown:
110 mw[f] = ""
111 mw[f] = ""
111 mfw[f] = util.is_exec(repo.wjoin(f), mfw.get(f, False))
112 mfw[f] = util.is_exec(repo.wjoin(f), mfw.get(f, False))
112
113
113 if moddirstate and not wlock:
114 if moddirstate and not wlock:
114 wlock = repo.wlock()
115 wlock = repo.wlock()
115
116
116 for f in deleted + removed:
117 for f in deleted + removed:
117 if f in mw:
118 if f in mw:
118 del mw[f]
119 del mw[f]
119
120
120 # If we're jumping between revisions (as opposed to merging),
121 # If we're jumping between revisions (as opposed to merging),
121 # and if neither the working directory nor the target rev has
122 # and if neither the working directory nor the target rev has
122 # the file, then we need to remove it from the dirstate, to
123 # the file, then we need to remove it from the dirstate, to
123 # prevent the dirstate from listing the file when it is no
124 # prevent the dirstate from listing the file when it is no
124 # longer in the manifest.
125 # longer in the manifest.
125 if moddirstate and linear_path and f not in m2:
126 if moddirstate and linear_path and f not in m2:
126 repo.dirstate.forget((f,))
127 repo.dirstate.forget((f,))
127
128
128 # Compare manifests
129 # Compare manifests
129 for f, n in mw.iteritems():
130 for f, n in mw.iteritems():
130 if choose and not choose(f):
131 if choose and not choose(f):
131 continue
132 continue
132 if f in m2:
133 if f in m2:
133 s = 0
134 s = 0
134
135
135 # is the wfile new since m1, and match m2?
136 # is the wfile new since m1, and match m2?
136 if f not in m1:
137 if f not in m1:
137 t1 = repo.wread(f)
138 t1 = repo.wread(f)
138 t2 = repo.file(f).read(m2[f])
139 t2 = repo.file(f).read(m2[f])
139 if cmp(t1, t2) == 0:
140 if cmp(t1, t2) == 0:
140 n = m2[f]
141 n = m2[f]
141 del t1, t2
142 del t1, t2
142
143
143 # are files different?
144 # are files different?
144 if n != m2[f]:
145 if n != m2[f]:
145 a = ma.get(f, nullid)
146 a = ma.get(f, nullid)
146 # are both different from the ancestor?
147 # are both different from the ancestor?
147 if n != a and m2[f] != a:
148 if n != a and m2[f] != a:
148 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
149 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
149 # merge executable bits
150 # merge executable bits
150 # "if we changed or they changed, change in merge"
151 # "if we changed or they changed, change in merge"
151 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
152 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
152 mode = ((a^b) | (a^c)) ^ a
153 mode = ((a^b) | (a^c)) ^ a
153 merge[f] = (m1.get(f, nullid), m2[f], mode)
154 merge[f] = (m1.get(f, nullid), m2[f], mode)
154 s = 1
155 s = 1
155 # are we clobbering?
156 # are we clobbering?
156 # is remote's version newer?
157 # is remote's version newer?
157 # or are we going back in time?
158 # or are we going back in time?
158 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
159 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
159 repo.ui.debug(_(" remote %s is newer, get\n") % f)
160 repo.ui.debug(_(" remote %s is newer, get\n") % f)
160 get[f] = m2[f]
161 get[f] = m2[f]
161 s = 1
162 s = 1
162 elif f in umap or f in added:
163 elif f in umap or f in added:
163 # this unknown file is the same as the checkout
164 # this unknown file is the same as the checkout
164 # we need to reset the dirstate if the file was added
165 # we need to reset the dirstate if the file was added
165 get[f] = m2[f]
166 get[f] = m2[f]
166
167
167 if not s and mfw[f] != mf2[f]:
168 if not s and mfw[f] != mf2[f]:
168 if force:
169 if force:
169 repo.ui.debug(_(" updating permissions for %s\n") % f)
170 repo.ui.debug(_(" updating permissions for %s\n") % f)
170 util.set_exec(repo.wjoin(f), mf2[f])
171 util.set_exec(repo.wjoin(f), mf2[f])
171 else:
172 else:
172 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
173 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
173 mode = ((a^b) | (a^c)) ^ a
174 mode = ((a^b) | (a^c)) ^ a
174 if mode != b:
175 if mode != b:
175 repo.ui.debug(_(" updating permissions for %s\n")
176 repo.ui.debug(_(" updating permissions for %s\n")
176 % f)
177 % f)
177 util.set_exec(repo.wjoin(f), mode)
178 util.set_exec(repo.wjoin(f), mode)
178 del m2[f]
179 del m2[f]
179 elif f in ma:
180 elif f in ma:
180 if n != ma[f]:
181 if n != ma[f]:
181 r = _("d")
182 r = _("d")
182 if not force and (linear_path or allow):
183 if not force and (linear_path or allow):
183 r = repo.ui.prompt(
184 r = repo.ui.prompt(
184 (_(" local changed %s which remote deleted\n") % f) +
185 (_(" local changed %s which remote deleted\n") % f) +
185 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
186 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
186 if r == _("d"):
187 if r == _("d"):
187 remove.append(f)
188 remove.append(f)
188 else:
189 else:
189 repo.ui.debug(_("other deleted %s\n") % f)
190 repo.ui.debug(_("other deleted %s\n") % f)
190 remove.append(f) # other deleted it
191 remove.append(f) # other deleted it
191 else:
192 else:
192 # file is created on branch or in working directory
193 # file is created on branch or in working directory
193 if force and f not in umap:
194 if force and f not in umap:
194 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
195 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
195 remove.append(f)
196 remove.append(f)
196 elif n == m1.get(f, nullid): # same as parent
197 elif n == m1.get(f, nullid): # same as parent
197 if p2 == pa: # going backwards?
198 if p2 == pa: # going backwards?
198 repo.ui.debug(_("remote deleted %s\n") % f)
199 repo.ui.debug(_("remote deleted %s\n") % f)
199 remove.append(f)
200 remove.append(f)
200 else:
201 else:
201 repo.ui.debug(_("local modified %s, keeping\n") % f)
202 repo.ui.debug(_("local modified %s, keeping\n") % f)
202 else:
203 else:
203 repo.ui.debug(_("working dir created %s, keeping\n") % f)
204 repo.ui.debug(_("working dir created %s, keeping\n") % f)
204
205
205 for f, n in m2.iteritems():
206 for f, n in m2.iteritems():
206 if choose and not choose(f):
207 if choose and not choose(f):
207 continue
208 continue
208 if f[0] == "/":
209 if f[0] == "/":
209 continue
210 continue
210 if f in ma and n != ma[f]:
211 if f in ma and n != ma[f]:
211 r = _("k")
212 r = _("k")
212 if not force and (linear_path or allow):
213 if not force and (linear_path or allow):
213 r = repo.ui.prompt(
214 r = repo.ui.prompt(
214 (_("remote changed %s which local deleted\n") % f) +
215 (_("remote changed %s which local deleted\n") % f) +
215 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
216 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
216 if r == _("k"):
217 if r == _("k"):
217 get[f] = n
218 get[f] = n
218 elif f not in ma:
219 elif f not in ma:
219 repo.ui.debug(_("remote created %s\n") % f)
220 repo.ui.debug(_("remote created %s\n") % f)
220 get[f] = n
221 get[f] = n
221 else:
222 else:
222 if force or p2 == pa: # going backwards?
223 if force or p2 == pa: # going backwards?
223 repo.ui.debug(_("local deleted %s, recreating\n") % f)
224 repo.ui.debug(_("local deleted %s, recreating\n") % f)
224 get[f] = n
225 get[f] = n
225 else:
226 else:
226 repo.ui.debug(_("local deleted %s\n") % f)
227 repo.ui.debug(_("local deleted %s\n") % f)
227
228
228 del mw, m1, m2, ma
229 del mw, m1, m2, ma
229
230
230 if force:
231 if force:
231 for f in merge:
232 for f in merge:
232 get[f] = merge[f][1]
233 get[f] = merge[f][1]
233 merge = {}
234 merge = {}
234
235
235 if linear_path or force:
236 if linear_path or force:
236 # we don't need to do any magic, just jump to the new rev
237 # we don't need to do any magic, just jump to the new rev
237 branch_merge = False
238 branch_merge = False
238 p1, p2 = p2, nullid
239 p1, p2 = p2, nullid
239 else:
240 else:
240 if not allow:
241 if not allow:
241 repo.ui.status(_("this update spans a branch"
242 repo.ui.status(_("this update spans a branch"
242 " affecting the following files:\n"))
243 " affecting the following files:\n"))
243 fl = merge.keys() + get.keys()
244 fl = merge.keys() + get.keys()
244 fl.sort()
245 fl.sort()
245 for f in fl:
246 for f in fl:
246 cf = ""
247 cf = ""
247 if f in merge:
248 if f in merge:
248 cf = _(" (resolve)")
249 cf = _(" (resolve)")
249 repo.ui.status(" %s%s\n" % (f, cf))
250 repo.ui.status(" %s%s\n" % (f, cf))
250 repo.ui.warn(_("aborting update spanning branches!\n"))
251 repo.ui.warn(_("aborting update spanning branches!\n"))
251 repo.ui.status(_("(use 'hg merge' to merge across branches"
252 repo.ui.status(_("(use 'hg merge' to merge across branches"
252 " or 'hg update -C' to lose changes)\n"))
253 " or 'hg update -C' to lose changes)\n"))
253 return 1
254 return 1
254 branch_merge = True
255 branch_merge = True
255
256
256 xp1 = hex(p1)
257 xp1 = hex(p1)
257 xp2 = hex(p2)
258 xp2 = hex(p2)
258 if p2 == nullid: xxp2 = ''
259 if p2 == nullid: xxp2 = ''
259 else: xxp2 = xp2
260 else: xxp2 = xp2
260
261
261 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
262 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
262
263
263 # get the files we don't need to change
264 # get the files we don't need to change
264 files = get.keys()
265 files = get.keys()
265 files.sort()
266 files.sort()
266 for f in files:
267 for f in files:
267 if f[0] == "/":
268 if f[0] == "/":
268 continue
269 continue
269 repo.ui.note(_("getting %s\n") % f)
270 repo.ui.note(_("getting %s\n") % f)
270 t = repo.file(f).read(get[f])
271 t = repo.file(f).read(get[f])
271 repo.wwrite(f, t)
272 repo.wwrite(f, t)
272 util.set_exec(repo.wjoin(f), mf2[f])
273 util.set_exec(repo.wjoin(f), mf2[f])
273 if moddirstate:
274 if moddirstate:
274 if branch_merge:
275 if branch_merge:
275 repo.dirstate.update([f], 'n', st_mtime=-1)
276 repo.dirstate.update([f], 'n', st_mtime=-1)
276 else:
277 else:
277 repo.dirstate.update([f], 'n')
278 repo.dirstate.update([f], 'n')
278
279
279 # merge the tricky bits
280 # merge the tricky bits
280 failedmerge = []
281 failedmerge = []
281 files = merge.keys()
282 files = merge.keys()
282 files.sort()
283 files.sort()
283 for f in files:
284 for f in files:
284 repo.ui.status(_("merging %s\n") % f)
285 repo.ui.status(_("merging %s\n") % f)
285 my, other, flag = merge[f]
286 my, other, flag = merge[f]
286 ret = merge3(repo, f, my, other, xp1, xp2)
287 ret = merge3(repo, f, my, other, xp1, xp2)
287 if ret:
288 if ret:
288 err = True
289 err = True
289 failedmerge.append(f)
290 failedmerge.append(f)
290 util.set_exec(repo.wjoin(f), flag)
291 util.set_exec(repo.wjoin(f), flag)
291 if moddirstate:
292 if moddirstate:
292 if branch_merge:
293 if branch_merge:
293 # We've done a branch merge, mark this file as merged
294 # We've done a branch merge, mark this file as merged
294 # so that we properly record the merger later
295 # so that we properly record the merger later
295 repo.dirstate.update([f], 'm')
296 repo.dirstate.update([f], 'm')
296 else:
297 else:
297 # We've update-merged a locally modified file, so
298 # We've update-merged a locally modified file, so
298 # we set the dirstate to emulate a normal checkout
299 # we set the dirstate to emulate a normal checkout
299 # of that file some time in the past. Thus our
300 # of that file some time in the past. Thus our
300 # merge will appear as a normal local file
301 # merge will appear as a normal local file
301 # modification.
302 # modification.
302 f_len = len(repo.file(f).read(other))
303 f_len = len(repo.file(f).read(other))
303 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
304 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
304
305
305 remove.sort()
306 remove.sort()
306 for f in remove:
307 for f in remove:
307 repo.ui.note(_("removing %s\n") % f)
308 repo.ui.note(_("removing %s\n") % f)
308 util.audit_path(f)
309 util.audit_path(f)
309 try:
310 try:
310 util.unlink(repo.wjoin(f))
311 util.unlink(repo.wjoin(f))
311 except OSError, inst:
312 except OSError, inst:
312 if inst.errno != errno.ENOENT:
313 if inst.errno != errno.ENOENT:
313 repo.ui.warn(_("update failed to remove %s: %s!\n") %
314 repo.ui.warn(_("update failed to remove %s: %s!\n") %
314 (f, inst.strerror))
315 (f, inst.strerror))
315 if moddirstate:
316 if moddirstate:
316 if branch_merge:
317 if branch_merge:
317 repo.dirstate.update(remove, 'r')
318 repo.dirstate.update(remove, 'r')
318 else:
319 else:
319 repo.dirstate.forget(remove)
320 repo.dirstate.forget(remove)
320
321
321 if moddirstate:
322 if moddirstate:
322 repo.dirstate.setparents(p1, p2)
323 repo.dirstate.setparents(p1, p2)
323
324
324 if show_stats:
325 if show_stats:
325 stats = ((len(get), _("updated")),
326 stats = ((len(get), _("updated")),
326 (len(merge) - len(failedmerge), _("merged")),
327 (len(merge) - len(failedmerge), _("merged")),
327 (len(remove), _("removed")),
328 (len(remove), _("removed")),
328 (len(failedmerge), _("unresolved")))
329 (len(failedmerge), _("unresolved")))
329 note = ", ".join([_("%d files %s") % s for s in stats])
330 note = ", ".join([_("%d files %s") % s for s in stats])
330 repo.ui.status("%s\n" % note)
331 repo.ui.status("%s\n" % note)
331 if moddirstate:
332 if moddirstate:
332 if branch_merge:
333 if branch_merge:
333 if failedmerge:
334 if failedmerge:
334 repo.ui.status(_("There are unresolved merges,"
335 repo.ui.status(_("There are unresolved merges,"
335 " you can redo the full merge using:\n"
336 " you can redo the full merge using:\n"
336 " hg update -C %s\n"
337 " hg update -C %s\n"
337 " hg merge %s\n"
338 " hg merge %s\n"
338 % (repo.changelog.rev(p1),
339 % (repo.changelog.rev(p1),
339 repo.changelog.rev(p2))))
340 repo.changelog.rev(p2))))
340 else:
341 elif remind:
341 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
342 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
342 elif failedmerge:
343 elif failedmerge:
343 repo.ui.status(_("There are unresolved merges with"
344 repo.ui.status(_("There are unresolved merges with"
344 " locally modified files.\n"))
345 " locally modified files.\n"))
345
346
346 repo.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
347 repo.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
347 return err
348 return err
348
349
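A minimal sketch (not part of the diff itself) of the exec-bit rule the update code above applies as mode = ((a^b) | (a^c)) ^ a: the ancestor's flag survives unless the working copy or the remote side changed it, in which case the changed value wins. The helper name is purely illustrative.

    # a = ancestor flag, b = working-dir flag, c = remote flag (0 or 1)
    def merge_exec_bit(a, b, c):
        return ((a ^ b) | (a ^ c)) ^ a

    assert merge_exec_bit(0, 0, 0) == 0   # nobody touched the bit
    assert merge_exec_bit(0, 1, 0) == 1   # we turned it on, keep it
    assert merge_exec_bit(1, 1, 0) == 0   # they turned it off, drop it
    assert merge_exec_bit(1, 0, 0) == 0   # we turned it off, drop it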
@@ -1,1010 +1,1016
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 from i18n import gettext as _
13 from i18n import gettext as _
14 from demandload import *
14 from demandload import *
15 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
15 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
16 demandload(globals(), "os threading time")
16 demandload(globals(), "os threading time")
17
17
18 # used by parsedate
18 # used by parsedate
19 defaultdateformats = ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
19 defaultdateformats = ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
20 '%a %b %d %H:%M:%S %Y')
20 '%a %b %d %H:%M:%S %Y')
21
21
22 class SignalInterrupt(Exception):
22 class SignalInterrupt(Exception):
23 """Exception raised on SIGTERM and SIGHUP."""
23 """Exception raised on SIGTERM and SIGHUP."""
24
24
25 def pipefilter(s, cmd):
25 def pipefilter(s, cmd):
26 '''filter string S through command CMD, returning its output'''
26 '''filter string S through command CMD, returning its output'''
27 (pout, pin) = popen2.popen2(cmd, -1, 'b')
27 (pout, pin) = popen2.popen2(cmd, -1, 'b')
28 def writer():
28 def writer():
29 try:
29 try:
30 pin.write(s)
30 pin.write(s)
31 pin.close()
31 pin.close()
32 except IOError, inst:
32 except IOError, inst:
33 if inst.errno != errno.EPIPE:
33 if inst.errno != errno.EPIPE:
34 raise
34 raise
35
35
36 # we should use select instead on UNIX, but this will work on most
36 # we should use select instead on UNIX, but this will work on most
37 # systems, including Windows
37 # systems, including Windows
38 w = threading.Thread(target=writer)
38 w = threading.Thread(target=writer)
39 w.start()
39 w.start()
40 f = pout.read()
40 f = pout.read()
41 pout.close()
41 pout.close()
42 w.join()
42 w.join()
43 return f
43 return f
44
44
45 def tempfilter(s, cmd):
45 def tempfilter(s, cmd):
46 '''filter string S through a pair of temporary files with CMD.
46 '''filter string S through a pair of temporary files with CMD.
47 CMD is used as a template to create the real command to be run,
47 CMD is used as a template to create the real command to be run,
48 with the strings INFILE and OUTFILE replaced by the real names of
48 with the strings INFILE and OUTFILE replaced by the real names of
49 the temporary files generated.'''
49 the temporary files generated.'''
50 inname, outname = None, None
50 inname, outname = None, None
51 try:
51 try:
52 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
52 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
53 fp = os.fdopen(infd, 'wb')
53 fp = os.fdopen(infd, 'wb')
54 fp.write(s)
54 fp.write(s)
55 fp.close()
55 fp.close()
56 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
56 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
57 os.close(outfd)
57 os.close(outfd)
58 cmd = cmd.replace('INFILE', inname)
58 cmd = cmd.replace('INFILE', inname)
59 cmd = cmd.replace('OUTFILE', outname)
59 cmd = cmd.replace('OUTFILE', outname)
60 code = os.system(cmd)
60 code = os.system(cmd)
61 if code: raise Abort(_("command '%s' failed: %s") %
61 if code: raise Abort(_("command '%s' failed: %s") %
62 (cmd, explain_exit(code)))
62 (cmd, explain_exit(code)))
63 return open(outname, 'rb').read()
63 return open(outname, 'rb').read()
64 finally:
64 finally:
65 try:
65 try:
66 if inname: os.unlink(inname)
66 if inname: os.unlink(inname)
67 except: pass
67 except: pass
68 try:
68 try:
69 if outname: os.unlink(outname)
69 if outname: os.unlink(outname)
70 except: pass
70 except: pass
71
71
72 filtertable = {
72 filtertable = {
73 'tempfile:': tempfilter,
73 'tempfile:': tempfilter,
74 'pipe:': pipefilter,
74 'pipe:': pipefilter,
75 }
75 }
76
76
77 def filter(s, cmd):
77 def filter(s, cmd):
78 "filter a string through a command that transforms its input to its output"
78 "filter a string through a command that transforms its input to its output"
79 for name, fn in filtertable.iteritems():
79 for name, fn in filtertable.iteritems():
80 if cmd.startswith(name):
80 if cmd.startswith(name):
81 return fn(s, cmd[len(name):].lstrip())
81 return fn(s, cmd[len(name):].lstrip())
82 return pipefilter(s, cmd)
82 return pipefilter(s, cmd)
83
83
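A rough usage sketch of the dispatch above; the shell commands are illustrative only and assume a POSIX environment with tr and sed on PATH.

    # a 'pipe:' or 'tempfile:' prefix picks the matching helper;
    # anything else falls through to pipefilter
    filter("some text\n", "pipe: tr a-z A-Z")
    filter("some text\n", "tempfile: sed -e s/text/data/ INFILE > OUTFILE")
    filter("some text\n", "tr a-z A-Z")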
84 def find_in_path(name, path, default=None):
84 def find_in_path(name, path, default=None):
85 '''find name in search path. path can be string (will be split
85 '''find name in search path. path can be string (will be split
86 with os.pathsep), or iterable thing that returns strings. if name
86 with os.pathsep), or iterable thing that returns strings. if name
87 found, return path to name. else return default.'''
87 found, return path to name. else return default.'''
88 if isinstance(path, str):
88 if isinstance(path, str):
89 path = path.split(os.pathsep)
89 path = path.split(os.pathsep)
90 for p in path:
90 for p in path:
91 p_name = os.path.join(p, name)
91 p_name = os.path.join(p, name)
92 if os.path.exists(p_name):
92 if os.path.exists(p_name):
93 return p_name
93 return p_name
94 return default
94 return default
95
95
96 def patch(strip, patchname, ui, cwd=None):
96 def patch(strip, patchname, ui, cwd=None):
97 """apply the patch <patchname> to the working directory.
97 """apply the patch <patchname> to the working directory.
98 a list of patched files is returned"""
98 a list of patched files is returned"""
99 patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
99 patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
100 args = []
100 args = []
101 if cwd:
101 if cwd:
102 args.append('-d "%s"' % cwd)
102 args.append('-d %s' % shellquote(cwd))
103 fp = os.popen('%s %s -p%d < "%s"' % (patcher, ' '.join(args), strip,
103 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
104 patchname))
104 shellquote(patchname)))
105 files = {}
105 files = {}
106 for line in fp:
106 for line in fp:
107 line = line.rstrip()
107 line = line.rstrip()
108 ui.status("%s\n" % line)
108 ui.status("%s\n" % line)
109 if line.startswith('patching file '):
109 if line.startswith('patching file '):
110 pf = parse_patch_output(line)
110 pf = parse_patch_output(line)
111 files.setdefault(pf, 1)
111 files.setdefault(pf, 1)
112 code = fp.close()
112 code = fp.close()
113 if code:
113 if code:
114 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
114 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
115 return files.keys()
115 return files.keys()
116
116
117 def binary(s):
117 def binary(s):
118 """return true if a string is binary data using diff's heuristic"""
118 """return true if a string is binary data using diff's heuristic"""
119 if s and '\0' in s[:4096]:
119 if s and '\0' in s[:4096]:
120 return True
120 return True
121 return False
121 return False
122
122
123 def unique(g):
123 def unique(g):
124 """return the uniq elements of iterable g"""
124 """return the uniq elements of iterable g"""
125 seen = {}
125 seen = {}
126 for f in g:
126 for f in g:
127 if f not in seen:
127 if f not in seen:
128 seen[f] = 1
128 seen[f] = 1
129 yield f
129 yield f
130
130
131 class Abort(Exception):
131 class Abort(Exception):
132 """Raised if a command needs to print an error and exit."""
132 """Raised if a command needs to print an error and exit."""
133
133
134 def always(fn): return True
134 def always(fn): return True
135 def never(fn): return False
135 def never(fn): return False
136
136
137 def patkind(name, dflt_pat='glob'):
137 def patkind(name, dflt_pat='glob'):
138 """Split a string into an optional pattern kind prefix and the
138 """Split a string into an optional pattern kind prefix and the
139 actual pattern."""
139 actual pattern."""
140 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
140 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
141 if name.startswith(prefix + ':'): return name.split(':', 1)
141 if name.startswith(prefix + ':'): return name.split(':', 1)
142 return dflt_pat, name
142 return dflt_pat, name
143
143
144 def globre(pat, head='^', tail='$'):
144 def globre(pat, head='^', tail='$'):
145 "convert a glob pattern into a regexp"
145 "convert a glob pattern into a regexp"
146 i, n = 0, len(pat)
146 i, n = 0, len(pat)
147 res = ''
147 res = ''
148 group = False
148 group = False
149 def peek(): return i < n and pat[i]
149 def peek(): return i < n and pat[i]
150 while i < n:
150 while i < n:
151 c = pat[i]
151 c = pat[i]
152 i = i+1
152 i = i+1
153 if c == '*':
153 if c == '*':
154 if peek() == '*':
154 if peek() == '*':
155 i += 1
155 i += 1
156 res += '.*'
156 res += '.*'
157 else:
157 else:
158 res += '[^/]*'
158 res += '[^/]*'
159 elif c == '?':
159 elif c == '?':
160 res += '.'
160 res += '.'
161 elif c == '[':
161 elif c == '[':
162 j = i
162 j = i
163 if j < n and pat[j] in '!]':
163 if j < n and pat[j] in '!]':
164 j += 1
164 j += 1
165 while j < n and pat[j] != ']':
165 while j < n and pat[j] != ']':
166 j += 1
166 j += 1
167 if j >= n:
167 if j >= n:
168 res += '\\['
168 res += '\\['
169 else:
169 else:
170 stuff = pat[i:j].replace('\\','\\\\')
170 stuff = pat[i:j].replace('\\','\\\\')
171 i = j + 1
171 i = j + 1
172 if stuff[0] == '!':
172 if stuff[0] == '!':
173 stuff = '^' + stuff[1:]
173 stuff = '^' + stuff[1:]
174 elif stuff[0] == '^':
174 elif stuff[0] == '^':
175 stuff = '\\' + stuff
175 stuff = '\\' + stuff
176 res = '%s[%s]' % (res, stuff)
176 res = '%s[%s]' % (res, stuff)
177 elif c == '{':
177 elif c == '{':
178 group = True
178 group = True
179 res += '(?:'
179 res += '(?:'
180 elif c == '}' and group:
180 elif c == '}' and group:
181 res += ')'
181 res += ')'
182 group = False
182 group = False
183 elif c == ',' and group:
183 elif c == ',' and group:
184 res += '|'
184 res += '|'
185 elif c == '\\':
185 elif c == '\\':
186 p = peek()
186 p = peek()
187 if p:
187 if p:
188 i += 1
188 i += 1
189 res += re.escape(p)
189 res += re.escape(p)
190 else:
190 else:
191 res += re.escape(c)
191 res += re.escape(c)
192 else:
192 else:
193 res += re.escape(c)
193 res += re.escape(c)
194 return head + res + tail
194 return head + res + tail
195
195
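A few translations worked out from the glob compiler above (slash-free examples, since re.escape also backslash-escapes '/'):

    # globre('*.py')     -> '^[^/]*\.py$'     '*' stays within one path segment
    # globre('**.txt')   -> '^.*\.txt$'       '**' may cross '/' boundaries
    # globre('file.?')   -> '^file\..$'       '?' matches exactly one character
    # globre('{a,b}.c')  -> '^(?:a|b)\.c$'    brace groups become alternation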
196 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
196 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
197
197
198 def pathto(n1, n2):
198 def pathto(n1, n2):
199 '''return the relative path from one place to another.
199 '''return the relative path from one place to another.
200 this returns a path in the form used by the local filesystem, not hg.'''
200 this returns a path in the form used by the local filesystem, not hg.'''
201 if not n1: return localpath(n2)
201 if not n1: return localpath(n2)
202 a, b = n1.split('/'), n2.split('/')
202 a, b = n1.split('/'), n2.split('/')
203 a.reverse()
203 a.reverse()
204 b.reverse()
204 b.reverse()
205 while a and b and a[-1] == b[-1]:
205 while a and b and a[-1] == b[-1]:
206 a.pop()
206 a.pop()
207 b.pop()
207 b.pop()
208 b.reverse()
208 b.reverse()
209 return os.sep.join((['..'] * len(a)) + b)
209 return os.sep.join((['..'] * len(a)) + b)
210
210
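Quick examples for pathto above, assuming a POSIX os.sep of '/':

    # pathto('a/b/c', 'a/b/d')  -> '../d'        shared prefix 'a/b' is dropped
    # pathto('foo/bar', 'baz')  -> '../../baz'   two levels up, then down to baz
    # pathto('', 'x/y')         -> localpath('x/y')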
211 def canonpath(root, cwd, myname):
211 def canonpath(root, cwd, myname):
212 """return the canonical path of myname, given cwd and root"""
212 """return the canonical path of myname, given cwd and root"""
213 if root == os.sep:
213 if root == os.sep:
214 rootsep = os.sep
214 rootsep = os.sep
215 elif root.endswith(os.sep):
215 elif root.endswith(os.sep):
216 rootsep = root
216 rootsep = root
217 else:
217 else:
218 rootsep = root + os.sep
218 rootsep = root + os.sep
219 name = myname
219 name = myname
220 if not os.path.isabs(name):
220 if not os.path.isabs(name):
221 name = os.path.join(root, cwd, name)
221 name = os.path.join(root, cwd, name)
222 name = os.path.normpath(name)
222 name = os.path.normpath(name)
223 if name != rootsep and name.startswith(rootsep):
223 if name != rootsep and name.startswith(rootsep):
224 name = name[len(rootsep):]
224 name = name[len(rootsep):]
225 audit_path(name)
225 audit_path(name)
226 return pconvert(name)
226 return pconvert(name)
227 elif name == root:
227 elif name == root:
228 return ''
228 return ''
229 else:
229 else:
230 # Determine whether `name' is in the hierarchy at or beneath `root',
230 # Determine whether `name' is in the hierarchy at or beneath `root',
231 # by iterating name=dirname(name) until that causes no change (can't
231 # by iterating name=dirname(name) until that causes no change (can't
232 # check name == '/', because that doesn't work on windows). For each
232 # check name == '/', because that doesn't work on windows). For each
233 # `name', compare dev/inode numbers. If they match, the list `rel'
233 # `name', compare dev/inode numbers. If they match, the list `rel'
234 # holds the reversed list of components making up the relative file
234 # holds the reversed list of components making up the relative file
235 # name we want.
235 # name we want.
236 root_st = os.stat(root)
236 root_st = os.stat(root)
237 rel = []
237 rel = []
238 while True:
238 while True:
239 try:
239 try:
240 name_st = os.stat(name)
240 name_st = os.stat(name)
241 except OSError:
241 except OSError:
242 break
242 break
243 if samestat(name_st, root_st):
243 if samestat(name_st, root_st):
244 rel.reverse()
244 rel.reverse()
245 name = os.path.join(*rel)
245 name = os.path.join(*rel)
246 audit_path(name)
246 audit_path(name)
247 return pconvert(name)
247 return pconvert(name)
248 dirname, basename = os.path.split(name)
248 dirname, basename = os.path.split(name)
249 rel.append(basename)
249 rel.append(basename)
250 if dirname == name:
250 if dirname == name:
251 break
251 break
252 name = dirname
252 name = dirname
253
253
254 raise Abort('%s not under root' % myname)
254 raise Abort('%s not under root' % myname)
255
255
256 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
256 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
257 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
257 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
258
258
259 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
259 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
260 if os.name == 'nt':
260 if os.name == 'nt':
261 dflt_pat = 'glob'
261 dflt_pat = 'glob'
262 else:
262 else:
263 dflt_pat = 'relpath'
263 dflt_pat = 'relpath'
264 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
264 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
265
265
266 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
266 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
267 """build a function to match a set of file patterns
267 """build a function to match a set of file patterns
268
268
269 arguments:
269 arguments:
270 canonroot - the canonical root of the tree you're matching against
270 canonroot - the canonical root of the tree you're matching against
271 cwd - the current working directory, if relevant
271 cwd - the current working directory, if relevant
272 names - patterns to find
272 names - patterns to find
273 inc - patterns to include
273 inc - patterns to include
274 exc - patterns to exclude
274 exc - patterns to exclude
275 head - a regex to prepend to patterns to control whether a match is rooted
275 head - a regex to prepend to patterns to control whether a match is rooted
276
276
277 a pattern is one of:
277 a pattern is one of:
278 'glob:<rooted glob>'
278 'glob:<rooted glob>'
279 're:<rooted regexp>'
279 're:<rooted regexp>'
280 'path:<rooted path>'
280 'path:<rooted path>'
281 'relglob:<relative glob>'
281 'relglob:<relative glob>'
282 'relpath:<relative path>'
282 'relpath:<relative path>'
283 'relre:<relative regexp>'
283 'relre:<relative regexp>'
284 '<rooted path or regexp>'
284 '<rooted path or regexp>'
285
285
286 returns:
286 returns:
287 a 3-tuple containing
287 a 3-tuple containing
288 - list of explicit non-pattern names passed in
288 - list of explicit non-pattern names passed in
289 - a bool match(filename) function
289 - a bool match(filename) function
290 - a bool indicating if any patterns were passed in
290 - a bool indicating if any patterns were passed in
291
291
292 todo:
292 todo:
293 make head regex a rooted bool
293 make head regex a rooted bool
294 """
294 """
295
295
296 def contains_glob(name):
296 def contains_glob(name):
297 for c in name:
297 for c in name:
298 if c in _globchars: return True
298 if c in _globchars: return True
299 return False
299 return False
300
300
301 def regex(kind, name, tail):
301 def regex(kind, name, tail):
302 '''convert a pattern into a regular expression'''
302 '''convert a pattern into a regular expression'''
303 if kind == 're':
303 if kind == 're':
304 return name
304 return name
305 elif kind == 'path':
305 elif kind == 'path':
306 return '^' + re.escape(name) + '(?:/|$)'
306 return '^' + re.escape(name) + '(?:/|$)'
307 elif kind == 'relglob':
307 elif kind == 'relglob':
308 return head + globre(name, '(?:|.*/)', tail)
308 return head + globre(name, '(?:|.*/)', tail)
309 elif kind == 'relpath':
309 elif kind == 'relpath':
310 return head + re.escape(name) + tail
310 return head + re.escape(name) + tail
311 elif kind == 'relre':
311 elif kind == 'relre':
312 if name.startswith('^'):
312 if name.startswith('^'):
313 return name
313 return name
314 return '.*' + name
314 return '.*' + name
315 return head + globre(name, '', tail)
315 return head + globre(name, '', tail)
316
316
317 def matchfn(pats, tail):
317 def matchfn(pats, tail):
318 """build a matching function from a set of patterns"""
318 """build a matching function from a set of patterns"""
319 if not pats:
319 if not pats:
320 return
320 return
321 matches = []
321 matches = []
322 for k, p in pats:
322 for k, p in pats:
323 try:
323 try:
324 pat = '(?:%s)' % regex(k, p, tail)
324 pat = '(?:%s)' % regex(k, p, tail)
325 matches.append(re.compile(pat).match)
325 matches.append(re.compile(pat).match)
326 except re.error:
326 except re.error:
327 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
327 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
328 else: raise Abort("invalid pattern (%s): %s" % (k, p))
328 else: raise Abort("invalid pattern (%s): %s" % (k, p))
329
329
330 def buildfn(text):
330 def buildfn(text):
331 for m in matches:
331 for m in matches:
332 r = m(text)
332 r = m(text)
333 if r:
333 if r:
334 return r
334 return r
335
335
336 return buildfn
336 return buildfn
337
337
338 def globprefix(pat):
338 def globprefix(pat):
339 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
339 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
340 root = []
340 root = []
341 for p in pat.split(os.sep):
341 for p in pat.split(os.sep):
342 if contains_glob(p): break
342 if contains_glob(p): break
343 root.append(p)
343 root.append(p)
344 return '/'.join(root)
344 return '/'.join(root)
345
345
346 pats = []
346 pats = []
347 files = []
347 files = []
348 roots = []
348 roots = []
349 for kind, name in [patkind(p, dflt_pat) for p in names]:
349 for kind, name in [patkind(p, dflt_pat) for p in names]:
350 if kind in ('glob', 'relpath'):
350 if kind in ('glob', 'relpath'):
351 name = canonpath(canonroot, cwd, name)
351 name = canonpath(canonroot, cwd, name)
352 if name == '':
352 if name == '':
353 kind, name = 'glob', '**'
353 kind, name = 'glob', '**'
354 if kind in ('glob', 'path', 're'):
354 if kind in ('glob', 'path', 're'):
355 pats.append((kind, name))
355 pats.append((kind, name))
356 if kind == 'glob':
356 if kind == 'glob':
357 root = globprefix(name)
357 root = globprefix(name)
358 if root: roots.append(root)
358 if root: roots.append(root)
359 elif kind == 'relpath':
359 elif kind == 'relpath':
360 files.append((kind, name))
360 files.append((kind, name))
361 roots.append(name)
361 roots.append(name)
362
362
363 patmatch = matchfn(pats, '$') or always
363 patmatch = matchfn(pats, '$') or always
364 filematch = matchfn(files, '(?:/|$)') or always
364 filematch = matchfn(files, '(?:/|$)') or always
365 incmatch = always
365 incmatch = always
366 if inc:
366 if inc:
367 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
367 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
368 incmatch = matchfn(inckinds, '(?:/|$)')
368 incmatch = matchfn(inckinds, '(?:/|$)')
369 excmatch = lambda fn: False
369 excmatch = lambda fn: False
370 if exc:
370 if exc:
371 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
371 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
372 excmatch = matchfn(exckinds, '(?:/|$)')
372 excmatch = matchfn(exckinds, '(?:/|$)')
373
373
374 return (roots,
374 return (roots,
375 lambda fn: (incmatch(fn) and not excmatch(fn) and
375 lambda fn: (incmatch(fn) and not excmatch(fn) and
376 (fn.endswith('/') or
376 (fn.endswith('/') or
377 (not pats and not files) or
377 (not pats and not files) or
378 (pats and patmatch(fn)) or
378 (pats and patmatch(fn)) or
379 (files and filematch(fn)))),
379 (files and filematch(fn)))),
380 (inc or exc or (pats and pats != [('glob', '**')])) and True)
380 (inc or exc or (pats and pats != [('glob', '**')])) and True)
381
381
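A hedged sketch of how the (roots, match, anypats) triple built above is consumed; the root path and patterns are made up, and 're:' patterns are used so no filesystem lookups are needed.

    roots, match, anypats = matcher('/repo', cwd='',
                                    names=['re:.*\\.py$'],
                                    exc=['re:.*_test\\.py$'])
    match('hgext/fetch.py')       # truthy: matches the pattern, not excluded
    match('tests/foo_test.py')    # falsy: knocked out by the exclude pattern
    anypats                       # True, since patterns were supplied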
382 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
382 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
383 '''enhanced shell command execution.
383 '''enhanced shell command execution.
384 run with environment maybe modified, maybe in different dir.
384 run with environment maybe modified, maybe in different dir.
385
385
386 if command fails and onerr is None, return status. if ui object,
386 if command fails and onerr is None, return status. if ui object,
387 print error message and return status, else raise onerr object as
387 print error message and return status, else raise onerr object as
388 exception.'''
388 exception.'''
389 def py2shell(val):
389 def py2shell(val):
390 'convert python object into string that is useful to shell'
390 'convert python object into string that is useful to shell'
391 if val in (None, False):
391 if val in (None, False):
392 return '0'
392 return '0'
393 if val == True:
393 if val == True:
394 return '1'
394 return '1'
395 return str(val)
395 return str(val)
396 oldenv = {}
396 oldenv = {}
397 for k in environ:
397 for k in environ:
398 oldenv[k] = os.environ.get(k)
398 oldenv[k] = os.environ.get(k)
399 if cwd is not None:
399 if cwd is not None:
400 oldcwd = os.getcwd()
400 oldcwd = os.getcwd()
401 try:
401 try:
402 for k, v in environ.iteritems():
402 for k, v in environ.iteritems():
403 os.environ[k] = py2shell(v)
403 os.environ[k] = py2shell(v)
404 if cwd is not None and oldcwd != cwd:
404 if cwd is not None and oldcwd != cwd:
405 os.chdir(cwd)
405 os.chdir(cwd)
406 rc = os.system(cmd)
406 rc = os.system(cmd)
407 if rc and onerr:
407 if rc and onerr:
408 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
408 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
409 explain_exit(rc)[0])
409 explain_exit(rc)[0])
410 if errprefix:
410 if errprefix:
411 errmsg = '%s: %s' % (errprefix, errmsg)
411 errmsg = '%s: %s' % (errprefix, errmsg)
412 try:
412 try:
413 onerr.warn(errmsg + '\n')
413 onerr.warn(errmsg + '\n')
414 except AttributeError:
414 except AttributeError:
415 raise onerr(errmsg)
415 raise onerr(errmsg)
416 return rc
416 return rc
417 finally:
417 finally:
418 for k, v in oldenv.iteritems():
418 for k, v in oldenv.iteritems():
419 if v is None:
419 if v is None:
420 del os.environ[k]
420 del os.environ[k]
421 else:
421 else:
422 os.environ[k] = v
422 os.environ[k] = v
423 if cwd is not None and oldcwd != cwd:
423 if cwd is not None and oldcwd != cwd:
424 os.chdir(oldcwd)
424 os.chdir(oldcwd)
425
425
426 def rename(src, dst):
426 def rename(src, dst):
427 """forcibly rename a file"""
427 """forcibly rename a file"""
428 try:
428 try:
429 os.rename(src, dst)
429 os.rename(src, dst)
430 except OSError, err:
430 except OSError, err:
431 # on windows, rename to existing file is not allowed, so we
431 # on windows, rename to existing file is not allowed, so we
432 # must delete destination first. but if file is open, unlink
432 # must delete destination first. but if file is open, unlink
433 # schedules it for delete but does not delete it. rename
433 # schedules it for delete but does not delete it. rename
434 # happens immediately even for open files, so we create
434 # happens immediately even for open files, so we create
435 # temporary file, delete it, rename destination to that name,
435 # temporary file, delete it, rename destination to that name,
436 # then delete that. then rename is safe to do.
436 # then delete that. then rename is safe to do.
437 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
437 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
438 os.close(fd)
438 os.close(fd)
439 os.unlink(temp)
439 os.unlink(temp)
440 os.rename(dst, temp)
440 os.rename(dst, temp)
441 os.unlink(temp)
441 os.unlink(temp)
442 os.rename(src, dst)
442 os.rename(src, dst)
443
443
444 def unlink(f):
444 def unlink(f):
445 """unlink and remove the directory if it is empty"""
445 """unlink and remove the directory if it is empty"""
446 os.unlink(f)
446 os.unlink(f)
447 # try removing directories that might now be empty
447 # try removing directories that might now be empty
448 try:
448 try:
449 os.removedirs(os.path.dirname(f))
449 os.removedirs(os.path.dirname(f))
450 except OSError:
450 except OSError:
451 pass
451 pass
452
452
453 def copyfiles(src, dst, hardlink=None):
453 def copyfiles(src, dst, hardlink=None):
454 """Copy a directory tree using hardlinks if possible"""
454 """Copy a directory tree using hardlinks if possible"""
455
455
456 if hardlink is None:
456 if hardlink is None:
457 hardlink = (os.stat(src).st_dev ==
457 hardlink = (os.stat(src).st_dev ==
458 os.stat(os.path.dirname(dst)).st_dev)
458 os.stat(os.path.dirname(dst)).st_dev)
459
459
460 if os.path.isdir(src):
460 if os.path.isdir(src):
461 os.mkdir(dst)
461 os.mkdir(dst)
462 for name in os.listdir(src):
462 for name in os.listdir(src):
463 srcname = os.path.join(src, name)
463 srcname = os.path.join(src, name)
464 dstname = os.path.join(dst, name)
464 dstname = os.path.join(dst, name)
465 copyfiles(srcname, dstname, hardlink)
465 copyfiles(srcname, dstname, hardlink)
466 else:
466 else:
467 if hardlink:
467 if hardlink:
468 try:
468 try:
469 os_link(src, dst)
469 os_link(src, dst)
470 except (IOError, OSError):
470 except (IOError, OSError):
471 hardlink = False
471 hardlink = False
472 shutil.copy(src, dst)
472 shutil.copy(src, dst)
473 else:
473 else:
474 shutil.copy(src, dst)
474 shutil.copy(src, dst)
475
475
476 def audit_path(path):
476 def audit_path(path):
477 """Abort if path contains dangerous components"""
477 """Abort if path contains dangerous components"""
478 parts = os.path.normcase(path).split(os.sep)
478 parts = os.path.normcase(path).split(os.sep)
479 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
479 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
480 or os.pardir in parts):
480 or os.pardir in parts):
481 raise Abort(_("path contains illegal component: %s\n") % path)
481 raise Abort(_("path contains illegal component: %s\n") % path)
482
482
483 def _makelock_file(info, pathname):
483 def _makelock_file(info, pathname):
484 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
484 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
485 os.write(ld, info)
485 os.write(ld, info)
486 os.close(ld)
486 os.close(ld)
487
487
488 def _readlock_file(pathname):
488 def _readlock_file(pathname):
489 return posixfile(pathname).read()
489 return posixfile(pathname).read()
490
490
491 def nlinks(pathname):
491 def nlinks(pathname):
492 """Return number of hardlinks for the given file."""
492 """Return number of hardlinks for the given file."""
493 return os.lstat(pathname).st_nlink
493 return os.lstat(pathname).st_nlink
494
494
495 if hasattr(os, 'link'):
495 if hasattr(os, 'link'):
496 os_link = os.link
496 os_link = os.link
497 else:
497 else:
498 def os_link(src, dst):
498 def os_link(src, dst):
499 raise OSError(0, _("Hardlinks not supported"))
499 raise OSError(0, _("Hardlinks not supported"))
500
500
501 def fstat(fp):
501 def fstat(fp):
502 '''stat file object that may not have fileno method.'''
502 '''stat file object that may not have fileno method.'''
503 try:
503 try:
504 return os.fstat(fp.fileno())
504 return os.fstat(fp.fileno())
505 except AttributeError:
505 except AttributeError:
506 return os.stat(fp.name)
506 return os.stat(fp.name)
507
507
508 posixfile = file
508 posixfile = file
509
509
510 def is_win_9x():
510 def is_win_9x():
511 '''return true if run on windows 95, 98 or me.'''
511 '''return true if run on windows 95, 98 or me.'''
512 try:
512 try:
513 return sys.getwindowsversion()[3] == 1
513 return sys.getwindowsversion()[3] == 1
514 except AttributeError:
514 except AttributeError:
515 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
515 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
516
516
517 getuser_fallback = None
517 getuser_fallback = None
518
518
519 def getuser():
519 def getuser():
520 '''return name of current user'''
520 '''return name of current user'''
521 try:
521 try:
522 return getpass.getuser()
522 return getpass.getuser()
523 except ImportError:
523 except ImportError:
524 # import of pwd will fail on windows - try fallback
524 # import of pwd will fail on windows - try fallback
525 if getuser_fallback:
525 if getuser_fallback:
526 return getuser_fallback()
526 return getuser_fallback()
527 # raised if win32api not available
527 # raised if win32api not available
528 raise Abort(_('user name not available - set USERNAME '
528 raise Abort(_('user name not available - set USERNAME '
529 'environment variable'))
529 'environment variable'))
530
530
531 # Platform specific variants
531 # Platform specific variants
532 if os.name == 'nt':
532 if os.name == 'nt':
533 demandload(globals(), "msvcrt")
533 demandload(globals(), "msvcrt")
534 nulldev = 'NUL:'
534 nulldev = 'NUL:'
535
535
536 class winstdout:
536 class winstdout:
537 '''stdout on windows misbehaves if sent through a pipe'''
537 '''stdout on windows misbehaves if sent through a pipe'''
538
538
539 def __init__(self, fp):
539 def __init__(self, fp):
540 self.fp = fp
540 self.fp = fp
541
541
542 def __getattr__(self, key):
542 def __getattr__(self, key):
543 return getattr(self.fp, key)
543 return getattr(self.fp, key)
544
544
545 def close(self):
545 def close(self):
546 try:
546 try:
547 self.fp.close()
547 self.fp.close()
548 except: pass
548 except: pass
549
549
550 def write(self, s):
550 def write(self, s):
551 try:
551 try:
552 return self.fp.write(s)
552 return self.fp.write(s)
553 except IOError, inst:
553 except IOError, inst:
554 if inst.errno != 0: raise
554 if inst.errno != 0: raise
555 self.close()
555 self.close()
556 raise IOError(errno.EPIPE, 'Broken pipe')
556 raise IOError(errno.EPIPE, 'Broken pipe')
557
557
558 sys.stdout = winstdout(sys.stdout)
558 sys.stdout = winstdout(sys.stdout)
559
559
560 def system_rcpath():
560 def system_rcpath():
561 try:
561 try:
562 return system_rcpath_win32()
562 return system_rcpath_win32()
563 except:
563 except:
564 return [r'c:\mercurial\mercurial.ini']
564 return [r'c:\mercurial\mercurial.ini']
565
565
566 def os_rcpath():
566 def os_rcpath():
567 '''return default os-specific hgrc search path'''
567 '''return default os-specific hgrc search path'''
568 path = system_rcpath()
568 path = system_rcpath()
569 path.append(user_rcpath())
569 path.append(user_rcpath())
570 userprofile = os.environ.get('USERPROFILE')
570 userprofile = os.environ.get('USERPROFILE')
571 if userprofile:
571 if userprofile:
572 path.append(os.path.join(userprofile, 'mercurial.ini'))
572 path.append(os.path.join(userprofile, 'mercurial.ini'))
573 return path
573 return path
574
574
575 def user_rcpath():
575 def user_rcpath():
576 '''return os-specific hgrc search path to the user dir'''
576 '''return os-specific hgrc search path to the user dir'''
577 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
577 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
578
578
579 def parse_patch_output(output_line):
579 def parse_patch_output(output_line):
580 """parses the output produced by patch and returns the file name"""
580 """parses the output produced by patch and returns the file name"""
581 pf = output_line[14:]
581 pf = output_line[14:]
582 if pf[0] == '`':
582 if pf[0] == '`':
583 pf = pf[1:-1] # Remove the quotes
583 pf = pf[1:-1] # Remove the quotes
584 return pf
584 return pf
585
585
586 def testpid(pid):
586 def testpid(pid):
587 '''return False if pid dead, True if running or not known'''
587 '''return False if pid dead, True if running or not known'''
588 return True
588 return True
589
589
590 def is_exec(f, last):
590 def is_exec(f, last):
591 return last
591 return last
592
592
593 def set_exec(f, mode):
593 def set_exec(f, mode):
594 pass
594 pass
595
595
596 def set_binary(fd):
596 def set_binary(fd):
597 msvcrt.setmode(fd.fileno(), os.O_BINARY)
597 msvcrt.setmode(fd.fileno(), os.O_BINARY)
598
598
599 def pconvert(path):
599 def pconvert(path):
600 return path.replace("\\", "/")
600 return path.replace("\\", "/")
601
601
602 def localpath(path):
602 def localpath(path):
603 return path.replace('/', '\\')
603 return path.replace('/', '\\')
604
604
605 def normpath(path):
605 def normpath(path):
606 return pconvert(os.path.normpath(path))
606 return pconvert(os.path.normpath(path))
607
607
608 makelock = _makelock_file
608 makelock = _makelock_file
609 readlock = _readlock_file
609 readlock = _readlock_file
610
610
611 def samestat(s1, s2):
611 def samestat(s1, s2):
612 return False
612 return False
613
613
614 def shellquote(s):
615 return '"%s"' % s.replace('"', '\\"')
616
614 def explain_exit(code):
617 def explain_exit(code):
615 return _("exited with status %d") % code, code
618 return _("exited with status %d") % code, code
616
619
617 try:
620 try:
618 # override functions with win32 versions if possible
621 # override functions with win32 versions if possible
619 from util_win32 import *
622 from util_win32 import *
620 if not is_win_9x():
623 if not is_win_9x():
621 posixfile = posixfile_nt
624 posixfile = posixfile_nt
622 except ImportError:
625 except ImportError:
623 pass
626 pass
624
627
625 else:
628 else:
626 nulldev = '/dev/null'
629 nulldev = '/dev/null'
627
630
628 def rcfiles(path):
631 def rcfiles(path):
629 rcs = [os.path.join(path, 'hgrc')]
632 rcs = [os.path.join(path, 'hgrc')]
630 rcdir = os.path.join(path, 'hgrc.d')
633 rcdir = os.path.join(path, 'hgrc.d')
631 try:
634 try:
632 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
635 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
633 if f.endswith(".rc")])
636 if f.endswith(".rc")])
634 except OSError, inst: pass
637 except OSError, inst: pass
635 return rcs
638 return rcs
636
639
637 def os_rcpath():
640 def os_rcpath():
638 '''return default os-specific hgrc search path'''
641 '''return default os-specific hgrc search path'''
639 path = []
642 path = []
640 # old mod_python does not set sys.argv
643 # old mod_python does not set sys.argv
641 if len(getattr(sys, 'argv', [])) > 0:
644 if len(getattr(sys, 'argv', [])) > 0:
642 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
645 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
643 '/../etc/mercurial'))
646 '/../etc/mercurial'))
644 path.extend(rcfiles('/etc/mercurial'))
647 path.extend(rcfiles('/etc/mercurial'))
645 path.append(os.path.expanduser('~/.hgrc'))
648 path.append(os.path.expanduser('~/.hgrc'))
646 path = [os.path.normpath(f) for f in path]
649 path = [os.path.normpath(f) for f in path]
647 return path
650 return path
648
651
649 def parse_patch_output(output_line):
652 def parse_patch_output(output_line):
650 """parses the output produced by patch and returns the file name"""
653 """parses the output produced by patch and returns the file name"""
651 pf = output_line[14:]
654 pf = output_line[14:]
652 if pf.startswith("'") and pf.endswith("'") and " " in pf:
655 if pf.startswith("'") and pf.endswith("'") and " " in pf:
653 pf = pf[1:-1] # Remove the quotes
656 pf = pf[1:-1] # Remove the quotes
654 return pf
657 return pf
655
658
656 def is_exec(f, last):
659 def is_exec(f, last):
657 """check whether a file is executable"""
660 """check whether a file is executable"""
658 return (os.lstat(f).st_mode & 0100 != 0)
661 return (os.lstat(f).st_mode & 0100 != 0)
659
662
660 def set_exec(f, mode):
663 def set_exec(f, mode):
661 s = os.lstat(f).st_mode
664 s = os.lstat(f).st_mode
662 if (s & 0100 != 0) == mode:
665 if (s & 0100 != 0) == mode:
663 return
666 return
664 if mode:
667 if mode:
665 # Turn on +x for every +r bit when making a file executable
668 # Turn on +x for every +r bit when making a file executable
666 # and obey umask.
669 # and obey umask.
667 umask = os.umask(0)
670 umask = os.umask(0)
668 os.umask(umask)
671 os.umask(umask)
669 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
672 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
670 else:
673 else:
671 os.chmod(f, s & 0666)
674 os.chmod(f, s & 0666)
672
675
673 def set_binary(fd):
676 def set_binary(fd):
674 pass
677 pass
675
678
676 def pconvert(path):
679 def pconvert(path):
677 return path
680 return path
678
681
679 def localpath(path):
682 def localpath(path):
680 return path
683 return path
681
684
682 normpath = os.path.normpath
685 normpath = os.path.normpath
683 samestat = os.path.samestat
686 samestat = os.path.samestat
684
687
685 def makelock(info, pathname):
688 def makelock(info, pathname):
686 try:
689 try:
687 os.symlink(info, pathname)
690 os.symlink(info, pathname)
688 except OSError, why:
691 except OSError, why:
689 if why.errno == errno.EEXIST:
692 if why.errno == errno.EEXIST:
690 raise
693 raise
691 else:
694 else:
692 _makelock_file(info, pathname)
695 _makelock_file(info, pathname)
693
696
694 def readlock(pathname):
697 def readlock(pathname):
695 try:
698 try:
696 return os.readlink(pathname)
699 return os.readlink(pathname)
697 except OSError, why:
700 except OSError, why:
698 if why.errno == errno.EINVAL:
701 if why.errno == errno.EINVAL:
699 return _readlock_file(pathname)
702 return _readlock_file(pathname)
700 else:
703 else:
701 raise
704 raise
702
705
706 def shellquote(s):
707 return "'%s'" % s.replace("'", "'\\''")
708
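The POSIX shellquote just added wraps its argument in single quotes and splices any embedded quote back in as '\'' (the Windows counterpart earlier in this diff escapes double quotes instead); a quick check:

    assert shellquote("plain") == "'plain'"
    assert shellquote("it's") == "'it'\\''s'"   # i.e. the shell sees: 'it'\''s'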
703 def testpid(pid):
709 def testpid(pid):
704 '''return False if pid dead, True if running or not sure'''
710 '''return False if pid dead, True if running or not sure'''
705 try:
711 try:
706 os.kill(pid, 0)
712 os.kill(pid, 0)
707 return True
713 return True
708 except OSError, inst:
714 except OSError, inst:
709 return inst.errno != errno.ESRCH
715 return inst.errno != errno.ESRCH
710
716
711 def explain_exit(code):
717 def explain_exit(code):
712 """return a 2-tuple (desc, code) describing a process's status"""
718 """return a 2-tuple (desc, code) describing a process's status"""
713 if os.WIFEXITED(code):
719 if os.WIFEXITED(code):
714 val = os.WEXITSTATUS(code)
720 val = os.WEXITSTATUS(code)
715 return _("exited with status %d") % val, val
721 return _("exited with status %d") % val, val
716 elif os.WIFSIGNALED(code):
722 elif os.WIFSIGNALED(code):
717 val = os.WTERMSIG(code)
723 val = os.WTERMSIG(code)
718 return _("killed by signal %d") % val, val
724 return _("killed by signal %d") % val, val
719 elif os.WIFSTOPPED(code):
725 elif os.WIFSTOPPED(code):
720 val = os.WSTOPSIG(code)
726 val = os.WSTOPSIG(code)
721 return _("stopped by signal %d") % val, val
727 return _("stopped by signal %d") % val, val
722 raise ValueError(_("invalid exit code"))
728 raise ValueError(_("invalid exit code"))
723
729
724 def opener(base, audit=True):
730 def opener(base, audit=True):
725 """
731 """
726 return a function that opens files relative to base
732 return a function that opens files relative to base
727
733
728 this function is used to hide the details of COW semantics and
734 this function is used to hide the details of COW semantics and
729 remote file access from higher level code.
735 remote file access from higher level code.
730 """
736 """
731 p = base
737 p = base
732 audit_p = audit
738 audit_p = audit
733
739
734 def mktempcopy(name):
740 def mktempcopy(name):
735 d, fn = os.path.split(name)
741 d, fn = os.path.split(name)
736 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
742 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
737 os.close(fd)
743 os.close(fd)
738 ofp = posixfile(temp, "wb")
744 ofp = posixfile(temp, "wb")
739 try:
745 try:
740 try:
746 try:
741 ifp = posixfile(name, "rb")
747 ifp = posixfile(name, "rb")
742 except IOError, inst:
748 except IOError, inst:
743 if not getattr(inst, 'filename', None):
749 if not getattr(inst, 'filename', None):
744 inst.filename = name
750 inst.filename = name
745 raise
751 raise
746 for chunk in filechunkiter(ifp):
752 for chunk in filechunkiter(ifp):
747 ofp.write(chunk)
753 ofp.write(chunk)
748 ifp.close()
754 ifp.close()
749 ofp.close()
755 ofp.close()
750 except:
756 except:
751 try: os.unlink(temp)
757 try: os.unlink(temp)
752 except: pass
758 except: pass
753 raise
759 raise
754 st = os.lstat(name)
760 st = os.lstat(name)
755 os.chmod(temp, st.st_mode)
761 os.chmod(temp, st.st_mode)
756 return temp
762 return temp
757
763
758 class atomictempfile(posixfile):
764 class atomictempfile(posixfile):
759 """the file will only be copied when rename is called"""
765 """the file will only be copied when rename is called"""
760 def __init__(self, name, mode):
766 def __init__(self, name, mode):
761 self.__name = name
767 self.__name = name
762 self.temp = mktempcopy(name)
768 self.temp = mktempcopy(name)
763 posixfile.__init__(self, self.temp, mode)
769 posixfile.__init__(self, self.temp, mode)
764 def rename(self):
770 def rename(self):
765 if not self.closed:
771 if not self.closed:
766 posixfile.close(self)
772 posixfile.close(self)
767 rename(self.temp, localpath(self.__name))
773 rename(self.temp, localpath(self.__name))
768 def __del__(self):
774 def __del__(self):
769 if not self.closed:
775 if not self.closed:
770 try:
776 try:
771 os.unlink(self.temp)
777 os.unlink(self.temp)
772 except: pass
778 except: pass
773 posixfile.close(self)
779 posixfile.close(self)
774
780
775 class atomicfile(atomictempfile):
781 class atomicfile(atomictempfile):
776 """the file will only be copied on close"""
782 """the file will only be copied on close"""
777 def __init__(self, name, mode):
783 def __init__(self, name, mode):
778 atomictempfile.__init__(self, name, mode)
784 atomictempfile.__init__(self, name, mode)
779 def close(self):
785 def close(self):
780 self.rename()
786 self.rename()
781 def __del__(self):
787 def __del__(self):
782 self.rename()
788 self.rename()
783
789
784 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
790 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
785 if audit_p:
791 if audit_p:
786 audit_path(path)
792 audit_path(path)
787 f = os.path.join(p, path)
793 f = os.path.join(p, path)
788
794
789 if not text:
795 if not text:
790 mode += "b" # for that other OS
796 mode += "b" # for that other OS
791
797
792 if mode[0] != "r":
798 if mode[0] != "r":
793 try:
799 try:
794 nlink = nlinks(f)
800 nlink = nlinks(f)
795 except OSError:
801 except OSError:
796 d = os.path.dirname(f)
802 d = os.path.dirname(f)
797 if not os.path.isdir(d):
803 if not os.path.isdir(d):
798 os.makedirs(d)
804 os.makedirs(d)
799 else:
805 else:
800 if atomic:
806 if atomic:
801 return atomicfile(f, mode)
807 return atomicfile(f, mode)
802 elif atomictemp:
808 elif atomictemp:
803 return atomictempfile(f, mode)
809 return atomictempfile(f, mode)
804 if nlink > 1:
810 if nlink > 1:
805 rename(mktempcopy(f), f)
811 rename(mktempcopy(f), f)
806 return posixfile(f, mode)
812 return posixfile(f, mode)
807
813
808 return o
814 return o
809
815
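A sketch of how an opener is typically used; the base path and file name below are hypothetical. The nlink > 1 branch is the copy-on-write step: before a write, a hardlinked file is replaced by a private copy, so repositories cloned with copyfiles(hardlink=True) never see each other's edits.

    wopener = opener('/path/to/repo/.hg')   # hypothetical base directory
    f = wopener('data/somefile.d', 'a')     # breaks a hardlink first if nlink > 1
    f.write('appended revision data')
    f.close()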
816 class chunkbuffer(object):
817 """Allow arbitrary sized chunks of data to be efficiently read from an
818 iterator over chunks of arbitrary size."""
819
820 def __init__(self, in_iter, targetsize = 2**16):
821 """in_iter is the iterator that's iterating over the input chunks.
822 targetsize is how big a buffer to try to maintain."""
823 self.in_iter = iter(in_iter)
824 self.buf = ''
825 self.targetsize = int(targetsize)
826 if self.targetsize <= 0:
827 raise ValueError(_("targetsize must be greater than 0, was %d") %
828 targetsize)
829 self.iterempty = False
830
831 def fillbuf(self):
832 """Ignore target size; read every chunk from iterator until empty."""
833 if not self.iterempty:
834 collector = cStringIO.StringIO()
835 collector.write(self.buf)
836 for ch in self.in_iter:
837 collector.write(ch)
838 self.buf = collector.getvalue()
839 self.iterempty = True
840
841 def read(self, l):
842 """Read L bytes of data from the iterator of chunks of data.
843 Returns less than L bytes if the iterator runs dry."""
844 if l > len(self.buf) and not self.iterempty:
845 # Clamp to a multiple of self.targetsize
846 targetsize = self.targetsize * ((l // self.targetsize) + 1)
847 collector = cStringIO.StringIO()
848 collector.write(self.buf)
849 collected = len(self.buf)
850 for chunk in self.in_iter:
851 collector.write(chunk)
852 collected += len(chunk)
853 if collected >= targetsize:
854 break
855 if collected < targetsize:
856 self.iterempty = True
857 self.buf = collector.getvalue()
858 s, self.buf = self.buf[:l], buffer(self.buf, l)
859 return s
860
861 def filechunkiter(f, size=65536, limit=None):
862 """Create a generator that produces the data in the file size
863 (default 65536) bytes at a time, up to optional limit (default is
864 to read all data). Chunks may be less than size bytes if the
865 chunk is the last chunk in the file, or the file is a socket or
866 some other type of file that sometimes reads less data than is
867 requested."""
868 assert size >= 0
869 assert limit is None or limit >= 0
870 while True:
871 if limit is None: nbytes = size
872 else: nbytes = min(limit, size)
873 s = nbytes and f.read(nbytes)
874 if not s: break
875 if limit: limit -= len(s)
876 yield s
877
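chunkbuffer turns an iterator of arbitrarily sized chunks into something read(l) can be called on, buffering roughly targetsize bytes at a time and returning a short result only when the iterator runs dry. A self-contained sketch of the same buffering idea, assuming Python 3 bytes rather than the cStringIO/buffer machinery above; the class name ChunkReader is made up for the example:

class ChunkReader(object):
    """Read exact byte counts from an iterator of arbitrary-size chunks."""
    def __init__(self, in_iter):
        self.in_iter = iter(in_iter)
        self.buf = b''

    def read(self, n):
        # Pull chunks until the buffer can satisfy the request, or the
        # iterator is exhausted (then hand back whatever is left).
        while len(self.buf) < n:
            try:
                self.buf += next(self.in_iter)
            except StopIteration:
                break
        s, self.buf = self.buf[:n], self.buf[n:]
        return s

r = ChunkReader([b'ab', b'cdef', b'g'])
assert r.read(3) == b'abc'
assert r.read(10) == b'defg'   # short read once the chunks run out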
878 def makedate():
879 lt = time.localtime()
880 if lt[8] == 1 and time.daylight:
881 tz = time.altzone
882 else:
883 tz = time.timezone
884 return time.mktime(lt), tz
885
886 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
887 """represent a (unixtime, offset) tuple as a localized time.
888 unixtime is seconds since the epoch, and offset is the time zone's
889 number of seconds away from UTC. if timezone is false, do not
890 append time zone to string."""
891 t, tz = date or makedate()
892 s = time.strftime(format, time.gmtime(float(t) - tz))
893 if timezone:
894 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
895 return s
896
897 def strdate(string, format='%a %b %d %H:%M:%S %Y'):
898 """parse a localized time string and return a (unixtime, offset) tuple.
899 if the string cannot be parsed, ValueError is raised."""
900 def hastimezone(string):
901 return (string[-4:].isdigit() and
902 (string[-5] == '+' or string[-5] == '-') and
903 string[-6].isspace())
904
905 if hastimezone(string):
906 date, tz = string[:-6], string[-5:]
907 tz = int(tz)
908 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
909 else:
910 date, offset = string, 0
911 when = int(time.mktime(time.strptime(date, format))) + offset
912 return when, offset
913
914 def parsedate(string, formats=None):
915 """parse a localized time string and return a (unixtime, offset) tuple.
916 The date may be a "unixtime offset" string or in one of the specified
917 formats."""
918 if not formats:
919 formats = defaultdateformats
920 try:
921 when, offset = map(int, string.split(' '))
922 except ValueError:
923 for format in formats:
924 try:
925 when, offset = strdate(string, format)
926 except ValueError:
927 pass
928 else:
929 break
930 else:
931 raise ValueError(_('invalid date: %r') % string)
932 # validate explicit (probably user-specified) date and
933 # time zone offset. values must fit in signed 32 bits for
934 # current 32-bit linux runtimes. timezones go from UTC-12
935 # to UTC+14
936 if abs(when) > 0x7fffffff:
937 raise ValueError(_('date exceeds 32 bits: %d') % when)
938 if offset < -50400 or offset > 43200:
939 raise ValueError(_('impossible time zone offset: %d') % offset)
940 return when, offset
941
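parsedate first tries the internal "unixtime offset" form (the '1 0' dates used in the test scripts below) and only then falls back to strdate with the human-readable formats, finally rejecting values that do not fit in signed 32 bits or that carry an impossible offset. A rough standalone illustration of that two-step parse, assuming Python 3; parse_date is a simplified stand-in that skips the timezone-suffix handling and the range checks:

import time

def parse_date(s, formats=('%a %b %d %H:%M:%S %Y',)):
    # Fast path: "unixtime offset", e.g. "1 0".
    try:
        when, offset = map(int, s.split(' '))
        return when, offset
    except ValueError:
        pass
    # Fallback: try each human-readable format, interpreted as local time.
    for fmt in formats:
        try:
            return int(time.mktime(time.strptime(s, fmt))), 0
        except ValueError:
            continue
    raise ValueError('invalid date: %r' % s)

print(parse_date('1 0'))                        # (1, 0)
print(parse_date('Mon Jun 26 12:00:00 2006'))   # local-time fallback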
942 def shortuser(user):
943 """Return a short representation of a user name or email address."""
944 f = user.find('@')
945 if f >= 0:
946 user = user[:f]
947 f = user.find('<')
948 if f >= 0:
949 user = user[f+1:]
950 return user
951
952 def walkrepos(path):
953 '''yield every hg repository under path, recursively.'''
954 def errhandler(err):
955 if err.filename == path:
956 raise err
957
958 for root, dirs, files in os.walk(path, onerror=errhandler):
959 for d in dirs:
960 if d == '.hg':
961 yield root
962 dirs[:] = []
963 break
964
965 _rcpath = None
966
967 def rcpath():
968 '''return hgrc search path. if env var HGRCPATH is set, use it.
969 for each item in path, if directory, use files ending in .rc,
970 else use item.
971 make HGRCPATH empty to only look in .hg/hgrc of current repo.
972 if no HGRCPATH, use default os-specific path.'''
973 global _rcpath
974 if _rcpath is None:
975 if 'HGRCPATH' in os.environ:
976 _rcpath = []
977 for p in os.environ['HGRCPATH'].split(os.pathsep):
978 if not p: continue
979 if os.path.isdir(p):
980 for f in os.listdir(p):
981 if f.endswith('.rc'):
982 _rcpath.append(os.path.join(p, f))
983 else:
984 _rcpath.append(p)
985 else:
986 _rcpath = os_rcpath()
987 return _rcpath
988
989 def bytecount(nbytes):
990 '''return byte count formatted as readable string, with units'''
991
992 units = (
993 (100, 1<<30, _('%.0f GB')),
994 (10, 1<<30, _('%.1f GB')),
995 (1, 1<<30, _('%.2f GB')),
996 (100, 1<<20, _('%.0f MB')),
997 (10, 1<<20, _('%.1f MB')),
998 (1, 1<<20, _('%.2f MB')),
999 (100, 1<<10, _('%.0f KB')),
1000 (10, 1<<10, _('%.1f KB')),
1001 (1, 1<<10, _('%.2f KB')),
1002 (1, 1, _('%.0f bytes')),
1003 )
1004
1005 for multiplier, divisor, format in units:
1006 if nbytes >= divisor * multiplier:
1007 return format % (nbytes / float(divisor))
1008 return units[-1][2] % nbytes
1009
1010 def drop_scheme(scheme, path):
1011 sc = scheme + ':'
1012 if path.startswith(sc):
1013 path = path[len(sc):]
1014 if path.startswith('//'):
1015 path = path[2:]
1016 return path
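bytecount walks its unit table from the largest threshold down and returns the first row the value reaches, which is why 1536 bytes prints with two decimals while 150 MB prints with none. A quick standalone check of those thresholds, with the _() translation wrapper dropped:

def bytecount(nbytes):
    units = (
        (100, 1 << 30, '%.0f GB'), (10, 1 << 30, '%.1f GB'), (1, 1 << 30, '%.2f GB'),
        (100, 1 << 20, '%.0f MB'), (10, 1 << 20, '%.1f MB'), (1, 1 << 20, '%.2f MB'),
        (100, 1 << 10, '%.0f KB'), (10, 1 << 10, '%.1f KB'), (1, 1 << 10, '%.2f KB'),
        (1, 1, '%.0f bytes'),
    )
    for multiplier, divisor, fmt in units:
        if nbytes >= divisor * multiplier:
            return fmt % (nbytes / float(divisor))
    return units[-1][2] % nbytes

print(bytecount(1536))               # 1.50 KB
print(bytecount(150 * 1024 * 1024))  # 150 MB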
@@ -1,30 +1,65
1 #!/bin/sh
2
3 hg init a
4
5 cd a
6 echo a > a
7 hg ci -Ama -d '1 0'
8
9 hg cp a b
10 hg ci -mb -d '2 0'
11
12 mkdir dir
13 hg mv b dir
14 hg ci -mc -d '3 0'
15
16 hg mv a b
17 hg ci -md -d '4 0'
18
19 hg mv dir/b e
20 hg ci -me -d '5 0'
21
22 hg log a
23 echo % -f, directory
24 hg log -f dir
25 echo % -f, but no args
26 hg log -f
27 echo % one rename
28 hg log -vf a
29 echo % many renames
30 hg log -vf e
31
32 # log --follow tests
33 hg init ../follow
34 cd ../follow
35 echo base > base
36 hg ci -Ambase -d '1 0'
37
38 echo r1 >> base
39 hg ci -Amr1 -d '1 0'
40 echo r2 >> base
41 hg ci -Amr2 -d '1 0'
42
43 hg up -C 1
44 echo b1 > b1
45 hg ci -Amb1 -d '1 0'
46
47 echo % log -f
48 hg log -f
49
50 hg up -C 0
51 echo b2 > b2
52 hg ci -Amb2 -d '1 0'
53
54 echo % log -f -r 1:tip
55 hg log -f -r 1:tip
56
57 hg up -C 3
58 hg merge tip
59 hg ci -mm12 -d '1 0'
60
61 echo postm >> b1
62 hg ci -Amb1.1 -d'1 0'
63
64 echo % log --follow-first
65 hg log --follow-first
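The new half of this test exercises hg log --follow-first, which starts at the working-directory parent and follows only the first parent of each merge, so the b2 branch (rev 4) never appears. A toy Python sketch of that traversal over the graph this script builds; the parents table is hand-written here from the expected output below:

# parents[rev] = (first parent, second parent or None)
parents = {
    0: (None, None),  # base
    1: (0, None),     # r1
    2: (1, None),     # r2
    3: (1, None),     # b1, branched off r1
    4: (0, None),     # b2, branched off base
    5: (3, 4),        # m12, merge with b1 as first parent
    6: (5, None),     # b1.1
}

def follow_first(rev):
    """Yield rev and its ancestors, following only first parents."""
    while rev is not None:
        yield rev
        rev = parents[rev][0]

print(list(follow_first(6)))   # [6, 5, 3, 1, 0], matching the log below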
@@ -1,78 +1,151
1 adding a
2 changeset: 0:8580ff50825a
3 user: test
4 date: Thu Jan 01 00:00:01 1970 +0000
5 summary: a
6
7 % -f, directory
8 abort: can only follow copies/renames for explicit file names
9 % -f, but no args
10 changeset: 4:8c1c8408f737
11 tag: tip
12 user: test
13 date: Thu Jan 01 00:00:05 1970 +0000
14 summary: e
15
16 changeset: 3:c4ba038c90ce
17 user: test
18 date: Thu Jan 01 00:00:04 1970 +0000
19 summary: d
20
21 changeset: 2:21fba396af4c
22 user: test
23 date: Thu Jan 01 00:00:03 1970 +0000
24 summary: c
25
26 changeset: 1:c0296dabce9b
27 user: test
28 date: Thu Jan 01 00:00:02 1970 +0000
29 summary: b
30
31 changeset: 0:8580ff50825a
32 user: test
33 date: Thu Jan 01 00:00:01 1970 +0000
34 summary: a
35
36 % one rename
37 changeset: 0:8580ff50825a50c8f716709acdf8de0deddcd6ab
38 user: test
39 date: Thu Jan 01 00:00:01 1970 +0000
40 files: a
41 description:
42 a
43
44
45 % many renames
46 changeset: 4:8c1c8408f7371319750ea2d4fa7969828effbcf4
47 tag: tip
48 user: test
49 date: Thu Jan 01 00:00:05 1970 +0000
50 files: dir/b e
51 description:
52 e
53
54
55 changeset: 2:21fba396af4c801f9717de6c415b6cc9620437e8
56 user: test
57 date: Thu Jan 01 00:00:03 1970 +0000
58 files: b dir/b
59 description:
60 c
61
62
63 changeset: 1:c0296dabce9bf0cd3fdd608de26693c91cd6bbf4
64 user: test
65 date: Thu Jan 01 00:00:02 1970 +0000
66 files: b
67 description:
68 b
69
70
71 changeset: 0:8580ff50825a50c8f716709acdf8de0deddcd6ab
72 user: test
73 date: Thu Jan 01 00:00:01 1970 +0000
74 files: a
75 description:
76 a
77
78
79 adding base
80 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
81 adding b1
82 % log -f
83 changeset: 3:e62f78d544b4
84 tag: tip
85 parent: 1:3d5bf5654eda
86 user: test
87 date: Thu Jan 01 00:00:01 1970 +0000
88 summary: b1
89
90 changeset: 1:3d5bf5654eda
91 user: test
92 date: Thu Jan 01 00:00:01 1970 +0000
93 summary: r1
94
95 changeset: 0:67e992f2c4f3
96 user: test
97 date: Thu Jan 01 00:00:01 1970 +0000
98 summary: base
99
100 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
101 adding b2
102 % log -f -r 1:tip
103 changeset: 1:3d5bf5654eda
104 user: test
105 date: Thu Jan 01 00:00:01 1970 +0000
106 summary: r1
107
108 changeset: 2:60c670bf5b30
109 user: test
110 date: Thu Jan 01 00:00:01 1970 +0000
111 summary: r2
112
113 changeset: 3:e62f78d544b4
114 parent: 1:3d5bf5654eda
115 user: test
116 date: Thu Jan 01 00:00:01 1970 +0000
117 summary: b1
118
119 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
120 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
121 (branch merge, don't forget to commit)
122 % log --follow-first
123 changeset: 6:2404bbcab562
124 tag: tip
125 user: test
126 date: Thu Jan 01 00:00:01 1970 +0000
127 summary: b1.1
128
129 changeset: 5:302e9dd6890d
130 parent: 3:e62f78d544b4
131 parent: 4:ddb82e70d1a1
132 user: test
133 date: Thu Jan 01 00:00:01 1970 +0000
134 summary: m12
135
136 changeset: 3:e62f78d544b4
137 parent: 1:3d5bf5654eda
138 user: test
139 date: Thu Jan 01 00:00:01 1970 +0000
140 summary: b1
141
142 changeset: 1:3d5bf5654eda
143 user: test
144 date: Thu Jan 01 00:00:01 1970 +0000
145 summary: r1
146
147 changeset: 0:67e992f2c4f3
148 user: test
149 date: Thu Jan 01 00:00:01 1970 +0000
150 summary: base
151