util: add sort helper
Matt Mackall
r6762:f67d1468 default

The requested changes are too big and the content was truncated.
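The sort helper added by this changeset lives in mercurial/util.py and is not itself included in the truncated diff below. From the way the converted call sites use it (passing any iterable and using the return value), a minimal sketch of its behaviour might look like the following; this is an assumption for illustration, not the verbatim implementation:

    def sort(l):
        """Return a new sorted list built from any iterable.

        Sketch only -- the real helper is defined in mercurial/util.py
        and is referenced below as util.sort()."""
        l = list(l)
        l.sort()
        return l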

@@ -1,311 +1,307 b''
1 # bugzilla.py - bugzilla integration for mercurial
1 # bugzilla.py - bugzilla integration for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to update comments of bugzilla bugs when changesets
8 # hook extension to update comments of bugzilla bugs when changesets
9 # that refer to bugs by id are seen. this hook does not change bug
9 # that refer to bugs by id are seen. this hook does not change bug
10 # status, only comments.
10 # status, only comments.
11 #
11 #
12 # to configure, add items to '[bugzilla]' section of hgrc.
12 # to configure, add items to '[bugzilla]' section of hgrc.
13 #
13 #
14 # to use, configure bugzilla extension and enable like this:
14 # to use, configure bugzilla extension and enable like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.bugzilla =
17 # hgext.bugzilla =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # run bugzilla hook on every change pulled or pushed in here
20 # # run bugzilla hook on every change pulled or pushed in here
21 # incoming.bugzilla = python:hgext.bugzilla.hook
21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 #
22 #
23 # config items:
23 # config items:
24 #
24 #
25 # section name is 'bugzilla'.
25 # section name is 'bugzilla'.
26 # [bugzilla]
26 # [bugzilla]
27 #
27 #
28 # REQUIRED:
28 # REQUIRED:
29 # host = bugzilla # mysql server where bugzilla database lives
29 # host = bugzilla # mysql server where bugzilla database lives
30 # password = ** # user's password
30 # password = ** # user's password
31 # version = 2.16 # version of bugzilla installed
31 # version = 2.16 # version of bugzilla installed
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # bzuser = ... # fallback bugzilla user name to record comments with
34 # bzuser = ... # fallback bugzilla user name to record comments with
35 # db = bugs # database to connect to
35 # db = bugs # database to connect to
36 # notify = ... # command to run to get bugzilla to send mail
36 # notify = ... # command to run to get bugzilla to send mail
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 # strip = 0 # number of slashes to strip for url paths
38 # strip = 0 # number of slashes to strip for url paths
39 # style = ... # style file to use when formatting comments
39 # style = ... # style file to use when formatting comments
40 # template = ... # template to use when formatting comments
40 # template = ... # template to use when formatting comments
41 # timeout = 5 # database connection timeout (seconds)
41 # timeout = 5 # database connection timeout (seconds)
42 # user = bugs # user to connect to database as
42 # user = bugs # user to connect to database as
43 # [web]
43 # [web]
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 #
45 #
46 # if hg committer names are not same as bugzilla user names, use
46 # if hg committer names are not same as bugzilla user names, use
47 # "usermap" feature to map from committer email to bugzilla user name.
47 # "usermap" feature to map from committer email to bugzilla user name.
48 # usermap can be in hgrc or separate config file.
48 # usermap can be in hgrc or separate config file.
49 #
49 #
50 # [bugzilla]
50 # [bugzilla]
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 # [usermap]
52 # [usermap]
53 # committer_email = bugzilla_user_name
53 # committer_email = bugzilla_user_name
54
54
55 from mercurial.i18n import _
55 from mercurial.i18n import _
56 from mercurial.node import short
56 from mercurial.node import short
57 from mercurial import cmdutil, templater, util
57 from mercurial import cmdutil, templater, util
58 import re, time
58 import re, time
59
59
60 MySQLdb = None
60 MySQLdb = None
61
61
62 def buglist(ids):
62 def buglist(ids):
63 return '(' + ','.join(map(str, ids)) + ')'
63 return '(' + ','.join(map(str, ids)) + ')'
64
64
65 class bugzilla_2_16(object):
65 class bugzilla_2_16(object):
66 '''support for bugzilla version 2.16.'''
66 '''support for bugzilla version 2.16.'''
67
67
68 def __init__(self, ui):
68 def __init__(self, ui):
69 self.ui = ui
69 self.ui = ui
70 host = self.ui.config('bugzilla', 'host', 'localhost')
70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 passwd = self.ui.config('bugzilla', 'password')
72 passwd = self.ui.config('bugzilla', 'password')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 usermap = self.ui.config('bugzilla', 'usermap')
75 usermap = self.ui.config('bugzilla', 'usermap')
76 if usermap:
76 if usermap:
77 self.ui.readsections(usermap, 'usermap')
77 self.ui.readsections(usermap, 'usermap')
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 (host, db, user, '*' * len(passwd)))
79 (host, db, user, '*' * len(passwd)))
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 db=db, connect_timeout=timeout)
81 db=db, connect_timeout=timeout)
82 self.cursor = self.conn.cursor()
82 self.cursor = self.conn.cursor()
83 self.run('select fieldid from fielddefs where name = "longdesc"')
83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 ids = self.cursor.fetchall()
84 ids = self.cursor.fetchall()
85 if len(ids) != 1:
85 if len(ids) != 1:
86 raise util.Abort(_('unknown database schema'))
86 raise util.Abort(_('unknown database schema'))
87 self.longdesc_id = ids[0][0]
87 self.longdesc_id = ids[0][0]
88 self.user_ids = {}
88 self.user_ids = {}
89
89
90 def run(self, *args, **kwargs):
90 def run(self, *args, **kwargs):
91 '''run a query.'''
91 '''run a query.'''
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 try:
93 try:
94 self.cursor.execute(*args, **kwargs)
94 self.cursor.execute(*args, **kwargs)
95 except MySQLdb.MySQLError, err:
95 except MySQLdb.MySQLError, err:
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 raise
97 raise
98
98
99 def filter_real_bug_ids(self, ids):
99 def filter_real_bug_ids(self, ids):
100 '''filter not-existing bug ids from list.'''
100 '''filter not-existing bug ids from list.'''
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 ids = [c[0] for c in self.cursor.fetchall()]
102 return util.sort([c[0] for c in self.cursor.fetchall()])
103 ids.sort()
104 return ids
105
103
106 def filter_unknown_bug_ids(self, node, ids):
104 def filter_unknown_bug_ids(self, node, ids):
107 '''filter bug ids from list that already refer to this changeset.'''
105 '''filter bug ids from list that already refer to this changeset.'''
108
106
109 self.run('''select bug_id from longdescs where
107 self.run('''select bug_id from longdescs where
110 bug_id in %s and thetext like "%%%s%%"''' %
108 bug_id in %s and thetext like "%%%s%%"''' %
111 (buglist(ids), short(node)))
109 (buglist(ids), short(node)))
112 unknown = dict.fromkeys(ids)
110 unknown = dict.fromkeys(ids)
113 for (id,) in self.cursor.fetchall():
111 for (id,) in self.cursor.fetchall():
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
112 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 (id, short(node)))
113 (id, short(node)))
116 unknown.pop(id, None)
114 unknown.pop(id, None)
117 ids = unknown.keys()
115 return util.sort(unknown.keys())
118 ids.sort()
119 return ids
120
116
121 def notify(self, ids):
117 def notify(self, ids):
122 '''tell bugzilla to send mail.'''
118 '''tell bugzilla to send mail.'''
123
119
124 self.ui.status(_('telling bugzilla to send mail:\n'))
120 self.ui.status(_('telling bugzilla to send mail:\n'))
125 for id in ids:
121 for id in ids:
126 self.ui.status(_(' bug %s\n') % id)
122 self.ui.status(_(' bug %s\n') % id)
127 cmd = self.ui.config('bugzilla', 'notify',
123 cmd = self.ui.config('bugzilla', 'notify',
128 'cd /var/www/html/bugzilla && '
124 'cd /var/www/html/bugzilla && '
129 './processmail %s nobody@nowhere.com') % id
125 './processmail %s nobody@nowhere.com') % id
130 fp = util.popen('(%s) 2>&1' % cmd)
126 fp = util.popen('(%s) 2>&1' % cmd)
131 out = fp.read()
127 out = fp.read()
132 ret = fp.close()
128 ret = fp.close()
133 if ret:
129 if ret:
134 self.ui.warn(out)
130 self.ui.warn(out)
135 raise util.Abort(_('bugzilla notify command %s') %
131 raise util.Abort(_('bugzilla notify command %s') %
136 util.explain_exit(ret)[0])
132 util.explain_exit(ret)[0])
137 self.ui.status(_('done\n'))
133 self.ui.status(_('done\n'))
138
134
139 def get_user_id(self, user):
135 def get_user_id(self, user):
140 '''look up numeric bugzilla user id.'''
136 '''look up numeric bugzilla user id.'''
141 try:
137 try:
142 return self.user_ids[user]
138 return self.user_ids[user]
143 except KeyError:
139 except KeyError:
144 try:
140 try:
145 userid = int(user)
141 userid = int(user)
146 except ValueError:
142 except ValueError:
147 self.ui.note(_('looking up user %s\n') % user)
143 self.ui.note(_('looking up user %s\n') % user)
148 self.run('''select userid from profiles
144 self.run('''select userid from profiles
149 where login_name like %s''', user)
145 where login_name like %s''', user)
150 all = self.cursor.fetchall()
146 all = self.cursor.fetchall()
151 if len(all) != 1:
147 if len(all) != 1:
152 raise KeyError(user)
148 raise KeyError(user)
153 userid = int(all[0][0])
149 userid = int(all[0][0])
154 self.user_ids[user] = userid
150 self.user_ids[user] = userid
155 return userid
151 return userid
156
152
157 def map_committer(self, user):
153 def map_committer(self, user):
158 '''map name of committer to bugzilla user name.'''
154 '''map name of committer to bugzilla user name.'''
159 for committer, bzuser in self.ui.configitems('usermap'):
155 for committer, bzuser in self.ui.configitems('usermap'):
160 if committer.lower() == user.lower():
156 if committer.lower() == user.lower():
161 return bzuser
157 return bzuser
162 return user
158 return user
163
159
164 def add_comment(self, bugid, text, committer):
160 def add_comment(self, bugid, text, committer):
165 '''add comment to bug. try adding comment as committer of
161 '''add comment to bug. try adding comment as committer of
166 changeset, otherwise as default bugzilla user.'''
162 changeset, otherwise as default bugzilla user.'''
167 user = self.map_committer(committer)
163 user = self.map_committer(committer)
168 try:
164 try:
169 userid = self.get_user_id(user)
165 userid = self.get_user_id(user)
170 except KeyError:
166 except KeyError:
171 try:
167 try:
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
168 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 if not defaultuser:
169 if not defaultuser:
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
170 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 user)
171 user)
176 userid = self.get_user_id(defaultuser)
172 userid = self.get_user_id(defaultuser)
177 except KeyError:
173 except KeyError:
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
174 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 (user, defaultuser))
175 (user, defaultuser))
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
176 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 self.run('''insert into longdescs
177 self.run('''insert into longdescs
182 (bug_id, who, bug_when, thetext)
178 (bug_id, who, bug_when, thetext)
183 values (%s, %s, %s, %s)''',
179 values (%s, %s, %s, %s)''',
184 (bugid, userid, now, text))
180 (bugid, userid, now, text))
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
181 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 values (%s, %s, %s, %s)''',
182 values (%s, %s, %s, %s)''',
187 (bugid, userid, now, self.longdesc_id))
183 (bugid, userid, now, self.longdesc_id))
188
184
189 class bugzilla(object):
185 class bugzilla(object):
190 # supported versions of bugzilla. different versions have
186 # supported versions of bugzilla. different versions have
191 # different schemas.
187 # different schemas.
192 _versions = {
188 _versions = {
193 '2.16': bugzilla_2_16,
189 '2.16': bugzilla_2_16,
194 }
190 }
195
191
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
192 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
193 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198
194
199 _bz = None
195 _bz = None
200
196
201 def __init__(self, ui, repo):
197 def __init__(self, ui, repo):
202 self.ui = ui
198 self.ui = ui
203 self.repo = repo
199 self.repo = repo
204
200
205 def bz(self):
201 def bz(self):
206 '''return object that knows how to talk to bugzilla version in
202 '''return object that knows how to talk to bugzilla version in
207 use.'''
203 use.'''
208
204
209 if bugzilla._bz is None:
205 if bugzilla._bz is None:
210 bzversion = self.ui.config('bugzilla', 'version')
206 bzversion = self.ui.config('bugzilla', 'version')
211 try:
207 try:
212 bzclass = bugzilla._versions[bzversion]
208 bzclass = bugzilla._versions[bzversion]
213 except KeyError:
209 except KeyError:
214 raise util.Abort(_('bugzilla version %s not supported') %
210 raise util.Abort(_('bugzilla version %s not supported') %
215 bzversion)
211 bzversion)
216 bugzilla._bz = bzclass(self.ui)
212 bugzilla._bz = bzclass(self.ui)
217 return bugzilla._bz
213 return bugzilla._bz
218
214
219 def __getattr__(self, key):
215 def __getattr__(self, key):
220 return getattr(self.bz(), key)
216 return getattr(self.bz(), key)
221
217
222 _bug_re = None
218 _bug_re = None
223 _split_re = None
219 _split_re = None
224
220
225 def find_bug_ids(self, ctx):
221 def find_bug_ids(self, ctx):
226 '''find valid bug ids that are referred to in changeset
222 '''find valid bug ids that are referred to in changeset
227 comments and that do not already have references to this
223 comments and that do not already have references to this
228 changeset.'''
224 changeset.'''
229
225
230 if bugzilla._bug_re is None:
226 if bugzilla._bug_re is None:
231 bugzilla._bug_re = re.compile(
227 bugzilla._bug_re = re.compile(
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
228 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 re.IGNORECASE)
229 re.IGNORECASE)
234 bugzilla._split_re = re.compile(r'\D+')
230 bugzilla._split_re = re.compile(r'\D+')
235 start = 0
231 start = 0
236 ids = {}
232 ids = {}
237 while True:
233 while True:
238 m = bugzilla._bug_re.search(ctx.description(), start)
234 m = bugzilla._bug_re.search(ctx.description(), start)
239 if not m:
235 if not m:
240 break
236 break
241 start = m.end()
237 start = m.end()
242 for id in bugzilla._split_re.split(m.group(1)):
238 for id in bugzilla._split_re.split(m.group(1)):
243 if not id: continue
239 if not id: continue
244 ids[int(id)] = 1
240 ids[int(id)] = 1
245 ids = ids.keys()
241 ids = ids.keys()
246 if ids:
242 if ids:
247 ids = self.filter_real_bug_ids(ids)
243 ids = self.filter_real_bug_ids(ids)
248 if ids:
244 if ids:
249 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
245 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
250 return ids
246 return ids
251
247
252 def update(self, bugid, ctx):
248 def update(self, bugid, ctx):
253 '''update bugzilla bug with reference to changeset.'''
249 '''update bugzilla bug with reference to changeset.'''
254
250
255 def webroot(root):
251 def webroot(root):
256 '''strip leading prefix of repo root and turn into
252 '''strip leading prefix of repo root and turn into
257 url-safe path.'''
253 url-safe path.'''
258 count = int(self.ui.config('bugzilla', 'strip', 0))
254 count = int(self.ui.config('bugzilla', 'strip', 0))
259 root = util.pconvert(root)
255 root = util.pconvert(root)
260 while count > 0:
256 while count > 0:
261 c = root.find('/')
257 c = root.find('/')
262 if c == -1:
258 if c == -1:
263 break
259 break
264 root = root[c+1:]
260 root = root[c+1:]
265 count -= 1
261 count -= 1
266 return root
262 return root
267
263
268 mapfile = self.ui.config('bugzilla', 'style')
264 mapfile = self.ui.config('bugzilla', 'style')
269 tmpl = self.ui.config('bugzilla', 'template')
265 tmpl = self.ui.config('bugzilla', 'template')
270 t = cmdutil.changeset_templater(self.ui, self.repo,
266 t = cmdutil.changeset_templater(self.ui, self.repo,
271 False, mapfile, False)
267 False, mapfile, False)
272 if not mapfile and not tmpl:
268 if not mapfile and not tmpl:
273 tmpl = _('changeset {node|short} in repo {root} refers '
269 tmpl = _('changeset {node|short} in repo {root} refers '
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
270 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 if tmpl:
271 if tmpl:
276 tmpl = templater.parsestring(tmpl, quoted=False)
272 tmpl = templater.parsestring(tmpl, quoted=False)
277 t.use_template(tmpl)
273 t.use_template(tmpl)
278 self.ui.pushbuffer()
274 self.ui.pushbuffer()
279 t.show(changenode=ctx.node(), changes=ctx.changeset(),
275 t.show(changenode=ctx.node(), changes=ctx.changeset(),
280 bug=str(bugid),
276 bug=str(bugid),
281 hgweb=self.ui.config('web', 'baseurl'),
277 hgweb=self.ui.config('web', 'baseurl'),
282 root=self.repo.root,
278 root=self.repo.root,
283 webroot=webroot(self.repo.root))
279 webroot=webroot(self.repo.root))
284 data = self.ui.popbuffer()
280 data = self.ui.popbuffer()
285 self.add_comment(bugid, data, util.email(ctx.user()))
281 self.add_comment(bugid, data, util.email(ctx.user()))
286
282
287 def hook(ui, repo, hooktype, node=None, **kwargs):
283 def hook(ui, repo, hooktype, node=None, **kwargs):
288 '''add comment to bugzilla for each changeset that refers to a
284 '''add comment to bugzilla for each changeset that refers to a
289 bugzilla bug id. only add a comment once per bug, so same change
285 bugzilla bug id. only add a comment once per bug, so same change
290 seen multiple times does not fill bug with duplicate data.'''
286 seen multiple times does not fill bug with duplicate data.'''
291 try:
287 try:
292 import MySQLdb as mysql
288 import MySQLdb as mysql
293 global MySQLdb
289 global MySQLdb
294 MySQLdb = mysql
290 MySQLdb = mysql
295 except ImportError, err:
291 except ImportError, err:
296 raise util.Abort(_('python mysql support not available: %s') % err)
292 raise util.Abort(_('python mysql support not available: %s') % err)
297
293
298 if node is None:
294 if node is None:
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
295 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 hooktype)
296 hooktype)
301 try:
297 try:
302 bz = bugzilla(ui, repo)
298 bz = bugzilla(ui, repo)
303 ctx = repo[node]
299 ctx = repo[node]
304 ids = bz.find_bug_ids(ctx)
300 ids = bz.find_bug_ids(ctx)
305 if ids:
301 if ids:
306 for id in ids:
302 for id in ids:
307 bz.update(id, ctx)
303 bz.update(id, ctx)
308 bz.notify(ids)
304 bz.notify(ids)
309 except MySQLdb.MySQLError, err:
305 except MySQLdb.MySQLError, err:
310 raise util.Abort(_('database error: %s') % err[1])
306 raise util.Abort(_('database error: %s') % err[1])
311
307
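The pattern applied throughout bugzilla.py above replaces the three-step "build a list, sort it in place, return it" sequence with a single util.sort(...) expression. A standalone illustration of the equivalence, using hypothetical cursor rows and the sort() sketch shown earlier:

    rows = [(3,), (1,), (2,)]           # stand-in for self.cursor.fetchall()

    # old style: build, sort in place, then return
    ids = [c[0] for c in rows]
    ids.sort()

    # new style: a single expression (util.sort per the sketch above)
    assert sort([c[0] for c in rows]) == ids == [1, 2, 3]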
@@ -1,121 +1,119 b''
1 # churn.py - create a graph showing who changed the most lines
1 # churn.py - create a graph showing who changed the most lines
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 '''allow graphing the number of lines changed per contributor'''
7 '''allow graphing the number of lines changed per contributor'''
8
8
9 from mercurial.i18n import gettext as _
9 from mercurial.i18n import gettext as _
10 from mercurial import patch, cmdutil, util, node
10 from mercurial import patch, cmdutil, util, node
11 import os, sys
11 import os, sys
12
12
13 def get_tty_width():
13 def get_tty_width():
14 if 'COLUMNS' in os.environ:
14 if 'COLUMNS' in os.environ:
15 try:
15 try:
16 return int(os.environ['COLUMNS'])
16 return int(os.environ['COLUMNS'])
17 except ValueError:
17 except ValueError:
18 pass
18 pass
19 try:
19 try:
20 import termios, array, fcntl
20 import termios, array, fcntl
21 for dev in (sys.stdout, sys.stdin):
21 for dev in (sys.stdout, sys.stdin):
22 try:
22 try:
23 fd = dev.fileno()
23 fd = dev.fileno()
24 if not os.isatty(fd):
24 if not os.isatty(fd):
25 continue
25 continue
26 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
26 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
27 return array.array('h', arri)[1]
27 return array.array('h', arri)[1]
28 except ValueError:
28 except ValueError:
29 pass
29 pass
30 except ImportError:
30 except ImportError:
31 pass
31 pass
32 return 80
32 return 80
33
33
34 def countrevs(ui, repo, amap, revs, progress=False):
34 def countrevs(ui, repo, amap, revs, progress=False):
35 stats = {}
35 stats = {}
36 count = pct = 0
36 count = pct = 0
37 if not revs:
37 if not revs:
38 revs = range(len(repo))
38 revs = range(len(repo))
39
39
40 for rev in revs:
40 for rev in revs:
41 ctx2 = repo[rev]
41 ctx2 = repo[rev]
42 parents = ctx2.parents()
42 parents = ctx2.parents()
43 if len(parents) > 1:
43 if len(parents) > 1:
44 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
44 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
45 continue
45 continue
46
46
47 ctx1 = parents[0]
47 ctx1 = parents[0]
48 lines = 0
48 lines = 0
49 ui.pushbuffer()
49 ui.pushbuffer()
50 patch.diff(repo, ctx1.node(), ctx2.node())
50 patch.diff(repo, ctx1.node(), ctx2.node())
51 diff = ui.popbuffer()
51 diff = ui.popbuffer()
52
52
53 for l in diff.split('\n'):
53 for l in diff.split('\n'):
54 if (l.startswith("+") and not l.startswith("+++ ") or
54 if (l.startswith("+") and not l.startswith("+++ ") or
55 l.startswith("-") and not l.startswith("--- ")):
55 l.startswith("-") and not l.startswith("--- ")):
56 lines += 1
56 lines += 1
57
57
58 user = util.email(ctx2.user())
58 user = util.email(ctx2.user())
59 user = amap.get(user, user) # remap
59 user = amap.get(user, user) # remap
60 stats[user] = stats.get(user, 0) + lines
60 stats[user] = stats.get(user, 0) + lines
61 ui.debug("rev %d: %d lines by %s\n" % (rev, lines, user))
61 ui.debug("rev %d: %d lines by %s\n" % (rev, lines, user))
62
62
63 if progress:
63 if progress:
64 count += 1
64 count += 1
65 newpct = int(100.0 * count / max(len(revs), 1))
65 newpct = int(100.0 * count / max(len(revs), 1))
66 if pct < newpct:
66 if pct < newpct:
67 pct = newpct
67 pct = newpct
68 ui.write("\rGenerating stats: %d%%" % pct)
68 ui.write("\rGenerating stats: %d%%" % pct)
69 sys.stdout.flush()
69 sys.stdout.flush()
70
70
71 if progress:
71 if progress:
72 ui.write("\r")
72 ui.write("\r")
73 sys.stdout.flush()
73 sys.stdout.flush()
74
74
75 return stats
75 return stats
76
76
77 def churn(ui, repo, **opts):
77 def churn(ui, repo, **opts):
78 '''graphs the number of lines changed
78 '''graphs the number of lines changed
79
79
80 The map file format used to specify aliases is fairly simple:
80 The map file format used to specify aliases is fairly simple:
81
81
82 <alias email> <actual email>'''
82 <alias email> <actual email>'''
83
83
84 def pad(s, l):
84 def pad(s, l):
85 return (s + " " * l)[:l]
85 return (s + " " * l)[:l]
86
86
87 amap = {}
87 amap = {}
88 aliases = opts.get('aliases')
88 aliases = opts.get('aliases')
89 if aliases:
89 if aliases:
90 for l in open(aliases, "r"):
90 for l in open(aliases, "r"):
91 l = l.strip()
91 l = l.strip()
92 alias, actual = l.split()
92 alias, actual = l.split()
93 amap[alias] = actual
93 amap[alias] = actual
94
94
95 revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
95 revs = util.sort([int(r) for r in cmdutil.revrange(repo, opts['rev'])])
96 revs.sort()
97 stats = countrevs(ui, repo, amap, revs, opts.get('progress'))
96 stats = countrevs(ui, repo, amap, revs, opts.get('progress'))
98 if not stats:
97 if not stats:
99 return
98 return
100
99
101 stats = [(-l, u, l) for u,l in stats.items()]
100 stats = util.sort([(-l, u, l) for u,l in stats.items()])
102 stats.sort()
103 maxchurn = float(max(1, stats[0][2]))
101 maxchurn = float(max(1, stats[0][2]))
104 maxuser = max([len(u) for k, u, l in stats])
102 maxuser = max([len(u) for k, u, l in stats])
105
103
106 ttywidth = get_tty_width()
104 ttywidth = get_tty_width()
107 ui.debug(_("assuming %i character terminal\n") % ttywidth)
105 ui.debug(_("assuming %i character terminal\n") % ttywidth)
108 width = ttywidth - maxuser - 2 - 6 - 2 - 2
106 width = ttywidth - maxuser - 2 - 6 - 2 - 2
109
107
110 for k, user, churn in stats:
108 for k, user, churn in stats:
111 print "%s %6d %s" % (pad(user, maxuser), churn,
109 print "%s %6d %s" % (pad(user, maxuser), churn,
112 "*" * int(churn * width / maxchurn))
110 "*" * int(churn * width / maxchurn))
113
111
114 cmdtable = {
112 cmdtable = {
115 "churn":
113 "churn":
116 (churn,
114 (churn,
117 [('r', 'rev', [], _('limit statistics to the specified revisions')),
115 [('r', 'rev', [], _('limit statistics to the specified revisions')),
118 ('', 'aliases', '', _('file with email aliases')),
116 ('', 'aliases', '', _('file with email aliases')),
119 ('', 'progress', None, _('show progress'))],
117 ('', 'progress', None, _('show progress'))],
120 'hg churn [-r revision range] [-a file] [--progress]'),
118 'hg churn [-r revision range] [-a file] [--progress]'),
121 }
119 }
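In churn() above, contributors are ranked by descending line count by negating the count in the first element of each tuple before sorting. A small self-contained illustration with hypothetical data, where sorted() stands in for util.sort:

    stats = {'alice': 120, 'bob': 300, 'carol': 45}    # hypothetical user -> changed lines
    ranked = sorted([(-l, u, l) for u, l in stats.items()])
    for k, user, churn in ranked:
        print "%s %d" % (user, churn)                  # bob 300, alice 120, carol 45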
@@ -1,355 +1,349 b''
1 # CVS conversion code inspired by hg-cvs-import and git-cvsimport
1 # CVS conversion code inspired by hg-cvs-import and git-cvsimport
2
2
3 import os, locale, re, socket
3 import os, locale, re, socket
4 from cStringIO import StringIO
4 from cStringIO import StringIO
5 from mercurial import util
5 from mercurial import util
6 from mercurial.i18n import _
6 from mercurial.i18n import _
7
7
8 from common import NoRepo, commit, converter_source, checktool
8 from common import NoRepo, commit, converter_source, checktool
9 import cvsps
9 import cvsps
10
10
11 class convert_cvs(converter_source):
11 class convert_cvs(converter_source):
12 def __init__(self, ui, path, rev=None):
12 def __init__(self, ui, path, rev=None):
13 super(convert_cvs, self).__init__(ui, path, rev=rev)
13 super(convert_cvs, self).__init__(ui, path, rev=rev)
14
14
15 cvs = os.path.join(path, "CVS")
15 cvs = os.path.join(path, "CVS")
16 if not os.path.exists(cvs):
16 if not os.path.exists(cvs):
17 raise NoRepo("%s does not look like a CVS checkout" % path)
17 raise NoRepo("%s does not look like a CVS checkout" % path)
18
18
19 checktool('cvs')
19 checktool('cvs')
20 self.cmd = ui.config('convert', 'cvsps', 'cvsps -A -u --cvs-direct -q')
20 self.cmd = ui.config('convert', 'cvsps', 'cvsps -A -u --cvs-direct -q')
21 cvspsexe = self.cmd.split(None, 1)[0]
21 cvspsexe = self.cmd.split(None, 1)[0]
22 self.builtin = cvspsexe == 'builtin'
22 self.builtin = cvspsexe == 'builtin'
23
23
24 if not self.builtin:
24 if not self.builtin:
25 checktool(cvspsexe)
25 checktool(cvspsexe)
26
26
27 self.changeset = {}
27 self.changeset = {}
28 self.files = {}
28 self.files = {}
29 self.tags = {}
29 self.tags = {}
30 self.lastbranch = {}
30 self.lastbranch = {}
31 self.parent = {}
31 self.parent = {}
32 self.socket = None
32 self.socket = None
33 self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
33 self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
34 self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
34 self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
35 self.encoding = locale.getpreferredencoding()
35 self.encoding = locale.getpreferredencoding()
36
36
37 self._parse(ui)
37 self._parse(ui)
38 self._connect()
38 self._connect()
39
39
40 def _parse(self, ui):
40 def _parse(self, ui):
41 if self.changeset:
41 if self.changeset:
42 return
42 return
43
43
44 maxrev = 0
44 maxrev = 0
45 cmd = self.cmd
45 cmd = self.cmd
46 if self.rev:
46 if self.rev:
47 # TODO: handle tags
47 # TODO: handle tags
48 try:
48 try:
49 # patchset number?
49 # patchset number?
50 maxrev = int(self.rev)
50 maxrev = int(self.rev)
51 except ValueError:
51 except ValueError:
52 try:
52 try:
53 # date
53 # date
54 util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
54 util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
55 cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
55 cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
56 except util.Abort:
56 except util.Abort:
57 raise util.Abort('revision %s is not a patchset number or date' % self.rev)
57 raise util.Abort('revision %s is not a patchset number or date' % self.rev)
58
58
59 d = os.getcwd()
59 d = os.getcwd()
60 try:
60 try:
61 os.chdir(self.path)
61 os.chdir(self.path)
62 id = None
62 id = None
63 state = 0
63 state = 0
64 filerevids = {}
64 filerevids = {}
65
65
66 if self.builtin:
66 if self.builtin:
67 # builtin cvsps code
67 # builtin cvsps code
68 ui.status(_('using builtin cvsps\n'))
68 ui.status(_('using builtin cvsps\n'))
69
69
70 db = cvsps.createlog(ui, cache='update')
70 db = cvsps.createlog(ui, cache='update')
71 db = cvsps.createchangeset(ui, db,
71 db = cvsps.createchangeset(ui, db,
72 fuzz=int(ui.config('convert', 'cvsps.fuzz', 60)),
72 fuzz=int(ui.config('convert', 'cvsps.fuzz', 60)),
73 mergeto=ui.config('convert', 'cvsps.mergeto', None),
73 mergeto=ui.config('convert', 'cvsps.mergeto', None),
74 mergefrom=ui.config('convert', 'cvsps.mergefrom', None))
74 mergefrom=ui.config('convert', 'cvsps.mergefrom', None))
75
75
76 for cs in db:
76 for cs in db:
77 if maxrev and cs.id>maxrev:
77 if maxrev and cs.id>maxrev:
78 break
78 break
79 id = str(cs.id)
79 id = str(cs.id)
80 cs.author = self.recode(cs.author)
80 cs.author = self.recode(cs.author)
81 self.lastbranch[cs.branch] = id
81 self.lastbranch[cs.branch] = id
82 cs.comment = self.recode(cs.comment)
82 cs.comment = self.recode(cs.comment)
83 date = util.datestr(cs.date)
83 date = util.datestr(cs.date)
84 self.tags.update(dict.fromkeys(cs.tags, id))
84 self.tags.update(dict.fromkeys(cs.tags, id))
85
85
86 files = {}
86 files = {}
87 for f in cs.entries:
87 for f in cs.entries:
88 files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
88 files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
89 ['', '(DEAD)'][f.dead])
89 ['', '(DEAD)'][f.dead])
90
90
91 # add current commit to set
91 # add current commit to set
92 c = commit(author=cs.author, date=date,
92 c = commit(author=cs.author, date=date,
93 parents=[str(p.id) for p in cs.parents],
93 parents=[str(p.id) for p in cs.parents],
94 desc=cs.comment, branch=cs.branch or '')
94 desc=cs.comment, branch=cs.branch or '')
95 self.changeset[id] = c
95 self.changeset[id] = c
96 self.files[id] = files
96 self.files[id] = files
97 else:
97 else:
98 # external cvsps
98 # external cvsps
99 for l in util.popen(cmd):
99 for l in util.popen(cmd):
100 if state == 0: # header
100 if state == 0: # header
101 if l.startswith("PatchSet"):
101 if l.startswith("PatchSet"):
102 id = l[9:-2]
102 id = l[9:-2]
103 if maxrev and int(id) > maxrev:
103 if maxrev and int(id) > maxrev:
104 # ignore everything
104 # ignore everything
105 state = 3
105 state = 3
106 elif l.startswith("Date"):
106 elif l.startswith("Date"):
107 date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
107 date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
108 date = util.datestr(date)
108 date = util.datestr(date)
109 elif l.startswith("Branch"):
109 elif l.startswith("Branch"):
110 branch = l[8:-1]
110 branch = l[8:-1]
111 self.parent[id] = self.lastbranch.get(branch, 'bad')
111 self.parent[id] = self.lastbranch.get(branch, 'bad')
112 self.lastbranch[branch] = id
112 self.lastbranch[branch] = id
113 elif l.startswith("Ancestor branch"):
113 elif l.startswith("Ancestor branch"):
114 ancestor = l[17:-1]
114 ancestor = l[17:-1]
115 # figure out the parent later
115 # figure out the parent later
116 self.parent[id] = self.lastbranch[ancestor]
116 self.parent[id] = self.lastbranch[ancestor]
117 elif l.startswith("Author"):
117 elif l.startswith("Author"):
118 author = self.recode(l[8:-1])
118 author = self.recode(l[8:-1])
119 elif l.startswith("Tag:") or l.startswith("Tags:"):
119 elif l.startswith("Tag:") or l.startswith("Tags:"):
120 t = l[l.index(':')+1:]
120 t = l[l.index(':')+1:]
121 t = [ut.strip() for ut in t.split(',')]
121 t = [ut.strip() for ut in t.split(',')]
122 if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
122 if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
123 self.tags.update(dict.fromkeys(t, id))
123 self.tags.update(dict.fromkeys(t, id))
124 elif l.startswith("Log:"):
124 elif l.startswith("Log:"):
125 # switch to gathering log
125 # switch to gathering log
126 state = 1
126 state = 1
127 log = ""
127 log = ""
128 elif state == 1: # log
128 elif state == 1: # log
129 if l == "Members: \n":
129 if l == "Members: \n":
130 # switch to gathering members
130 # switch to gathering members
131 files = {}
131 files = {}
132 oldrevs = []
132 oldrevs = []
133 log = self.recode(log[:-1])
133 log = self.recode(log[:-1])
134 state = 2
134 state = 2
135 else:
135 else:
136 # gather log
136 # gather log
137 log += l
137 log += l
138 elif state == 2: # members
138 elif state == 2: # members
139 if l == "\n": # start of next entry
139 if l == "\n": # start of next entry
140 state = 0
140 state = 0
141 p = [self.parent[id]]
141 p = [self.parent[id]]
142 if id == "1":
142 if id == "1":
143 p = []
143 p = []
144 if branch == "HEAD":
144 if branch == "HEAD":
145 branch = ""
145 branch = ""
146 if branch:
146 if branch:
147 latest = None
147 latest = None
148 # the last changeset that contains a base
148 # the last changeset that contains a base
149 # file is our parent
149 # file is our parent
150 for r in oldrevs:
150 for r in oldrevs:
151 latest = max(filerevids.get(r, None), latest)
151 latest = max(filerevids.get(r, None), latest)
152 if latest:
152 if latest:
153 p = [latest]
153 p = [latest]
154
154
155 # add current commit to set
155 # add current commit to set
156 c = commit(author=author, date=date, parents=p,
156 c = commit(author=author, date=date, parents=p,
157 desc=log, branch=branch)
157 desc=log, branch=branch)
158 self.changeset[id] = c
158 self.changeset[id] = c
159 self.files[id] = files
159 self.files[id] = files
160 else:
160 else:
161 colon = l.rfind(':')
161 colon = l.rfind(':')
162 file = l[1:colon]
162 file = l[1:colon]
163 rev = l[colon+1:-2]
163 rev = l[colon+1:-2]
164 oldrev, rev = rev.split("->")
164 oldrev, rev = rev.split("->")
165 files[file] = rev
165 files[file] = rev
166
166
167 # save some information for identifying branch points
167 # save some information for identifying branch points
168 oldrevs.append("%s:%s" % (oldrev, file))
168 oldrevs.append("%s:%s" % (oldrev, file))
169 filerevids["%s:%s" % (rev, file)] = id
169 filerevids["%s:%s" % (rev, file)] = id
170 elif state == 3:
170 elif state == 3:
171 # swallow all input
171 # swallow all input
172 continue
172 continue
173
173
174 self.heads = self.lastbranch.values()
174 self.heads = self.lastbranch.values()
175 finally:
175 finally:
176 os.chdir(d)
176 os.chdir(d)
177
177
178 def _connect(self):
178 def _connect(self):
179 root = self.cvsroot
179 root = self.cvsroot
180 conntype = None
180 conntype = None
181 user, host = None, None
181 user, host = None, None
182 cmd = ['cvs', 'server']
182 cmd = ['cvs', 'server']
183
183
184 self.ui.status("connecting to %s\n" % root)
184 self.ui.status("connecting to %s\n" % root)
185
185
186 if root.startswith(":pserver:"):
186 if root.startswith(":pserver:"):
187 root = root[9:]
187 root = root[9:]
188 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
188 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
189 root)
189 root)
190 if m:
190 if m:
191 conntype = "pserver"
191 conntype = "pserver"
192 user, passw, serv, port, root = m.groups()
192 user, passw, serv, port, root = m.groups()
193 if not user:
193 if not user:
194 user = "anonymous"
194 user = "anonymous"
195 if not port:
195 if not port:
196 port = 2401
196 port = 2401
197 else:
197 else:
198 port = int(port)
198 port = int(port)
199 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
199 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
200 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
200 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
201
201
202 if not passw:
202 if not passw:
203 passw = "A"
203 passw = "A"
204 pf = open(os.path.join(os.environ["HOME"], ".cvspass"))
204 pf = open(os.path.join(os.environ["HOME"], ".cvspass"))
205 for line in pf.read().splitlines():
205 for line in pf.read().splitlines():
206 part1, part2 = line.split(' ', 1)
206 part1, part2 = line.split(' ', 1)
207 if part1 == '/1':
207 if part1 == '/1':
208 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
208 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
209 part1, part2 = part2.split(' ', 1)
209 part1, part2 = part2.split(' ', 1)
210 format = format1
210 format = format1
211 else:
211 else:
212 # :pserver:user@example.com:/cvsroot/foo Ah<Z
212 # :pserver:user@example.com:/cvsroot/foo Ah<Z
213 format = format0
213 format = format0
214 if part1 == format:
214 if part1 == format:
215 passw = part2
215 passw = part2
216 break
216 break
217 pf.close()
217 pf.close()
218
218
219 sck = socket.socket()
219 sck = socket.socket()
220 sck.connect((serv, port))
220 sck.connect((serv, port))
221 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
221 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
222 "END AUTH REQUEST", ""]))
222 "END AUTH REQUEST", ""]))
223 if sck.recv(128) != "I LOVE YOU\n":
223 if sck.recv(128) != "I LOVE YOU\n":
224 raise util.Abort("CVS pserver authentication failed")
224 raise util.Abort("CVS pserver authentication failed")
225
225
226 self.writep = self.readp = sck.makefile('r+')
226 self.writep = self.readp = sck.makefile('r+')
227
227
228 if not conntype and root.startswith(":local:"):
228 if not conntype and root.startswith(":local:"):
229 conntype = "local"
229 conntype = "local"
230 root = root[7:]
230 root = root[7:]
231
231
232 if not conntype:
232 if not conntype:
233 # :ext:user@host/home/user/path/to/cvsroot
233 # :ext:user@host/home/user/path/to/cvsroot
234 if root.startswith(":ext:"):
234 if root.startswith(":ext:"):
235 root = root[5:]
235 root = root[5:]
236 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
236 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
237 # Do not take Windows path "c:\foo\bar" for a connection strings
237 # Do not take Windows path "c:\foo\bar" for a connection strings
238 if os.path.isdir(root) or not m:
238 if os.path.isdir(root) or not m:
239 conntype = "local"
239 conntype = "local"
240 else:
240 else:
241 conntype = "rsh"
241 conntype = "rsh"
242 user, host, root = m.group(1), m.group(2), m.group(3)
242 user, host, root = m.group(1), m.group(2), m.group(3)
243
243
244 if conntype != "pserver":
244 if conntype != "pserver":
245 if conntype == "rsh":
245 if conntype == "rsh":
246 rsh = os.environ.get("CVS_RSH") or "ssh"
246 rsh = os.environ.get("CVS_RSH") or "ssh"
247 if user:
247 if user:
248 cmd = [rsh, '-l', user, host] + cmd
248 cmd = [rsh, '-l', user, host] + cmd
249 else:
249 else:
250 cmd = [rsh, host] + cmd
250 cmd = [rsh, host] + cmd
251
251
252 # popen2 does not support argument lists under Windows
252 # popen2 does not support argument lists under Windows
253 cmd = [util.shellquote(arg) for arg in cmd]
253 cmd = [util.shellquote(arg) for arg in cmd]
254 cmd = util.quotecommand(' '.join(cmd))
254 cmd = util.quotecommand(' '.join(cmd))
255 self.writep, self.readp = os.popen2(cmd, 'b')
255 self.writep, self.readp = os.popen2(cmd, 'b')
256
256
257 self.realroot = root
257 self.realroot = root
258
258
259 self.writep.write("Root %s\n" % root)
259 self.writep.write("Root %s\n" % root)
260 self.writep.write("Valid-responses ok error Valid-requests Mode"
260 self.writep.write("Valid-responses ok error Valid-requests Mode"
261 " M Mbinary E Checked-in Created Updated"
261 " M Mbinary E Checked-in Created Updated"
262 " Merged Removed\n")
262 " Merged Removed\n")
263 self.writep.write("valid-requests\n")
263 self.writep.write("valid-requests\n")
264 self.writep.flush()
264 self.writep.flush()
265 r = self.readp.readline()
265 r = self.readp.readline()
266 if not r.startswith("Valid-requests"):
266 if not r.startswith("Valid-requests"):
267 raise util.Abort("server sucks")
267 raise util.Abort("server sucks")
268 if "UseUnchanged" in r:
268 if "UseUnchanged" in r:
269 self.writep.write("UseUnchanged\n")
269 self.writep.write("UseUnchanged\n")
270 self.writep.flush()
270 self.writep.flush()
271 r = self.readp.readline()
271 r = self.readp.readline()
272
272
273 def getheads(self):
273 def getheads(self):
274 return self.heads
274 return self.heads
275
275
276 def _getfile(self, name, rev):
276 def _getfile(self, name, rev):
277
277
278 def chunkedread(fp, count):
278 def chunkedread(fp, count):
279 # file-objects returned by socked.makefile() do not handle
279 # file-objects returned by socked.makefile() do not handle
280 # large read() requests very well.
280 # large read() requests very well.
281 chunksize = 65536
281 chunksize = 65536
282 output = StringIO()
282 output = StringIO()
283 while count > 0:
283 while count > 0:
284 data = fp.read(min(count, chunksize))
284 data = fp.read(min(count, chunksize))
285 if not data:
285 if not data:
286 raise util.Abort("%d bytes missing from remote file" % count)
286 raise util.Abort("%d bytes missing from remote file" % count)
287 count -= len(data)
287 count -= len(data)
288 output.write(data)
288 output.write(data)
289 return output.getvalue()
289 return output.getvalue()
290
290
291 if rev.endswith("(DEAD)"):
291 if rev.endswith("(DEAD)"):
292 raise IOError
292 raise IOError
293
293
294 args = ("-N -P -kk -r %s --" % rev).split()
294 args = ("-N -P -kk -r %s --" % rev).split()
295 args.append(self.cvsrepo + '/' + name)
295 args.append(self.cvsrepo + '/' + name)
296 for x in args:
296 for x in args:
297 self.writep.write("Argument %s\n" % x)
297 self.writep.write("Argument %s\n" % x)
298 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
298 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
299 self.writep.flush()
299 self.writep.flush()
300
300
301 data = ""
301 data = ""
302 while 1:
302 while 1:
303 line = self.readp.readline()
303 line = self.readp.readline()
304 if line.startswith("Created ") or line.startswith("Updated "):
304 if line.startswith("Created ") or line.startswith("Updated "):
305 self.readp.readline() # path
305 self.readp.readline() # path
306 self.readp.readline() # entries
306 self.readp.readline() # entries
307 mode = self.readp.readline()[:-1]
307 mode = self.readp.readline()[:-1]
308 count = int(self.readp.readline()[:-1])
308 count = int(self.readp.readline()[:-1])
309 data = chunkedread(self.readp, count)
309 data = chunkedread(self.readp, count)
310 elif line.startswith(" "):
310 elif line.startswith(" "):
311 data += line[1:]
311 data += line[1:]
312 elif line.startswith("M "):
312 elif line.startswith("M "):
313 pass
313 pass
314 elif line.startswith("Mbinary "):
314 elif line.startswith("Mbinary "):
315 count = int(self.readp.readline()[:-1])
315 count = int(self.readp.readline()[:-1])
316 data = chunkedread(self.readp, count)
316 data = chunkedread(self.readp, count)
317 else:
317 else:
318 if line == "ok\n":
318 if line == "ok\n":
319 return (data, "x" in mode and "x" or "")
319 return (data, "x" in mode and "x" or "")
320 elif line.startswith("E "):
320 elif line.startswith("E "):
321 self.ui.warn("cvs server: %s\n" % line[2:])
321 self.ui.warn("cvs server: %s\n" % line[2:])
322 elif line.startswith("Remove"):
322 elif line.startswith("Remove"):
323 l = self.readp.readline()
323 l = self.readp.readline()
324 l = self.readp.readline()
324 l = self.readp.readline()
325 if l != "ok\n":
325 if l != "ok\n":
326 raise util.Abort("unknown CVS response: %s" % l)
326 raise util.Abort("unknown CVS response: %s" % l)
327 else:
327 else:
328 raise util.Abort("unknown CVS response: %s" % line)
328 raise util.Abort("unknown CVS response: %s" % line)
329
329
330 def getfile(self, file, rev):
330 def getfile(self, file, rev):
331 data, mode = self._getfile(file, rev)
331 data, mode = self._getfile(file, rev)
332 self.modecache[(file, rev)] = mode
332 self.modecache[(file, rev)] = mode
333 return data
333 return data
334
334
335 def getmode(self, file, rev):
335 def getmode(self, file, rev):
336 return self.modecache[(file, rev)]
336 return self.modecache[(file, rev)]
337
337
338 def getchanges(self, rev):
338 def getchanges(self, rev):
339 self.modecache = {}
339 self.modecache = {}
340 files = self.files[rev]
340 return util.sort(self.files[rev].items()), {}
341 cl = files.items()
342 cl.sort()
343 return (cl, {})
344
341
345 def getcommit(self, rev):
342 def getcommit(self, rev):
346 return self.changeset[rev]
343 return self.changeset[rev]
347
344
348 def gettags(self):
345 def gettags(self):
349 return self.tags
346 return self.tags
350
347
351 def getchangedfiles(self, rev, i):
348 def getchangedfiles(self, rev, i):
352 files = self.files[rev].keys()
349 return util.sort(self.files[rev].keys())
353 files.sort()
354 return files
355
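getchanges() and getchangedfiles() above now return deterministically ordered results by sorting the per-revision file dictionary's items and keys directly. A brief illustration of the returned shapes with hypothetical file data, again with sorted() standing in for util.sort:

    files = {'src/b.c': '1.2', 'src/a.c': '1.5(DEAD)'}   # hypothetical self.files[rev]

    changes = (sorted(files.items()), {})   # getchanges(): ordered (file, rev) pairs plus an
                                            # empty dict (the copies map in the converter API)
    names = sorted(files.keys())            # getchangedfiles(): ordered file names

    assert changes[0][0] == ('src/a.c', '1.5(DEAD)')
    assert names == ['src/a.c', 'src/b.c']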
@@ -1,551 +1,548 b''
1 #
1 #
2 # Mercurial built-in replacement for cvsps.
2 # Mercurial built-in replacement for cvsps.
3 #
3 #
4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os
9 import os
10 import re
10 import re
11 import sys
11 import sys
12 import cPickle as pickle
12 import cPickle as pickle
13 from mercurial import util
13 from mercurial import util
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 def listsort(list, key):
16 def listsort(list, key):
17 "helper to sort by key in Python 2.3"
17 "helper to sort by key in Python 2.3"
18 try:
18 try:
19 list.sort(key=key)
19 list.sort(key=key)
20 except TypeError:
20 except TypeError:
21 list.sort(lambda l, r: cmp(key(l), key(r)))
21 list.sort(lambda l, r: cmp(key(l), key(r)))
22
22
23 class logentry(object):
23 class logentry(object):
24 '''Class logentry has the following attributes:
24 '''Class logentry has the following attributes:
25 .author - author name as CVS knows it
25 .author - author name as CVS knows it
26 .branch - name of branch this revision is on
26 .branch - name of branch this revision is on
27 .branches - revision tuple of branches starting at this revision
27 .branches - revision tuple of branches starting at this revision
28 .comment - commit message
28 .comment - commit message
29 .date - the commit date as a (time, tz) tuple
29 .date - the commit date as a (time, tz) tuple
30 .dead - true if file revision is dead
30 .dead - true if file revision is dead
31 .file - Name of file
31 .file - Name of file
32 .lines - a tuple (+lines, -lines) or None
32 .lines - a tuple (+lines, -lines) or None
33 .parent - Previous revision of this entry
33 .parent - Previous revision of this entry
34 .rcs - name of file as returned from CVS
34 .rcs - name of file as returned from CVS
35 .revision - revision number as tuple
35 .revision - revision number as tuple
36 .tags - list of tags on the file
36 .tags - list of tags on the file
37 '''
37 '''
38 def __init__(self, **entries):
38 def __init__(self, **entries):
39 self.__dict__.update(entries)
39 self.__dict__.update(entries)
40
40
41 class logerror(Exception):
41 class logerror(Exception):
42 pass
42 pass
43
43
44 def createlog(ui, directory=None, root="", rlog=True, cache=None):
44 def createlog(ui, directory=None, root="", rlog=True, cache=None):
45 '''Collect the CVS rlog'''
45 '''Collect the CVS rlog'''
46
46
47 # Because we store many duplicate commit log messages, reusing strings
47 # Because we store many duplicate commit log messages, reusing strings
48 # saves a lot of memory and pickle storage space.
48 # saves a lot of memory and pickle storage space.
49 _scache = {}
49 _scache = {}
50 def scache(s):
50 def scache(s):
51 "return a shared version of a string"
51 "return a shared version of a string"
52 return _scache.setdefault(s, s)
52 return _scache.setdefault(s, s)
53
53
54 ui.status(_('collecting CVS rlog\n'))
54 ui.status(_('collecting CVS rlog\n'))
55
55
56 log = [] # list of logentry objects containing the CVS state
56 log = [] # list of logentry objects containing the CVS state
57
57
58 # patterns to match in CVS (r)log output, by state of use
58 # patterns to match in CVS (r)log output, by state of use
59 re_00 = re.compile('RCS file: (.+)$')
59 re_00 = re.compile('RCS file: (.+)$')
60 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
60 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
61 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
61 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
62 re_03 = re.compile("(Cannot access.+CVSROOT)|(can't create temporary directory.+)$")
62 re_03 = re.compile("(Cannot access.+CVSROOT)|(can't create temporary directory.+)$")
63 re_10 = re.compile('Working file: (.+)$')
63 re_10 = re.compile('Working file: (.+)$')
64 re_20 = re.compile('symbolic names:')
64 re_20 = re.compile('symbolic names:')
65 re_30 = re.compile('\t(.+): ([\\d.]+)$')
65 re_30 = re.compile('\t(.+): ([\\d.]+)$')
66 re_31 = re.compile('----------------------------$')
66 re_31 = re.compile('----------------------------$')
67 re_32 = re.compile('=============================================================================$')
67 re_32 = re.compile('=============================================================================$')
68 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
68 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
69 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?')
69 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?')
70 re_70 = re.compile('branches: (.+);$')
70 re_70 = re.compile('branches: (.+);$')
71
71
72 prefix = '' # leading path to strip of what we get from CVS
72 prefix = '' # leading path to strip of what we get from CVS
73
73
74 if directory is None:
74 if directory is None:
75 # Current working directory
75 # Current working directory
76
76
77 # Get the real directory in the repository
77 # Get the real directory in the repository
78 try:
78 try:
79 prefix = file(os.path.join('CVS','Repository')).read().strip()
79 prefix = file(os.path.join('CVS','Repository')).read().strip()
80 if prefix == ".":
80 if prefix == ".":
81 prefix = ""
81 prefix = ""
82 directory = prefix
82 directory = prefix
83 except IOError:
83 except IOError:
84 raise logerror('Not a CVS sandbox')
84 raise logerror('Not a CVS sandbox')
85
85
86 if prefix and not prefix.endswith('/'):
86 if prefix and not prefix.endswith('/'):
87 prefix += '/'
87 prefix += '/'
88
88
89 # Use the Root file in the sandbox, if it exists
89 # Use the Root file in the sandbox, if it exists
90 try:
90 try:
91 root = file(os.path.join('CVS','Root')).read().strip()
91 root = file(os.path.join('CVS','Root')).read().strip()
92 except IOError:
92 except IOError:
93 pass
93 pass
94
94
95 if not root:
95 if not root:
96 root = os.environ.get('CVSROOT', '')
96 root = os.environ.get('CVSROOT', '')
97
97
98 # read log cache if one exists
98 # read log cache if one exists
99 oldlog = []
99 oldlog = []
100 date = None
100 date = None
101
101
102 if cache:
102 if cache:
103 cachedir = os.path.expanduser('~/.hg.cvsps')
103 cachedir = os.path.expanduser('~/.hg.cvsps')
104 if not os.path.exists(cachedir):
104 if not os.path.exists(cachedir):
105 os.mkdir(cachedir)
105 os.mkdir(cachedir)
106
106
107 # The cvsps cache pickle needs a uniquified name, based on the
107 # The cvsps cache pickle needs a uniquified name, based on the
108 # repository location. The address may have all sort of nasties
108 # repository location. The address may have all sort of nasties
109 # in it, slashes, colons and such. So here we take just the
109 # in it, slashes, colons and such. So here we take just the
110 # alphanumerics, concatenated in a way that does not mix up the
110 # alphanumerics, concatenated in a way that does not mix up the
111 # various components, so that
111 # various components, so that
112 # :pserver:user@server:/path
112 # :pserver:user@server:/path
113 # and
113 # and
114 # /pserver/user/server/path
114 # /pserver/user/server/path
115 # are mapped to different cache file names.
115 # are mapped to different cache file names.
116 cachefile = root.split(":") + [directory, "cache"]
116 cachefile = root.split(":") + [directory, "cache"]
117 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
117 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
118 cachefile = os.path.join(cachedir,
118 cachefile = os.path.join(cachedir,
119 '.'.join([s for s in cachefile if s]))
119 '.'.join([s for s in cachefile if s]))
120
120
121 if cache == 'update':
121 if cache == 'update':
122 try:
122 try:
123 ui.note(_('reading cvs log cache %s\n') % cachefile)
123 ui.note(_('reading cvs log cache %s\n') % cachefile)
124 oldlog = pickle.load(file(cachefile))
124 oldlog = pickle.load(file(cachefile))
125 ui.note(_('cache has %d log entries\n') % len(oldlog))
125 ui.note(_('cache has %d log entries\n') % len(oldlog))
126 except Exception, e:
126 except Exception, e:
127 ui.note(_('error reading cache: %r\n') % e)
127 ui.note(_('error reading cache: %r\n') % e)
128
128
129 if oldlog:
129 if oldlog:
130 date = oldlog[-1].date # last commit date as a (time,tz) tuple
130 date = oldlog[-1].date # last commit date as a (time,tz) tuple
131 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
131 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
132
132
133 # build the CVS commandline
133 # build the CVS commandline
134 cmd = ['cvs', '-q']
134 cmd = ['cvs', '-q']
135 if root:
135 if root:
136 cmd.append('-d%s' % root)
136 cmd.append('-d%s' % root)
137 p = root.split(':')[-1]
137 p = root.split(':')[-1]
138 if not p.endswith('/'):
138 if not p.endswith('/'):
139 p += '/'
139 p += '/'
140 prefix = p + prefix
140 prefix = p + prefix
141 cmd.append(['log', 'rlog'][rlog])
141 cmd.append(['log', 'rlog'][rlog])
142 if date:
142 if date:
143 # no space between option and date string
143 # no space between option and date string
144 cmd.append('-d>%s' % date)
144 cmd.append('-d>%s' % date)
145 cmd.append(directory)
145 cmd.append(directory)
146
146
147 # state machine begins here
147 # state machine begins here
148 tags = {} # dictionary of revisions on current file with their tags
148 tags = {} # dictionary of revisions on current file with their tags
149 state = 0
149 state = 0
150 store = False # set when a new record can be appended
150 store = False # set when a new record can be appended
151
151
152 cmd = [util.shellquote(arg) for arg in cmd]
152 cmd = [util.shellquote(arg) for arg in cmd]
153 ui.note("running %s\n" % (' '.join(cmd)))
153 ui.note("running %s\n" % (' '.join(cmd)))
154 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
154 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
155
155
156 for line in util.popen(' '.join(cmd)):
156 for line in util.popen(' '.join(cmd)):
157 if line.endswith('\n'):
157 if line.endswith('\n'):
158 line = line[:-1]
158 line = line[:-1]
159 #ui.debug('state=%d line=%r\n' % (state, line))
159 #ui.debug('state=%d line=%r\n' % (state, line))
160
160
161 if state == 0:
161 if state == 0:
162 # initial state, consume input until we see 'RCS file'
162 # initial state, consume input until we see 'RCS file'
163 match = re_00.match(line)
163 match = re_00.match(line)
164 if match:
164 if match:
165 rcs = match.group(1)
165 rcs = match.group(1)
166 tags = {}
166 tags = {}
167 if rlog:
167 if rlog:
168 filename = rcs[:-2]
168 filename = rcs[:-2]
169 if filename.startswith(prefix):
169 if filename.startswith(prefix):
170 filename = filename[len(prefix):]
170 filename = filename[len(prefix):]
171 if filename.startswith('/'):
171 if filename.startswith('/'):
172 filename = filename[1:]
172 filename = filename[1:]
173 if filename.startswith('Attic/'):
173 if filename.startswith('Attic/'):
174 filename = filename[6:]
174 filename = filename[6:]
175 else:
175 else:
176 filename = filename.replace('/Attic/', '/')
176 filename = filename.replace('/Attic/', '/')
177 state = 2
177 state = 2
178 continue
178 continue
179 state = 1
179 state = 1
180 continue
180 continue
181 match = re_01.match(line)
181 match = re_01.match(line)
182 if match:
182 if match:
183 raise Exception(match.group(1))
183 raise Exception(match.group(1))
184 match = re_02.match(line)
184 match = re_02.match(line)
185 if match:
185 if match:
186 raise Exception(match.group(2))
186 raise Exception(match.group(2))
187 if re_03.match(line):
187 if re_03.match(line):
188 raise Exception(line)
188 raise Exception(line)
189
189
190 elif state == 1:
190 elif state == 1:
191 # expect 'Working file' (only when using log instead of rlog)
191 # expect 'Working file' (only when using log instead of rlog)
192 match = re_10.match(line)
192 match = re_10.match(line)
193 assert match, _('RCS file must be followed by working file')
193 assert match, _('RCS file must be followed by working file')
194 filename = match.group(1)
194 filename = match.group(1)
195 state = 2
195 state = 2
196
196
197 elif state == 2:
197 elif state == 2:
198 # expect 'symbolic names'
198 # expect 'symbolic names'
199 if re_20.match(line):
199 if re_20.match(line):
200 state = 3
200 state = 3
201
201
202 elif state == 3:
202 elif state == 3:
203 # read the symbolic names and store as tags
203 # read the symbolic names and store as tags
204 match = re_30.match(line)
204 match = re_30.match(line)
205 if match:
205 if match:
206 rev = [int(x) for x in match.group(2).split('.')]
206 rev = [int(x) for x in match.group(2).split('.')]
207
207
208 # Convert magic branch number to an odd-numbered one
208 # Convert magic branch number to an odd-numbered one
209 revn = len(rev)
209 revn = len(rev)
210 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
210 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
211 rev = rev[:-2] + rev[-1:]
211 rev = rev[:-2] + rev[-1:]
212 rev = tuple(rev)
212 rev = tuple(rev)
213
213
214 if rev not in tags:
214 if rev not in tags:
215 tags[rev] = []
215 tags[rev] = []
216 tags[rev].append(match.group(1))
216 tags[rev].append(match.group(1))
217
217
218 elif re_31.match(line):
218 elif re_31.match(line):
219 state = 5
219 state = 5
220 elif re_32.match(line):
220 elif re_32.match(line):
221 state = 0
221 state = 0
222
222
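For reference, a minimal standalone sketch of the magic-branch-number normalization applied in state 3 above (the rule comes from CVS, where a branch tag such as 1.2.0.4 denotes branch 1.2.4); the function name is illustrative only:

def normalize_branchrev(revstr):
    # drop the penultimate 0 that CVS inserts into "magic" branch numbers
    rev = [int(x) for x in revstr.split('.')]
    if len(rev) > 3 and len(rev) % 2 == 0 and rev[-2] == 0:
        rev = rev[:-2] + rev[-1:]
    return tuple(rev)

print(normalize_branchrev('1.2.0.4'))   # magic branch number -> (1, 2, 4)
print(normalize_branchrev('1.2.4.1'))   # ordinary revision   -> (1, 2, 4, 1)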
223 elif state == 4:
223 elif state == 4:
224 # expecting '------' separator before first revision
224 # expecting '------' separator before first revision
225 if re_31.match(line):
225 if re_31.match(line):
226 state = 5
226 state = 5
227 else:
227 else:
228 assert not re_32.match(line), _('Must have at least some revisions')
228 assert not re_32.match(line), _('Must have at least some revisions')
229
229
230 elif state == 5:
230 elif state == 5:
231 # expecting revision number and possibly (ignored) lock indication
231 # expecting revision number and possibly (ignored) lock indication
232 # we create the logentry here from values stored in states 0 to 4,
232 # we create the logentry here from values stored in states 0 to 4,
233 # as this state is re-entered for subsequent revisions of a file.
233 # as this state is re-entered for subsequent revisions of a file.
234 match = re_50.match(line)
234 match = re_50.match(line)
235 assert match, _('expected revision number')
235 assert match, _('expected revision number')
236 e = logentry(rcs=scache(rcs), file=scache(filename),
236 e = logentry(rcs=scache(rcs), file=scache(filename),
237 revision=tuple([int(x) for x in match.group(1).split('.')]),
237 revision=tuple([int(x) for x in match.group(1).split('.')]),
238 branches=[], parent=None)
238 branches=[], parent=None)
239 state = 6
239 state = 6
240
240
241 elif state == 6:
241 elif state == 6:
242 # expecting date, author, state, lines changed
242 # expecting date, author, state, lines changed
243 match = re_60.match(line)
243 match = re_60.match(line)
244 assert match, _('revision must be followed by date line')
244 assert match, _('revision must be followed by date line')
245 d = match.group(1)
245 d = match.group(1)
246 if d[2] == '/':
246 if d[2] == '/':
247 # Y2K
247 # Y2K
248 d = '19' + d
248 d = '19' + d
249
249
250 if len(d.split()) != 3:
250 if len(d.split()) != 3:
251 # cvs log dates always in GMT
251 # cvs log dates always in GMT
252 d = d + ' UTC'
252 d = d + ' UTC'
253 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'])
253 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'])
254 e.author = scache(match.group(2))
254 e.author = scache(match.group(2))
255 e.dead = match.group(3).lower() == 'dead'
255 e.dead = match.group(3).lower() == 'dead'
256
256
257 if match.group(5):
257 if match.group(5):
258 if match.group(6):
258 if match.group(6):
259 e.lines = (int(match.group(5)), int(match.group(6)))
259 e.lines = (int(match.group(5)), int(match.group(6)))
260 else:
260 else:
261 e.lines = (int(match.group(5)), 0)
261 e.lines = (int(match.group(5)), 0)
262 elif match.group(6):
262 elif match.group(6):
263 e.lines = (0, int(match.group(6)))
263 e.lines = (0, int(match.group(6)))
264 else:
264 else:
265 e.lines = None
265 e.lines = None
266 e.comment = []
266 e.comment = []
267 state = 7
267 state = 7
268
268
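The date massaging in state 6 is easy to check in isolation; a small sketch in plain Python (no Mercurial API), assuming two-digit years can only come from pre-2000 logs:

def fixup_cvsdate(d):
    if d[2] == '/':            # two-digit year from an old CVS log: assume 19xx
        d = '19' + d
    if len(d.split()) != 3:    # no timezone token: cvs log dates are GMT
        d = d + ' UTC'
    return d

print(fixup_cvsdate('97/06/05 12:00:00'))          # -> '1997/06/05 12:00:00 UTC'
print(fixup_cvsdate('2008/06/05 12:00:00 +0000'))  # already complete, unchanged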
269 elif state == 7:
269 elif state == 7:
270 # read the revision numbers of branches that start at this revision
270 # read the revision numbers of branches that start at this revision
271 # or store the commit log message otherwise
271 # or store the commit log message otherwise
272 m = re_70.match(line)
272 m = re_70.match(line)
273 if m:
273 if m:
274 e.branches = [tuple([int(y) for y in x.strip().split('.')])
274 e.branches = [tuple([int(y) for y in x.strip().split('.')])
275 for x in m.group(1).split(';')]
275 for x in m.group(1).split(';')]
276 state = 8
276 state = 8
277 elif re_31.match(line):
277 elif re_31.match(line):
278 state = 5
278 state = 5
279 store = True
279 store = True
280 elif re_32.match(line):
280 elif re_32.match(line):
281 state = 0
281 state = 0
282 store = True
282 store = True
283 else:
283 else:
284 e.comment.append(line)
284 e.comment.append(line)
285
285
286 elif state == 8:
286 elif state == 8:
287 # store commit log message
287 # store commit log message
288 if re_31.match(line):
288 if re_31.match(line):
289 state = 5
289 state = 5
290 store = True
290 store = True
291 elif re_32.match(line):
291 elif re_32.match(line):
292 state = 0
292 state = 0
293 store = True
293 store = True
294 else:
294 else:
295 e.comment.append(line)
295 e.comment.append(line)
296
296
297 if store:
297 if store:
298 # clean up the results and save in the log.
298 # clean up the results and save in the log.
299 store = False
299 store = False
300 e.tags = [scache(x) for x in tags.get(e.revision, [])]
300 e.tags = util.sort([scache(x) for x in tags.get(e.revision, [])])
301 e.tags.sort()
302 e.comment = scache('\n'.join(e.comment))
301 e.comment = scache('\n'.join(e.comment))
303
302
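This hunk is representative of the whole change: the build-then-sort-in-place pattern is replaced by the new util.sort helper, used as an expression. The helper itself is not shown in this excerpt; a plausible minimal equivalent (an assumption, not a quote of the actual implementation) is:

def sort(l):
    """return a sorted copy of l, accepting any iterable"""
    if not isinstance(l, list):
        l = list(l)
    l.sort()
    return l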
304 revn = len(e.revision)
303 revn = len(e.revision)
305 if revn > 3 and (revn % 2) == 0:
304 if revn > 3 and (revn % 2) == 0:
306 e.branch = tags.get(e.revision[:-1], [None])[0]
305 e.branch = tags.get(e.revision[:-1], [None])[0]
307 else:
306 else:
308 e.branch = None
307 e.branch = None
309
308
310 log.append(e)
309 log.append(e)
311
310
312 if len(log) % 100 == 0:
311 if len(log) % 100 == 0:
313 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
312 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
314
313
315 listsort(log, key=lambda x:(x.rcs, x.revision))
314 listsort(log, key=lambda x:(x.rcs, x.revision))
316
315
317 # find parent revisions of individual files
316 # find parent revisions of individual files
318 versions = {}
317 versions = {}
319 for e in log:
318 for e in log:
320 branch = e.revision[:-1]
319 branch = e.revision[:-1]
321 p = versions.get((e.rcs, branch), None)
320 p = versions.get((e.rcs, branch), None)
322 if p is None:
321 if p is None:
323 p = e.revision[:-2]
322 p = e.revision[:-2]
324 e.parent = p
323 e.parent = p
325 versions[(e.rcs, branch)] = e.revision
324 versions[(e.rcs, branch)] = e.revision
326
325
327 # update the log cache
326 # update the log cache
328 if cache:
327 if cache:
329 if log:
328 if log:
330 # join up the old and new logs
329 # join up the old and new logs
331 listsort(log, key=lambda x:x.date)
330 listsort(log, key=lambda x:x.date)
332
331
333 if oldlog and oldlog[-1].date >= log[0].date:
332 if oldlog and oldlog[-1].date >= log[0].date:
334 raise logerror('Log cache overlaps with new log entries,'
333 raise logerror('Log cache overlaps with new log entries,'
335 ' re-run without cache.')
334 ' re-run without cache.')
336
335
337 log = oldlog + log
336 log = oldlog + log
338
337
339 # write the new cachefile
338 # write the new cachefile
340 ui.note(_('writing cvs log cache %s\n') % cachefile)
339 ui.note(_('writing cvs log cache %s\n') % cachefile)
341 pickle.dump(log, file(cachefile, 'w'))
340 pickle.dump(log, file(cachefile, 'w'))
342 else:
341 else:
343 log = oldlog
342 log = oldlog
344
343
345 ui.status(_('%d log entries\n') % len(log))
344 ui.status(_('%d log entries\n') % len(log))
346
345
347 return log
346 return log
348
347
349
348
350 class changeset(object):
349 class changeset(object):
351 '''Class changeset has the following attributes:
350 '''Class changeset has the following attributes:
352 .author - author name as CVS knows it
351 .author - author name as CVS knows it
353 .branch - name of branch this changeset is on, or None
352 .branch - name of branch this changeset is on, or None
354 .comment - commit message
353 .comment - commit message
355 .date - the commit date as a (time,tz) tuple
354 .date - the commit date as a (time,tz) tuple
356 .entries - list of logentry objects in this changeset
355 .entries - list of logentry objects in this changeset
357 .parents - list of one or two parent changesets
356 .parents - list of one or two parent changesets
358 .tags - list of tags on this changeset
357 .tags - list of tags on this changeset
359 '''
358 '''
360 def __init__(self, **entries):
359 def __init__(self, **entries):
361 self.__dict__.update(entries)
360 self.__dict__.update(entries)
362
361
363 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
362 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
364 '''Convert log into changesets.'''
363 '''Convert log into changesets.'''
365
364
366 ui.status(_('creating changesets\n'))
365 ui.status(_('creating changesets\n'))
367
366
368 # Merge changesets
367 # Merge changesets
369
368
370 listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))
369 listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))
371
370
372 changesets = []
371 changesets = []
373 files = {}
372 files = {}
374 c = None
373 c = None
375 for i, e in enumerate(log):
374 for i, e in enumerate(log):
376
375
377 # Check if log entry belongs to the current changeset or not.
376 # Check if log entry belongs to the current changeset or not.
378 if not (c and
377 if not (c and
379 e.comment == c.comment and
378 e.comment == c.comment and
380 e.author == c.author and
379 e.author == c.author and
381 e.branch == c.branch and
380 e.branch == c.branch and
382 ((c.date[0] + c.date[1]) <=
381 ((c.date[0] + c.date[1]) <=
383 (e.date[0] + e.date[1]) <=
382 (e.date[0] + e.date[1]) <=
384 (c.date[0] + c.date[1]) + fuzz) and
383 (c.date[0] + c.date[1]) + fuzz) and
385 e.file not in files):
384 e.file not in files):
386 c = changeset(comment=e.comment, author=e.author,
385 c = changeset(comment=e.comment, author=e.author,
387 branch=e.branch, date=e.date, entries=[])
386 branch=e.branch, date=e.date, entries=[])
388 changesets.append(c)
387 changesets.append(c)
389 files = {}
388 files = {}
390 if len(changesets) % 100 == 0:
389 if len(changesets) % 100 == 0:
391 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
390 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
392 ui.status(util.ellipsis(t, 80) + '\n')
391 ui.status(util.ellipsis(t, 80) + '\n')
393
392
394 c.entries.append(e)
393 c.entries.append(e)
395 files[e.file] = True
394 files[e.file] = True
396 c.date = e.date # changeset date is date of latest commit in it
395 c.date = e.date # changeset date is date of latest commit in it
397
396
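The membership test above can be read as a single predicate; a simplified sketch (attribute names shortened and dates reduced to plain seconds, for illustration only) of when a log entry joins the current changeset:

def belongs_to(c, e, files, fuzz=60):
    # same comment, author and branch, committed within `fuzz` seconds of the
    # changeset, and the file not already touched by this changeset
    return (c is not None
            and e.comment == c.comment
            and e.author == c.author
            and e.branch == c.branch
            and c.time <= e.time <= c.time + fuzz
            and e.file not in files)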
398 # Sort files in each changeset
397 # Sort files in each changeset
399
398
400 for c in changesets:
399 for c in changesets:
401 def pathcompare(l, r):
400 def pathcompare(l, r):
402 'Mimic cvsps sorting order'
401 'Mimic cvsps sorting order'
403 l = l.split('/')
402 l = l.split('/')
404 r = r.split('/')
403 r = r.split('/')
405 nl = len(l)
404 nl = len(l)
406 nr = len(r)
405 nr = len(r)
407 n = min(nl, nr)
406 n = min(nl, nr)
408 for i in range(n):
407 for i in range(n):
409 if i + 1 == nl and nl < nr:
408 if i + 1 == nl and nl < nr:
410 return -1
409 return -1
411 elif i + 1 == nr and nl > nr:
410 elif i + 1 == nr and nl > nr:
412 return +1
411 return +1
413 elif l[i] < r[i]:
412 elif l[i] < r[i]:
414 return -1
413 return -1
415 elif l[i] > r[i]:
414 elif l[i] > r[i]:
416 return +1
415 return +1
417 return 0
416 return 0
418 def entitycompare(l, r):
417 def entitycompare(l, r):
419 return pathcompare(l.file, r.file)
418 return pathcompare(l.file, r.file)
420
419
421 c.entries.sort(entitycompare)
420 c.entries.sort(entitycompare)
422
421
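To see why a plain string sort is not enough, compare pathcompare (treating it as a standalone function; it is defined inside the loop above) with Python 2's built-in ordering on a pair where a file sits next to a subdirectory:

print(pathcompare('a/z', 'a/b/c'))   # -1: 'a/z' precedes 'a/b/c' in cvsps order
print(cmp('a/z', 'a/b/c'))           # +1: a plain string compare would reverse them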
423 # Sort changesets by date
422 # Sort changesets by date
424
423
425 def cscmp(l, r):
424 def cscmp(l, r):
426 d = sum(l.date) - sum(r.date)
425 d = sum(l.date) - sum(r.date)
427 if d:
426 if d:
428 return d
427 return d
429
428
430 # detect vendor branches and initial commits on a branch
429 # detect vendor branches and initial commits on a branch
431 le = {}
430 le = {}
432 for e in l.entries:
431 for e in l.entries:
433 le[e.rcs] = e.revision
432 le[e.rcs] = e.revision
434 re = {}
433 re = {}
435 for e in r.entries:
434 for e in r.entries:
436 re[e.rcs] = e.revision
435 re[e.rcs] = e.revision
437
436
438 d = 0
437 d = 0
439 for e in l.entries:
438 for e in l.entries:
440 if re.get(e.rcs, None) == e.parent:
439 if re.get(e.rcs, None) == e.parent:
441 assert not d
440 assert not d
442 d = 1
441 d = 1
443 break
442 break
444
443
445 for e in r.entries:
444 for e in r.entries:
446 if le.get(e.rcs, None) == e.parent:
445 if le.get(e.rcs, None) == e.parent:
447 assert not d
446 assert not d
448 d = -1
447 d = -1
449 break
448 break
450
449
451 return d
450 return d
452
451
453 changesets.sort(cscmp)
452 changesets.sort(cscmp)
454
453
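The tie-break in cscmp is the subtle part: when dates are equal, a changeset that carries the parent revision of a file in the other changeset must sort first. A toy illustration, treating cscmp as standalone and using throwaway classes for the only attributes it reads:

class E(object):
    def __init__(self, rcs, revision, parent):
        self.rcs, self.revision, self.parent = rcs, revision, parent

class C(object):
    def __init__(self, date, entries):
        self.date, self.entries = date, entries

a = C((100, 0), [E('f,v', (1, 1), None)])      # holds revision 1.1 of f,v
b = C((100, 0), [E('f,v', (1, 2), (1, 1))])    # holds 1.2, whose parent is 1.1
print(cscmp(a, b))   # -1: dates are equal, but a carries b's parent, so a first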
455 # Collect tags
454 # Collect tags
456
455
457 globaltags = {}
456 globaltags = {}
458 for c in changesets:
457 for c in changesets:
459 tags = {}
458 tags = {}
460 for e in c.entries:
459 for e in c.entries:
461 for tag in e.tags:
460 for tag in e.tags:
462 # remember which is the latest changeset to have this tag
461 # remember which is the latest changeset to have this tag
463 globaltags[tag] = c
462 globaltags[tag] = c
464
463
465 for c in changesets:
464 for c in changesets:
466 tags = {}
465 tags = {}
467 for e in c.entries:
466 for e in c.entries:
468 for tag in e.tags:
467 for tag in e.tags:
469 tags[tag] = True
468 tags[tag] = True
470 # remember tags only if this is the latest changeset to have it
469 # remember tags only if this is the latest changeset to have it
471 tagnames = [tag for tag in tags if globaltags[tag] is c]
470 c.tags = util.sort([tag for tag in tags if globaltags[tag] is c])
472 tagnames.sort()
473 c.tags = tagnames
474
471
475 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
472 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
476 # by inserting dummy changesets with two parents, and handle
473 # by inserting dummy changesets with two parents, and handle
477 # {{mergefrombranch BRANCHNAME}} by setting two parents.
474 # {{mergefrombranch BRANCHNAME}} by setting two parents.
478
475
479 if mergeto is None:
476 if mergeto is None:
480 mergeto = r'{{mergetobranch ([-\w]+)}}'
477 mergeto = r'{{mergetobranch ([-\w]+)}}'
481 if mergeto:
478 if mergeto:
482 mergeto = re.compile(mergeto)
479 mergeto = re.compile(mergeto)
483
480
484 if mergefrom is None:
481 if mergefrom is None:
485 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
482 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
486 if mergefrom:
483 if mergefrom:
487 mergefrom = re.compile(mergefrom)
484 mergefrom = re.compile(mergefrom)
488
485
489 versions = {} # changeset index where we saw any particular file version
486 versions = {} # changeset index where we saw any particular file version
490 branches = {} # changeset index where we saw a branch
487 branches = {} # changeset index where we saw a branch
491 n = len(changesets)
488 n = len(changesets)
492 i = 0
489 i = 0
493 while i<n:
490 while i<n:
494 c = changesets[i]
491 c = changesets[i]
495
492
496 for f in c.entries:
493 for f in c.entries:
497 versions[(f.rcs, f.revision)] = i
494 versions[(f.rcs, f.revision)] = i
498
495
499 p = None
496 p = None
500 if c.branch in branches:
497 if c.branch in branches:
501 p = branches[c.branch]
498 p = branches[c.branch]
502 else:
499 else:
503 for f in c.entries:
500 for f in c.entries:
504 p = max(p, versions.get((f.rcs, f.parent), None))
501 p = max(p, versions.get((f.rcs, f.parent), None))
505
502
506 c.parents = []
503 c.parents = []
507 if p is not None:
504 if p is not None:
508 c.parents.append(changesets[p])
505 c.parents.append(changesets[p])
509
506
510 if mergefrom:
507 if mergefrom:
511 m = mergefrom.search(c.comment)
508 m = mergefrom.search(c.comment)
512 if m:
509 if m:
513 m = m.group(1)
510 m = m.group(1)
514 if m == 'HEAD':
511 if m == 'HEAD':
515 m = None
512 m = None
516 if m in branches and c.branch != m:
513 if m in branches and c.branch != m:
517 c.parents.append(changesets[branches[m]])
514 c.parents.append(changesets[branches[m]])
518
515
519 if mergeto:
516 if mergeto:
520 m = mergeto.search(c.comment)
517 m = mergeto.search(c.comment)
521 if m:
518 if m:
522 try:
519 try:
523 m = m.group(1)
520 m = m.group(1)
524 if m == 'HEAD':
521 if m == 'HEAD':
525 m = None
522 m = None
526 except:
523 except:
527 m = None # if no group found then merge to HEAD
524 m = None # if no group found then merge to HEAD
528 if m in branches and c.branch != m:
525 if m in branches and c.branch != m:
529 # insert empty changeset for merge
526 # insert empty changeset for merge
530 cc = changeset(author=c.author, branch=m, date=c.date,
527 cc = changeset(author=c.author, branch=m, date=c.date,
531 comment='convert-repo: CVS merge from branch %s' % c.branch,
528 comment='convert-repo: CVS merge from branch %s' % c.branch,
532 entries=[], tags=[], parents=[changesets[branches[m]], c])
529 entries=[], tags=[], parents=[changesets[branches[m]], c])
533 changesets.insert(i + 1, cc)
530 changesets.insert(i + 1, cc)
534 branches[m] = i + 1
531 branches[m] = i + 1
535
532
536 # adjust our loop counters now we have inserted a new entry
533 # adjust our loop counters now we have inserted a new entry
537 n += 1
534 n += 1
538 i += 2
535 i += 2
539 continue
536 continue
540
537
541 branches[c.branch] = i
538 branches[c.branch] = i
542 i += 1
539 i += 1
543
540
544 # Number changesets
541 # Number changesets
545
542
546 for i, c in enumerate(changesets):
543 for i, c in enumerate(changesets):
547 c.id = i + 1
544 c.id = i + 1
548
545
549 ui.status(_('%d changeset entries\n') % len(changesets))
546 ui.status(_('%d changeset entries\n') % len(changesets))
550
547
551 return changesets
548 return changesets
@@ -1,127 +1,126 @@
1 # darcs support for the convert extension
1 # darcs support for the convert extension
2
2
3 from common import NoRepo, checktool, commandline, commit, converter_source
3 from common import NoRepo, checktool, commandline, commit, converter_source
4 from mercurial.i18n import _
4 from mercurial.i18n import _
5 from mercurial import util
5 from mercurial import util
6 import os, shutil, tempfile
6 import os, shutil, tempfile
7
7
8 # The naming drift of ElementTree is fun!
8 # The naming drift of ElementTree is fun!
9
9
10 try: from xml.etree.cElementTree import ElementTree
10 try: from xml.etree.cElementTree import ElementTree
11 except ImportError:
11 except ImportError:
12 try: from xml.etree.ElementTree import ElementTree
12 try: from xml.etree.ElementTree import ElementTree
13 except ImportError:
13 except ImportError:
14 try: from elementtree.cElementTree import ElementTree
14 try: from elementtree.cElementTree import ElementTree
15 except ImportError:
15 except ImportError:
16 try: from elementtree.ElementTree import ElementTree
16 try: from elementtree.ElementTree import ElementTree
17 except ImportError: ElementTree = None
17 except ImportError: ElementTree = None
18
18
19
19
20 class darcs_source(converter_source, commandline):
20 class darcs_source(converter_source, commandline):
21 def __init__(self, ui, path, rev=None):
21 def __init__(self, ui, path, rev=None):
22 converter_source.__init__(self, ui, path, rev=rev)
22 converter_source.__init__(self, ui, path, rev=rev)
23 commandline.__init__(self, ui, 'darcs')
23 commandline.__init__(self, ui, 'darcs')
24
24
25 # check for _darcs, ElementTree, _darcs/inventory so that we can
25 # check for _darcs, ElementTree, _darcs/inventory so that we can
26 # easily skip test-convert-darcs if ElementTree is not around
26 # easily skip test-convert-darcs if ElementTree is not around
27 if not os.path.exists(os.path.join(path, '_darcs')):
27 if not os.path.exists(os.path.join(path, '_darcs')):
28 raise NoRepo("%s does not look like a darcs repo" % path)
28 raise NoRepo("%s does not look like a darcs repo" % path)
29
29
30 checktool('darcs')
30 checktool('darcs')
31
31
32 if ElementTree is None:
32 if ElementTree is None:
33 raise util.Abort(_("Python ElementTree module is not available"))
33 raise util.Abort(_("Python ElementTree module is not available"))
34
34
35 if not os.path.exists(os.path.join(path, '_darcs', 'inventory')):
35 if not os.path.exists(os.path.join(path, '_darcs', 'inventory')):
36 raise NoRepo("%s does not look like a darcs repo" % path)
36 raise NoRepo("%s does not look like a darcs repo" % path)
37
37
38 self.path = os.path.realpath(path)
38 self.path = os.path.realpath(path)
39
39
40 self.lastrev = None
40 self.lastrev = None
41 self.changes = {}
41 self.changes = {}
42 self.parents = {}
42 self.parents = {}
43 self.tags = {}
43 self.tags = {}
44
44
45 def before(self):
45 def before(self):
46 self.tmppath = tempfile.mkdtemp(
46 self.tmppath = tempfile.mkdtemp(
47 prefix='convert-' + os.path.basename(self.path) + '-')
47 prefix='convert-' + os.path.basename(self.path) + '-')
48 output, status = self.run('init', repodir=self.tmppath)
48 output, status = self.run('init', repodir=self.tmppath)
49 self.checkexit(status)
49 self.checkexit(status)
50
50
51 tree = self.xml('changes', xml_output=True, summary=True,
51 tree = self.xml('changes', xml_output=True, summary=True,
52 repodir=self.path)
52 repodir=self.path)
53 tagname = None
53 tagname = None
54 child = None
54 child = None
55 for elt in tree.findall('patch'):
55 for elt in tree.findall('patch'):
56 node = elt.get('hash')
56 node = elt.get('hash')
57 name = elt.findtext('name', '')
57 name = elt.findtext('name', '')
58 if name.startswith('TAG '):
58 if name.startswith('TAG '):
59 tagname = name[4:].strip()
59 tagname = name[4:].strip()
60 elif tagname is not None:
60 elif tagname is not None:
61 self.tags[tagname] = node
61 self.tags[tagname] = node
62 tagname = None
62 tagname = None
63 self.changes[node] = elt
63 self.changes[node] = elt
64 self.parents[child] = [node]
64 self.parents[child] = [node]
65 child = node
65 child = node
66 self.parents[child] = []
66 self.parents[child] = []
67
67
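A toy walk-through of the parent chaining in before() above, assuming the newest-first order in which darcs lists patches (the hashes here are made up):

parents, child = {}, None
for node in ['p3', 'p2', 'p1']:     # newest -> oldest
    parents[child] = [node]
    child = node
parents[child] = []                 # the oldest patch has no parent

print(parents[None])   # ['p3'] -- what getheads() returns
print(parents['p3'])   # ['p2']
print(parents['p1'])   # []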
68 def after(self):
68 def after(self):
69 self.ui.debug('cleaning up %s\n' % self.tmppath)
69 self.ui.debug('cleaning up %s\n' % self.tmppath)
70 shutil.rmtree(self.tmppath, ignore_errors=True)
70 shutil.rmtree(self.tmppath, ignore_errors=True)
71
71
72 def xml(self, cmd, **kwargs):
72 def xml(self, cmd, **kwargs):
73 etree = ElementTree()
73 etree = ElementTree()
74 fp = self._run(cmd, **kwargs)
74 fp = self._run(cmd, **kwargs)
75 etree.parse(fp)
75 etree.parse(fp)
76 self.checkexit(fp.close())
76 self.checkexit(fp.close())
77 return etree.getroot()
77 return etree.getroot()
78
78
79 def getheads(self):
79 def getheads(self):
80 return self.parents[None]
80 return self.parents[None]
81
81
82 def getcommit(self, rev):
82 def getcommit(self, rev):
83 elt = self.changes[rev]
83 elt = self.changes[rev]
84 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
84 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
85 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
85 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
86 return commit(author=elt.get('author'), date=util.datestr(date),
86 return commit(author=elt.get('author'), date=util.datestr(date),
87 desc=desc.strip(), parents=self.parents[rev])
87 desc=desc.strip(), parents=self.parents[rev])
88
88
89 def pull(self, rev):
89 def pull(self, rev):
90 output, status = self.run('pull', self.path, all=True,
90 output, status = self.run('pull', self.path, all=True,
91 match='hash %s' % rev,
91 match='hash %s' % rev,
92 no_test=True, no_posthook=True,
92 no_test=True, no_posthook=True,
93 external_merge='/bin/false',
93 external_merge='/bin/false',
94 repodir=self.tmppath)
94 repodir=self.tmppath)
95 if status:
95 if status:
96 if output.find('We have conflicts in') == -1:
96 if output.find('We have conflicts in') == -1:
97 self.checkexit(status, output)
97 self.checkexit(status, output)
98 output, status = self.run('revert', all=True, repodir=self.tmppath)
98 output, status = self.run('revert', all=True, repodir=self.tmppath)
99 self.checkexit(status, output)
99 self.checkexit(status, output)
100
100
101 def getchanges(self, rev):
101 def getchanges(self, rev):
102 self.pull(rev)
102 self.pull(rev)
103 copies = {}
103 copies = {}
104 changes = []
104 changes = []
105 for elt in self.changes[rev].find('summary').getchildren():
105 for elt in self.changes[rev].find('summary').getchildren():
106 if elt.tag in ('add_directory', 'remove_directory'):
106 if elt.tag in ('add_directory', 'remove_directory'):
107 continue
107 continue
108 if elt.tag == 'move':
108 if elt.tag == 'move':
109 changes.append((elt.get('from'), rev))
109 changes.append((elt.get('from'), rev))
110 copies[elt.get('from')] = elt.get('to')
110 copies[elt.get('from')] = elt.get('to')
111 else:
111 else:
112 changes.append((elt.text.strip(), rev))
112 changes.append((elt.text.strip(), rev))
113 changes.sort()
114 self.lastrev = rev
113 self.lastrev = rev
115 return changes, copies
114 return util.sort(changes), copies
116
115
117 def getfile(self, name, rev):
116 def getfile(self, name, rev):
118 if rev != self.lastrev:
117 if rev != self.lastrev:
119 raise util.Abort(_('internal calling inconsistency'))
118 raise util.Abort(_('internal calling inconsistency'))
120 return open(os.path.join(self.tmppath, name), 'rb').read()
119 return open(os.path.join(self.tmppath, name), 'rb').read()
121
120
122 def getmode(self, name, rev):
121 def getmode(self, name, rev):
123 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
122 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
124 return (mode & 0111) and 'x' or ''
123 return (mode & 0111) and 'x' or ''
125
124
126 def gettags(self):
125 def gettags(self):
127 return self.tags
126 return self.tags
@@ -1,301 +1,299 @@
1 # GNU Arch support for the convert extension
1 # GNU Arch support for the convert extension
2
2
3 from common import NoRepo, commandline, commit, converter_source
3 from common import NoRepo, commandline, commit, converter_source
4 from mercurial.i18n import _
4 from mercurial.i18n import _
5 from mercurial import util
5 from mercurial import util
6 import os, shutil, tempfile, stat
6 import os, shutil, tempfile, stat
7
7
8 class gnuarch_source(converter_source, commandline):
8 class gnuarch_source(converter_source, commandline):
9
9
10 class gnuarch_rev:
10 class gnuarch_rev:
11 def __init__(self, rev):
11 def __init__(self, rev):
12 self.rev = rev
12 self.rev = rev
13 self.summary = ''
13 self.summary = ''
14 self.date = None
14 self.date = None
15 self.author = ''
15 self.author = ''
16 self.add_files = []
16 self.add_files = []
17 self.mod_files = []
17 self.mod_files = []
18 self.del_files = []
18 self.del_files = []
19 self.ren_files = {}
19 self.ren_files = {}
20 self.ren_dirs = {}
20 self.ren_dirs = {}
21
21
22 def __init__(self, ui, path, rev=None):
22 def __init__(self, ui, path, rev=None):
23 super(gnuarch_source, self).__init__(ui, path, rev=rev)
23 super(gnuarch_source, self).__init__(ui, path, rev=rev)
24
24
25 if not os.path.exists(os.path.join(path, '{arch}')):
25 if not os.path.exists(os.path.join(path, '{arch}')):
26 raise NoRepo(_("%s does not look like a GNU Arch repo" % path))
26 raise NoRepo(_("%s does not look like a GNU Arch repo" % path))
27
27
28 # Could use checktool, but we want to check for baz or tla.
28 # Could use checktool, but we want to check for baz or tla.
29 self.execmd = None
29 self.execmd = None
30 if util.find_exe('baz'):
30 if util.find_exe('baz'):
31 self.execmd = 'baz'
31 self.execmd = 'baz'
32 else:
32 else:
33 if util.find_exe('tla'):
33 if util.find_exe('tla'):
34 self.execmd = 'tla'
34 self.execmd = 'tla'
35 else:
35 else:
36 raise util.Abort(_('cannot find a GNU Arch tool'))
36 raise util.Abort(_('cannot find a GNU Arch tool'))
37
37
38 commandline.__init__(self, ui, self.execmd)
38 commandline.__init__(self, ui, self.execmd)
39
39
40 self.path = os.path.realpath(path)
40 self.path = os.path.realpath(path)
41 self.tmppath = None
41 self.tmppath = None
42
42
43 self.treeversion = None
43 self.treeversion = None
44 self.lastrev = None
44 self.lastrev = None
45 self.changes = {}
45 self.changes = {}
46 self.parents = {}
46 self.parents = {}
47 self.tags = {}
47 self.tags = {}
48 self.modecache = {}
48 self.modecache = {}
49
49
50 def before(self):
50 def before(self):
51 if self.execmd == 'tla':
51 if self.execmd == 'tla':
52 output = self.run0('tree-version', self.path)
52 output = self.run0('tree-version', self.path)
53 else:
53 else:
54 output = self.run0('tree-version', '-d', self.path)
54 output = self.run0('tree-version', '-d', self.path)
55 self.treeversion = output.strip()
55 self.treeversion = output.strip()
56
56
57 self.ui.status(_('analyzing tree version %s...\n' % self.treeversion))
57 self.ui.status(_('analyzing tree version %s...\n' % self.treeversion))
58
58
59 # Get name of temporary directory
59 # Get name of temporary directory
60 version = self.treeversion.split('/')
60 version = self.treeversion.split('/')
61 self.tmppath = os.path.join(tempfile.gettempdir(),
61 self.tmppath = os.path.join(tempfile.gettempdir(),
62 'hg-%s' % version[1])
62 'hg-%s' % version[1])
63
63
64 # Generate parents dictionary
64 # Generate parents dictionary
65 child = []
65 child = []
66 output, status = self.runlines('revisions', self.treeversion)
66 output, status = self.runlines('revisions', self.treeversion)
67 self.checkexit(status, 'archive registered?')
67 self.checkexit(status, 'archive registered?')
68 for l in output:
68 for l in output:
69 rev = l.strip()
69 rev = l.strip()
70 self.changes[rev] = self.gnuarch_rev(rev)
70 self.changes[rev] = self.gnuarch_rev(rev)
71
71
72 # Read author, date and summary
72 # Read author, date and summary
73 catlog = self.runlines0('cat-log', '-d', self.path, rev)
73 catlog = self.runlines0('cat-log', '-d', self.path, rev)
74 self._parsecatlog(catlog, rev)
74 self._parsecatlog(catlog, rev)
75
75
76 self.parents[rev] = child
76 self.parents[rev] = child
77 child = [rev]
77 child = [rev]
78 if rev == self.rev:
78 if rev == self.rev:
79 break
79 break
80 self.parents[None] = child
80 self.parents[None] = child
81
81
82 def after(self):
82 def after(self):
83 self.ui.debug(_('cleaning up %s\n' % self.tmppath))
83 self.ui.debug(_('cleaning up %s\n' % self.tmppath))
84 shutil.rmtree(self.tmppath, ignore_errors=True)
84 shutil.rmtree(self.tmppath, ignore_errors=True)
85
85
86 def getheads(self):
86 def getheads(self):
87 return self.parents[None]
87 return self.parents[None]
88
88
89 def getfile(self, name, rev):
89 def getfile(self, name, rev):
90 if rev != self.lastrev:
90 if rev != self.lastrev:
91 raise util.Abort(_('internal calling inconsistency'))
91 raise util.Abort(_('internal calling inconsistency'))
92
92
93 # Raise IOError if necessary (i.e. deleted files).
93 # Raise IOError if necessary (i.e. deleted files).
94 if not os.path.exists(os.path.join(self.tmppath, name)):
94 if not os.path.exists(os.path.join(self.tmppath, name)):
95 raise IOError
95 raise IOError
96
96
97 data, mode = self._getfile(name, rev)
97 data, mode = self._getfile(name, rev)
98 self.modecache[(name, rev)] = mode
98 self.modecache[(name, rev)] = mode
99
99
100 return data
100 return data
101
101
102 def getmode(self, name, rev):
102 def getmode(self, name, rev):
103 return self.modecache[(name, rev)]
103 return self.modecache[(name, rev)]
104
104
105 def getchanges(self, rev):
105 def getchanges(self, rev):
106 self.modecache = {}
106 self.modecache = {}
107 self._update(rev)
107 self._update(rev)
108 changes = []
108 changes = []
109 copies = {}
109 copies = {}
110
110
111 for f in self.changes[rev].add_files:
111 for f in self.changes[rev].add_files:
112 changes.append((f, rev))
112 changes.append((f, rev))
113
113
114 for f in self.changes[rev].mod_files:
114 for f in self.changes[rev].mod_files:
115 changes.append((f, rev))
115 changes.append((f, rev))
116
116
117 for f in self.changes[rev].del_files:
117 for f in self.changes[rev].del_files:
118 changes.append((f, rev))
118 changes.append((f, rev))
119
119
120 for src in self.changes[rev].ren_files:
120 for src in self.changes[rev].ren_files:
121 to = self.changes[rev].ren_files[src]
121 to = self.changes[rev].ren_files[src]
122 changes.append((src, rev))
122 changes.append((src, rev))
123 changes.append((to, rev))
123 changes.append((to, rev))
124 copies[src] = to
124 copies[src] = to
125
125
126 for src in self.changes[rev].ren_dirs:
126 for src in self.changes[rev].ren_dirs:
127 to = self.changes[rev].ren_dirs[src]
127 to = self.changes[rev].ren_dirs[src]
128 chgs, cps = self._rendirchanges(src, to);
128 chgs, cps = self._rendirchanges(src, to);
129 changes += [(f, rev) for f in chgs]
129 changes += [(f, rev) for f in chgs]
130 for c in cps:
130 for c in cps:
131 copies[c] = cps[c]
131 copies[c] = cps[c]
132
132
133 changes.sort()
134 self.lastrev = rev
133 self.lastrev = rev
135
134 return util.sort(changes), copies
136 return changes, copies
137
135
138 def getcommit(self, rev):
136 def getcommit(self, rev):
139 changes = self.changes[rev]
137 changes = self.changes[rev]
140 return commit(author = changes.author, date = changes.date,
138 return commit(author = changes.author, date = changes.date,
141 desc = changes.summary, parents = self.parents[rev])
139 desc = changes.summary, parents = self.parents[rev])
142
140
143 def gettags(self):
141 def gettags(self):
144 return self.tags
142 return self.tags
145
143
146 def _execute(self, cmd, *args, **kwargs):
144 def _execute(self, cmd, *args, **kwargs):
147 cmdline = [self.execmd, cmd]
145 cmdline = [self.execmd, cmd]
148 cmdline += args
146 cmdline += args
149 cmdline = [util.shellquote(arg) for arg in cmdline]
147 cmdline = [util.shellquote(arg) for arg in cmdline]
150 cmdline += ['>', util.nulldev, '2>', util.nulldev]
148 cmdline += ['>', util.nulldev, '2>', util.nulldev]
151 cmdline = util.quotecommand(' '.join(cmdline))
149 cmdline = util.quotecommand(' '.join(cmdline))
152 self.ui.debug(cmdline, '\n')
150 self.ui.debug(cmdline, '\n')
153 return os.system(cmdline)
151 return os.system(cmdline)
154
152
155 def _update(self, rev):
153 def _update(self, rev):
156 if rev == 'base-0':
154 if rev == 'base-0':
157 # Initialise 'base-0' revision
155 # Initialise 'base-0' revision
158 self._obtainrevision(rev)
156 self._obtainrevision(rev)
159 else:
157 else:
160 self.ui.debug(_('applying revision %s...\n' % rev))
158 self.ui.debug(_('applying revision %s...\n' % rev))
161 revision = '%s--%s' % (self.treeversion, rev)
159 revision = '%s--%s' % (self.treeversion, rev)
162 changeset, status = self.runlines('replay', '-d', self.tmppath,
160 changeset, status = self.runlines('replay', '-d', self.tmppath,
163 revision)
161 revision)
164 if status:
162 if status:
165 # Something went wrong while merging (baz or tla
163 # Something went wrong while merging (baz or tla
166 # issue?), get latest revision and try from there
164 # issue?), get latest revision and try from there
167 shutil.rmtree(self.tmppath, ignore_errors=True)
165 shutil.rmtree(self.tmppath, ignore_errors=True)
168 self._obtainrevision(rev)
166 self._obtainrevision(rev)
169 else:
167 else:
170 old_rev = self.parents[rev][0]
168 old_rev = self.parents[rev][0]
171 self.ui.debug(_('computing changeset between %s and %s...\n' \
169 self.ui.debug(_('computing changeset between %s and %s...\n' \
172 % (old_rev, rev)))
170 % (old_rev, rev)))
173 rev_a = '%s--%s' % (self.treeversion, old_rev)
171 rev_a = '%s--%s' % (self.treeversion, old_rev)
174 rev_b = '%s--%s' % (self.treeversion, rev)
172 rev_b = '%s--%s' % (self.treeversion, rev)
175 self._parsechangeset(changeset, rev)
173 self._parsechangeset(changeset, rev)
176
174
177 def _getfile(self, name, rev):
175 def _getfile(self, name, rev):
178 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
176 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
179 if stat.S_ISLNK(mode):
177 if stat.S_ISLNK(mode):
180 data = os.readlink(os.path.join(self.tmppath, name))
178 data = os.readlink(os.path.join(self.tmppath, name))
181 mode = mode and 'l' or ''
179 mode = mode and 'l' or ''
182 else:
180 else:
183 data = open(os.path.join(self.tmppath, name), 'rb').read()
181 data = open(os.path.join(self.tmppath, name), 'rb').read()
184 mode = (mode & 0111) and 'x' or ''
182 mode = (mode & 0111) and 'x' or ''
185 return data, mode
183 return data, mode
186
184
187 def _exclude(self, name):
185 def _exclude(self, name):
188 exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ]
186 exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ]
189 for exc in exclude:
187 for exc in exclude:
190 if name.find(exc) != -1:
188 if name.find(exc) != -1:
191 return True
189 return True
192 return False
190 return False
193
191
194 def _readcontents(self, path):
192 def _readcontents(self, path):
195 files = []
193 files = []
196 contents = os.listdir(path)
194 contents = os.listdir(path)
197 while len(contents) > 0:
195 while len(contents) > 0:
198 c = contents.pop()
196 c = contents.pop()
199 p = os.path.join(path, c)
197 p = os.path.join(path, c)
200 # os.walk could be used, but here we avoid internal GNU
198 # os.walk could be used, but here we avoid internal GNU
201 # Arch files and directories, thus saving a lot of time.
199 # Arch files and directories, thus saving a lot of time.
202 if not self._exclude(p):
200 if not self._exclude(p):
203 if os.path.isdir(p):
201 if os.path.isdir(p):
204 contents += [os.path.join(c, f) for f in os.listdir(p)]
202 contents += [os.path.join(c, f) for f in os.listdir(p)]
205 else:
203 else:
206 files.append(c)
204 files.append(c)
207 return files
205 return files
208
206
209 def _rendirchanges(self, src, dest):
207 def _rendirchanges(self, src, dest):
210 changes = []
208 changes = []
211 copies = {}
209 copies = {}
212 files = self._readcontents(os.path.join(self.tmppath, dest))
210 files = self._readcontents(os.path.join(self.tmppath, dest))
213 for f in files:
211 for f in files:
214 s = os.path.join(src, f)
212 s = os.path.join(src, f)
215 d = os.path.join(dest, f)
213 d = os.path.join(dest, f)
216 changes.append(s)
214 changes.append(s)
217 changes.append(d)
215 changes.append(d)
218 copies[s] = d
216 copies[s] = d
219 return changes, copies
217 return changes, copies
220
218
221 def _obtainrevision(self, rev):
219 def _obtainrevision(self, rev):
222 self.ui.debug(_('obtaining revision %s...\n' % rev))
220 self.ui.debug(_('obtaining revision %s...\n' % rev))
223 revision = '%s--%s' % (self.treeversion, rev)
221 revision = '%s--%s' % (self.treeversion, rev)
224 output = self._execute('get', revision, self.tmppath)
222 output = self._execute('get', revision, self.tmppath)
225 self.checkexit(output)
223 self.checkexit(output)
226 self.ui.debug(_('analysing revision %s...\n' % rev))
224 self.ui.debug(_('analysing revision %s...\n' % rev))
227 files = self._readcontents(self.tmppath)
225 files = self._readcontents(self.tmppath)
228 self.changes[rev].add_files += files
226 self.changes[rev].add_files += files
229
227
230 def _stripbasepath(self, path):
228 def _stripbasepath(self, path):
231 if path.startswith('./'):
229 if path.startswith('./'):
232 return path[2:]
230 return path[2:]
233 return path
231 return path
234
232
235 def _parsecatlog(self, data, rev):
233 def _parsecatlog(self, data, rev):
236 summary = []
234 summary = []
237 for l in data:
235 for l in data:
238 l = l.strip()
236 l = l.strip()
239 if summary:
237 if summary:
240 summary.append(l)
238 summary.append(l)
241 elif l.startswith('Summary:'):
239 elif l.startswith('Summary:'):
242 summary.append(l[len('Summary: '):])
240 summary.append(l[len('Summary: '):])
243 elif l.startswith('Standard-date:'):
241 elif l.startswith('Standard-date:'):
244 date = l[len('Standard-date: '):]
242 date = l[len('Standard-date: '):]
245 strdate = util.strdate(date, '%Y-%m-%d %H:%M:%S')
243 strdate = util.strdate(date, '%Y-%m-%d %H:%M:%S')
246 self.changes[rev].date = util.datestr(strdate)
244 self.changes[rev].date = util.datestr(strdate)
247 elif l.startswith('Creator:'):
245 elif l.startswith('Creator:'):
248 self.changes[rev].author = l[len('Creator: '):]
246 self.changes[rev].author = l[len('Creator: '):]
249 self.changes[rev].summary = '\n'.join(summary)
247 self.changes[rev].summary = '\n'.join(summary)
250
248
251 def _parsechangeset(self, data, rev):
249 def _parsechangeset(self, data, rev):
252 for l in data:
250 for l in data:
253 l = l.strip()
251 l = l.strip()
254 # Added file (ignore added directory)
252 # Added file (ignore added directory)
255 if l.startswith('A') and not l.startswith('A/'):
253 if l.startswith('A') and not l.startswith('A/'):
256 file = self._stripbasepath(l[1:].strip())
254 file = self._stripbasepath(l[1:].strip())
257 if not self._exclude(file):
255 if not self._exclude(file):
258 self.changes[rev].add_files.append(file)
256 self.changes[rev].add_files.append(file)
259 # Deleted file (ignore deleted directory)
257 # Deleted file (ignore deleted directory)
260 elif l.startswith('D') and not l.startswith('D/'):
258 elif l.startswith('D') and not l.startswith('D/'):
261 file = self._stripbasepath(l[1:].strip())
259 file = self._stripbasepath(l[1:].strip())
262 if not self._exclude(file):
260 if not self._exclude(file):
263 self.changes[rev].del_files.append(file)
261 self.changes[rev].del_files.append(file)
264 # Modified binary file
262 # Modified binary file
265 elif l.startswith('Mb'):
263 elif l.startswith('Mb'):
266 file = self._stripbasepath(l[2:].strip())
264 file = self._stripbasepath(l[2:].strip())
267 if not self._exclude(file):
265 if not self._exclude(file):
268 self.changes[rev].mod_files.append(file)
266 self.changes[rev].mod_files.append(file)
269 # Modified link
267 # Modified link
270 elif l.startswith('M->'):
268 elif l.startswith('M->'):
271 file = self._stripbasepath(l[3:].strip())
269 file = self._stripbasepath(l[3:].strip())
272 if not self._exclude(file):
270 if not self._exclude(file):
273 self.changes[rev].mod_files.append(file)
271 self.changes[rev].mod_files.append(file)
274 # Modified file
272 # Modified file
275 elif l.startswith('M'):
273 elif l.startswith('M'):
276 file = self._stripbasepath(l[1:].strip())
274 file = self._stripbasepath(l[1:].strip())
277 if not self._exclude(file):
275 if not self._exclude(file):
278 self.changes[rev].mod_files.append(file)
276 self.changes[rev].mod_files.append(file)
279 # Renamed file (or link)
277 # Renamed file (or link)
280 elif l.startswith('=>'):
278 elif l.startswith('=>'):
281 files = l[2:].strip().split(' ')
279 files = l[2:].strip().split(' ')
282 if len(files) == 1:
280 if len(files) == 1:
283 files = l[2:].strip().split('\t')
281 files = l[2:].strip().split('\t')
284 src = self._stripbasepath(files[0])
282 src = self._stripbasepath(files[0])
285 dst = self._stripbasepath(files[1])
283 dst = self._stripbasepath(files[1])
286 if not self._exclude(src) and not self._exclude(dst):
284 if not self._exclude(src) and not self._exclude(dst):
287 self.changes[rev].ren_files[src] = dst
285 self.changes[rev].ren_files[src] = dst
288 # Conversion from file to link or from link to file (modified)
286 # Conversion from file to link or from link to file (modified)
289 elif l.startswith('ch'):
287 elif l.startswith('ch'):
290 file = self._stripbasepath(l[2:].strip())
288 file = self._stripbasepath(l[2:].strip())
291 if not self._exclude(file):
289 if not self._exclude(file):
292 self.changes[rev].mod_files.append(file)
290 self.changes[rev].mod_files.append(file)
293 # Renamed directory
291 # Renamed directory
294 elif l.startswith('/>'):
292 elif l.startswith('/>'):
295 dirs = l[2:].strip().split(' ')
293 dirs = l[2:].strip().split(' ')
296 if len(dirs) == 1:
294 if len(dirs) == 1:
297 dirs = l[2:].strip().split('\t')
295 dirs = l[2:].strip().split('\t')
298 src = self._stripbasepath(dirs[0])
296 src = self._stripbasepath(dirs[0])
299 dst = self._stripbasepath(dirs[1])
297 dst = self._stripbasepath(dirs[1])
300 if not self._exclude(src) and not self._exclude(dst):
298 if not self._exclude(src) and not self._exclude(dst):
301 self.changes[rev].ren_dirs[src] = dst
299 self.changes[rev].ren_dirs[src] = dst
@@ -1,289 +1,285 @@
1 # hg backend for convert extension
1 # hg backend for convert extension
2
2
3 # Notes for hg->hg conversion:
3 # Notes for hg->hg conversion:
4 #
4 #
5 # * Old versions of Mercurial didn't trim the whitespace from the ends
5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 # of commit messages, but new versions do. Changesets created by
6 # of commit messages, but new versions do. Changesets created by
7 # those older versions, then converted, may thus have different
7 # those older versions, then converted, may thus have different
8 # hashes for changesets that are otherwise identical.
8 # hashes for changesets that are otherwise identical.
9 #
9 #
10 # * By default, the source revision is stored in the converted
10 # * By default, the source revision is stored in the converted
11 # revision. This will cause the converted revision to have a
11 # revision. This will cause the converted revision to have a
12 # different identity than the source. To avoid this, use the
12 # different identity than the source. To avoid this, use the
13 # following option: "--config convert.hg.saverev=false"
13 # following option: "--config convert.hg.saverev=false"
14
14
15
15
16 import os, time
16 import os, time
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.repo import RepoError
18 from mercurial.repo import RepoError
19 from mercurial.node import bin, hex, nullid
19 from mercurial.node import bin, hex, nullid
20 from mercurial import hg, revlog, util, context
20 from mercurial import hg, revlog, util, context
21
21
22 from common import NoRepo, commit, converter_source, converter_sink
22 from common import NoRepo, commit, converter_source, converter_sink
23
23
24 class mercurial_sink(converter_sink):
24 class mercurial_sink(converter_sink):
25 def __init__(self, ui, path):
25 def __init__(self, ui, path):
26 converter_sink.__init__(self, ui, path)
26 converter_sink.__init__(self, ui, path)
27 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
27 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
28 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
28 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
29 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
29 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
30 self.lastbranch = None
30 self.lastbranch = None
31 if os.path.isdir(path) and len(os.listdir(path)) > 0:
31 if os.path.isdir(path) and len(os.listdir(path)) > 0:
32 try:
32 try:
33 self.repo = hg.repository(self.ui, path)
33 self.repo = hg.repository(self.ui, path)
34 if not self.repo.local():
34 if not self.repo.local():
35 raise NoRepo(_('%s is not a local Mercurial repo') % path)
35 raise NoRepo(_('%s is not a local Mercurial repo') % path)
36 except RepoError, err:
36 except RepoError, err:
37 ui.print_exc()
37 ui.print_exc()
38 raise NoRepo(err.args[0])
38 raise NoRepo(err.args[0])
39 else:
39 else:
40 try:
40 try:
41 ui.status(_('initializing destination %s repository\n') % path)
41 ui.status(_('initializing destination %s repository\n') % path)
42 self.repo = hg.repository(self.ui, path, create=True)
42 self.repo = hg.repository(self.ui, path, create=True)
43 if not self.repo.local():
43 if not self.repo.local():
44 raise NoRepo(_('%s is not a local Mercurial repo') % path)
44 raise NoRepo(_('%s is not a local Mercurial repo') % path)
45 self.created.append(path)
45 self.created.append(path)
46 except RepoError, err:
46 except RepoError, err:
47 ui.print_exc()
47 ui.print_exc()
48 raise NoRepo("could not create hg repo %s as sink" % path)
48 raise NoRepo("could not create hg repo %s as sink" % path)
49 self.lock = None
49 self.lock = None
50 self.wlock = None
50 self.wlock = None
51 self.filemapmode = False
51 self.filemapmode = False
52
52
53 def before(self):
53 def before(self):
54 self.ui.debug(_('run hg sink pre-conversion action\n'))
54 self.ui.debug(_('run hg sink pre-conversion action\n'))
55 self.wlock = self.repo.wlock()
55 self.wlock = self.repo.wlock()
56 self.lock = self.repo.lock()
56 self.lock = self.repo.lock()
57
57
58 def after(self):
58 def after(self):
59 self.ui.debug(_('run hg sink post-conversion action\n'))
59 self.ui.debug(_('run hg sink post-conversion action\n'))
60 self.lock = None
60 self.lock = None
61 self.wlock = None
61 self.wlock = None
62
62
63 def revmapfile(self):
63 def revmapfile(self):
64 return os.path.join(self.path, ".hg", "shamap")
64 return os.path.join(self.path, ".hg", "shamap")
65
65
66 def authorfile(self):
66 def authorfile(self):
67 return os.path.join(self.path, ".hg", "authormap")
67 return os.path.join(self.path, ".hg", "authormap")
68
68
69 def getheads(self):
69 def getheads(self):
70 h = self.repo.changelog.heads()
70 h = self.repo.changelog.heads()
71 return [ hex(x) for x in h ]
71 return [ hex(x) for x in h ]
72
72
73 def setbranch(self, branch, pbranches):
73 def setbranch(self, branch, pbranches):
74 if not self.clonebranches:
74 if not self.clonebranches:
75 return
75 return
76
76
77 setbranch = (branch != self.lastbranch)
77 setbranch = (branch != self.lastbranch)
78 self.lastbranch = branch
78 self.lastbranch = branch
79 if not branch:
79 if not branch:
80 branch = 'default'
80 branch = 'default'
81 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
81 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
82 pbranch = pbranches and pbranches[0][1] or 'default'
82 pbranch = pbranches and pbranches[0][1] or 'default'
83
83
84 branchpath = os.path.join(self.path, branch)
84 branchpath = os.path.join(self.path, branch)
85 if setbranch:
85 if setbranch:
86 self.after()
86 self.after()
87 try:
87 try:
88 self.repo = hg.repository(self.ui, branchpath)
88 self.repo = hg.repository(self.ui, branchpath)
89 except:
89 except:
90 self.repo = hg.repository(self.ui, branchpath, create=True)
90 self.repo = hg.repository(self.ui, branchpath, create=True)
91 self.before()
91 self.before()
92
92
93 # pbranches may bring revisions from other branches (merge parents)
93 # pbranches may bring revisions from other branches (merge parents)
94 # Make sure we have them, or pull them.
94 # Make sure we have them, or pull them.
95 missings = {}
95 missings = {}
96 for b in pbranches:
96 for b in pbranches:
97 try:
97 try:
98 self.repo.lookup(b[0])
98 self.repo.lookup(b[0])
99 except:
99 except:
100 missings.setdefault(b[1], []).append(b[0])
100 missings.setdefault(b[1], []).append(b[0])
101
101
102 if missings:
102 if missings:
103 self.after()
103 self.after()
104 for pbranch, heads in missings.iteritems():
104 for pbranch, heads in missings.iteritems():
105 pbranchpath = os.path.join(self.path, pbranch)
105 pbranchpath = os.path.join(self.path, pbranch)
106 prepo = hg.repository(self.ui, pbranchpath)
106 prepo = hg.repository(self.ui, pbranchpath)
107 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
107 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
108 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
108 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
109 self.before()
109 self.before()
110
110
111 def putcommit(self, files, copies, parents, commit, source):
111 def putcommit(self, files, copies, parents, commit, source):
112
112
113 files = dict(files)
113 files = dict(files)
114 def getfilectx(repo, memctx, f):
114 def getfilectx(repo, memctx, f):
115 v = files[f]
115 v = files[f]
116 data = source.getfile(f, v)
116 data = source.getfile(f, v)
117 e = source.getmode(f, v)
117 e = source.getmode(f, v)
118 return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f))
118 return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f))
119
119
120 pl = []
120 pl = []
121 for p in parents:
121 for p in parents:
122 if p not in pl:
122 if p not in pl:
123 pl.append(p)
123 pl.append(p)
124 parents = pl
124 parents = pl
125 nparents = len(parents)
125 nparents = len(parents)
126 if self.filemapmode and nparents == 1:
126 if self.filemapmode and nparents == 1:
127 m1node = self.repo.changelog.read(bin(parents[0]))[0]
127 m1node = self.repo.changelog.read(bin(parents[0]))[0]
128 parent = parents[0]
128 parent = parents[0]
129
129
130 if len(parents) < 2: parents.append("0" * 40)
130 if len(parents) < 2: parents.append("0" * 40)
131 if len(parents) < 2: parents.append("0" * 40)
131 if len(parents) < 2: parents.append("0" * 40)
132 p2 = parents.pop(0)
132 p2 = parents.pop(0)
133
133
134 text = commit.desc
134 text = commit.desc
135 extra = commit.extra.copy()
135 extra = commit.extra.copy()
136 if self.branchnames and commit.branch:
136 if self.branchnames and commit.branch:
137 extra['branch'] = commit.branch
137 extra['branch'] = commit.branch
138 if commit.rev:
138 if commit.rev:
139 extra['convert_revision'] = commit.rev
139 extra['convert_revision'] = commit.rev
140
140
141 while parents:
141 while parents:
142 p1 = p2
142 p1 = p2
143 p2 = parents.pop(0)
143 p2 = parents.pop(0)
144 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx,
144 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx,
145 commit.author, commit.date, extra)
145 commit.author, commit.date, extra)
146 a = self.repo.commitctx(ctx)
146 a = self.repo.commitctx(ctx)
147 text = "(octopus merge fixup)\n"
147 text = "(octopus merge fixup)\n"
148 p2 = hex(self.repo.changelog.tip())
148 p2 = hex(self.repo.changelog.tip())
149
149
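The loop above folds a commit with more than two parents into a chain of two-parent commits; a sketch of the folding order, with commitpair standing in for the real commitctx call (an illustrative callback, not Mercurial API):

def fold(parents, commitpair):
    parents = list(parents)
    while len(parents) < 2:
        parents.append('0' * 40)    # pad with the null id, as above
    p2 = parents.pop(0)
    last = None
    while parents:
        p1, p2 = p2, parents.pop(0)
        last = commitpair(p1, p2)   # second and later calls are the fixups
        p2 = last
    return last

# With parents [A, B, C]: commitpair(A, B) -> T1, then commitpair(T1, C) -> T2;
# T2 becomes the converted changeset, and the extra commit carries the
# "(octopus merge fixup)" message.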
150 if self.filemapmode and nparents == 1:
150 if self.filemapmode and nparents == 1:
151 man = self.repo.manifest
151 man = self.repo.manifest
152 mnode = self.repo.changelog.read(bin(p2))[0]
152 mnode = self.repo.changelog.read(bin(p2))[0]
153 if not man.cmp(m1node, man.revision(mnode)):
153 if not man.cmp(m1node, man.revision(mnode)):
154 self.repo.rollback()
154 self.repo.rollback()
155 return parent
155 return parent
156 return p2
156 return p2
157
157
158 def puttags(self, tags):
158 def puttags(self, tags):
159 try:
159 try:
160 parentctx = self.repo[self.tagsbranch]
160 parentctx = self.repo[self.tagsbranch]
161 tagparent = parentctx.node()
161 tagparent = parentctx.node()
162 except RepoError, inst:
162 except RepoError, inst:
163 parentctx = None
163 parentctx = None
164 tagparent = nullid
164 tagparent = nullid
165
165
166 try:
166 try:
167 old = parentctx.filectx(".hgtags").data()
167 oldlines = util.sort(parentctx['.hgtags'].data().splitlines(1))
168 oldlines = old.splitlines(1)
169 oldlines.sort()
170 except:
168 except:
171 oldlines = []
169 oldlines = []
172
170
173 newlines = [("%s %s\n" % (tags[tag], tag)) for tag in tags.keys()]
171 newlines = util.sort([("%s %s\n" % (tags[tag], tag)) for tag in tags])
174 newlines.sort()
175
172
176 if newlines == oldlines:
173 if newlines == oldlines:
177 return None
174 return None
178 data = "".join(newlines)
175 data = "".join(newlines)
179
176
180 def getfilectx(repo, memctx, f):
177 def getfilectx(repo, memctx, f):
181 return context.memfilectx(f, data, False, False, None)
178 return context.memfilectx(f, data, False, False, None)
182
179
183 self.ui.status("updating tags\n")
180 self.ui.status("updating tags\n")
184 date = "%s 0" % int(time.mktime(time.gmtime()))
181 date = "%s 0" % int(time.mktime(time.gmtime()))
185 extra = {'branch': self.tagsbranch}
182 extra = {'branch': self.tagsbranch}
186 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
183 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
187 [".hgtags"], getfilectx, "convert-repo", date,
184 [".hgtags"], getfilectx, "convert-repo", date,
188 extra)
185 extra)
189 self.repo.commitctx(ctx)
186 self.repo.commitctx(ctx)
190 return hex(self.repo.changelog.tip())
187 return hex(self.repo.changelog.tip())
191
188
192 def setfilemapmode(self, active):
189 def setfilemapmode(self, active):
193 self.filemapmode = active
190 self.filemapmode = active
194
191
195 class mercurial_source(converter_source):
192 class mercurial_source(converter_source):
196 def __init__(self, ui, path, rev=None):
193 def __init__(self, ui, path, rev=None):
197 converter_source.__init__(self, ui, path, rev)
194 converter_source.__init__(self, ui, path, rev)
198 self.saverev = ui.configbool('convert', 'hg.saverev', True)
195 self.saverev = ui.configbool('convert', 'hg.saverev', True)
199 try:
196 try:
200 self.repo = hg.repository(self.ui, path)
197 self.repo = hg.repository(self.ui, path)
201 # try to provoke an exception if this isn't really a hg
198 # try to provoke an exception if this isn't really a hg
202 # repo, but some other bogus compatible-looking url
199 # repo, but some other bogus compatible-looking url
203 if not self.repo.local():
200 if not self.repo.local():
204 raise RepoError()
201 raise RepoError()
205 except RepoError:
202 except RepoError:
206 ui.print_exc()
203 ui.print_exc()
207 raise NoRepo("%s is not a local Mercurial repo" % path)
204 raise NoRepo("%s is not a local Mercurial repo" % path)
208 self.lastrev = None
205 self.lastrev = None
209 self.lastctx = None
206 self.lastctx = None
210 self._changescache = None
207 self._changescache = None
211 self.convertfp = None
208 self.convertfp = None
212
209
213 def changectx(self, rev):
210 def changectx(self, rev):
214 if self.lastrev != rev:
211 if self.lastrev != rev:
215 self.lastctx = self.repo[rev]
212 self.lastctx = self.repo[rev]
216 self.lastrev = rev
213 self.lastrev = rev
217 return self.lastctx
214 return self.lastctx
218
215
219 def getheads(self):
216 def getheads(self):
220 if self.rev:
217 if self.rev:
221 return [hex(self.repo[self.rev].node())]
218 return [hex(self.repo[self.rev].node())]
222 else:
219 else:
223 return [hex(node) for node in self.repo.heads()]
220 return [hex(node) for node in self.repo.heads()]
224
221
225 def getfile(self, name, rev):
222 def getfile(self, name, rev):
226 try:
223 try:
227 return self.changectx(rev)[name].data()
224 return self.changectx(rev)[name].data()
228 except revlog.LookupError, err:
225 except revlog.LookupError, err:
229 raise IOError(err)
226 raise IOError(err)
230
227
231 def getmode(self, name, rev):
228 def getmode(self, name, rev):
232 return self.changectx(rev).manifest().flags(name)
229 return self.changectx(rev).manifest().flags(name)
233
230
234 def getchanges(self, rev):
231 def getchanges(self, rev):
235 ctx = self.changectx(rev)
232 ctx = self.changectx(rev)
236 if self._changescache and self._changescache[0] == rev:
233 if self._changescache and self._changescache[0] == rev:
237 m, a, r = self._changescache[1]
234 m, a, r = self._changescache[1]
238 else:
235 else:
239 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
236 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
240 changes = [(name, rev) for name in m + a + r]
237 changes = [(name, rev) for name in m + a + r]
241 changes.sort()
238 return util.sort(changes), self.getcopies(ctx, m + a)
242 return (changes, self.getcopies(ctx, m + a))
243
239
244 def getcopies(self, ctx, files):
240 def getcopies(self, ctx, files):
245 copies = {}
241 copies = {}
246 for name in files:
242 for name in files:
247 try:
243 try:
248 copies[name] = ctx.filectx(name).renamed()[0]
244 copies[name] = ctx.filectx(name).renamed()[0]
249 except TypeError:
245 except TypeError:
250 pass
246 pass
251 return copies
247 return copies
252
248
253 def getcommit(self, rev):
249 def getcommit(self, rev):
254 ctx = self.changectx(rev)
250 ctx = self.changectx(rev)
255 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
251 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
256 if self.saverev:
252 if self.saverev:
257 crev = rev
253 crev = rev
258 else:
254 else:
259 crev = None
255 crev = None
260 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
256 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
261 desc=ctx.description(), rev=crev, parents=parents,
257 desc=ctx.description(), rev=crev, parents=parents,
262 branch=ctx.branch(), extra=ctx.extra())
258 branch=ctx.branch(), extra=ctx.extra())
263
259
264 def gettags(self):
260 def gettags(self):
265 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
261 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
266 return dict([(name, hex(node)) for name, node in tags])
262 return dict([(name, hex(node)) for name, node in tags])
267
263
268 def getchangedfiles(self, rev, i):
264 def getchangedfiles(self, rev, i):
269 ctx = self.changectx(rev)
265 ctx = self.changectx(rev)
270 i = i or 0
266 i = i or 0
271 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
267 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
272
268
273 if i == 0:
269 if i == 0:
274 self._changescache = (rev, changes)
270 self._changescache = (rev, changes)
275
271
276 return changes[0] + changes[1] + changes[2]
272 return changes[0] + changes[1] + changes[2]
277
273
278 def converted(self, rev, destrev):
274 def converted(self, rev, destrev):
279 if self.convertfp is None:
275 if self.convertfp is None:
280 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
276 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
281 'a')
277 'a')
282 self.convertfp.write('%s %s\n' % (destrev, rev))
278 self.convertfp.write('%s %s\n' % (destrev, rev))
283 self.convertfp.flush()
279 self.convertfp.flush()
284
280
285 def before(self):
281 def before(self):
286 self.ui.debug(_('run hg source pre-conversion action\n'))
282 self.ui.debug(_('run hg source pre-conversion action\n'))
287
283
288 def after(self):
284 def after(self):
289 self.ui.debug(_('run hg source post-conversion action\n'))
285 self.ui.debug(_('run hg source post-conversion action\n'))
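# --- editor's note (illustration, not part of the original changeset) ---
# The util.sort() calls introduced in this patch replace the older
# "copy, then list.sort()" idiom. A minimal sketch of the helper this
# changeset ("util: add sort helper") presumably adds to mercurial/util.py:
#
#     def sort(l):
#         """return a sorted copy of an iterable"""
#         l = list(l)
#         l.sort()
#         return l
#
# so expressions such as util.sort(changes) or util.sort(orig_paths.items())
# yield a new sorted list instead of sorting a list in place.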
@@ -1,1144 +1,1140 @@
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 #
4 #
5 # Configuration options:
5 # Configuration options:
6 #
6 #
7 # convert.svn.trunk
7 # convert.svn.trunk
8 # Relative path to the trunk (default: "trunk")
8 # Relative path to the trunk (default: "trunk")
9 # convert.svn.branches
9 # convert.svn.branches
10 # Relative path to tree of branches (default: "branches")
10 # Relative path to tree of branches (default: "branches")
11 # convert.svn.tags
11 # convert.svn.tags
12 # Relative path to tree of tags (default: "tags")
12 # Relative path to tree of tags (default: "tags")
13 #
13 #
14 # Set these in a hgrc, or on the command line as follows:
14 # Set these in a hgrc, or on the command line as follows:
15 #
15 #
16 # hg convert --config convert.svn.trunk=wackoname [...]
16 # hg convert --config convert.svn.trunk=wackoname [...]
17
17
18 import locale
18 import locale
19 import os
19 import os
20 import re
20 import re
21 import sys
21 import sys
22 import cPickle as pickle
22 import cPickle as pickle
23 import tempfile
23 import tempfile
24
24
25 from mercurial import strutil, util
25 from mercurial import strutil, util
26 from mercurial.i18n import _
26 from mercurial.i18n import _
27
27
28 # Subversion stuff. Works best with very recent Python SVN bindings
28 # Subversion stuff. Works best with very recent Python SVN bindings
29 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
29 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
30 # these bindings.
30 # these bindings.
31
31
32 from cStringIO import StringIO
32 from cStringIO import StringIO
33
33
34 from common import NoRepo, commit, converter_source, encodeargs, decodeargs
34 from common import NoRepo, commit, converter_source, encodeargs, decodeargs
35 from common import commandline, converter_sink, mapfile
35 from common import commandline, converter_sink, mapfile
36
36
37 try:
37 try:
38 from svn.core import SubversionException, Pool
38 from svn.core import SubversionException, Pool
39 import svn
39 import svn
40 import svn.client
40 import svn.client
41 import svn.core
41 import svn.core
42 import svn.ra
42 import svn.ra
43 import svn.delta
43 import svn.delta
44 import transport
44 import transport
45 except ImportError:
45 except ImportError:
46 pass
46 pass
47
47
48 def geturl(path):
48 def geturl(path):
49 try:
49 try:
50 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
50 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
51 except SubversionException:
51 except SubversionException:
52 pass
52 pass
53 if os.path.isdir(path):
53 if os.path.isdir(path):
54 path = os.path.normpath(os.path.abspath(path))
54 path = os.path.normpath(os.path.abspath(path))
55 if os.name == 'nt':
55 if os.name == 'nt':
56 path = '/' + util.normpath(path)
56 path = '/' + util.normpath(path)
57 return 'file://%s' % path
57 return 'file://%s' % path
58 return path
58 return path
59
59
60 def optrev(number):
60 def optrev(number):
61 optrev = svn.core.svn_opt_revision_t()
61 optrev = svn.core.svn_opt_revision_t()
62 optrev.kind = svn.core.svn_opt_revision_number
62 optrev.kind = svn.core.svn_opt_revision_number
63 optrev.value.number = number
63 optrev.value.number = number
64 return optrev
64 return optrev
65
65
66 class changedpath(object):
66 class changedpath(object):
67 def __init__(self, p):
67 def __init__(self, p):
68 self.copyfrom_path = p.copyfrom_path
68 self.copyfrom_path = p.copyfrom_path
69 self.copyfrom_rev = p.copyfrom_rev
69 self.copyfrom_rev = p.copyfrom_rev
70 self.action = p.action
70 self.action = p.action
71
71
72 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
72 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
73 strict_node_history=False):
73 strict_node_history=False):
74 protocol = -1
74 protocol = -1
75 def receiver(orig_paths, revnum, author, date, message, pool):
75 def receiver(orig_paths, revnum, author, date, message, pool):
76 if orig_paths is not None:
76 if orig_paths is not None:
77 for k, v in orig_paths.iteritems():
77 for k, v in orig_paths.iteritems():
78 orig_paths[k] = changedpath(v)
78 orig_paths[k] = changedpath(v)
79 pickle.dump((orig_paths, revnum, author, date, message),
79 pickle.dump((orig_paths, revnum, author, date, message),
80 fp, protocol)
80 fp, protocol)
81
81
82 try:
82 try:
83 # Use an ra of our own so that our parent can consume
83 # Use an ra of our own so that our parent can consume
84 # our results without confusing the server.
84 # our results without confusing the server.
85 t = transport.SvnRaTransport(url=url)
85 t = transport.SvnRaTransport(url=url)
86 svn.ra.get_log(t.ra, paths, start, end, limit,
86 svn.ra.get_log(t.ra, paths, start, end, limit,
87 discover_changed_paths,
87 discover_changed_paths,
88 strict_node_history,
88 strict_node_history,
89 receiver)
89 receiver)
90 except SubversionException, (inst, num):
90 except SubversionException, (inst, num):
91 pickle.dump(num, fp, protocol)
91 pickle.dump(num, fp, protocol)
92 except IOError:
92 except IOError:
93 # Caller may interrupt the iteration
93 # Caller may interrupt the iteration
94 pickle.dump(None, fp, protocol)
94 pickle.dump(None, fp, protocol)
95 else:
95 else:
96 pickle.dump(None, fp, protocol)
96 pickle.dump(None, fp, protocol)
97 fp.close()
97 fp.close()
98 # With a large history, the cleanup process goes crazy and suddenly
98 # With a large history, the cleanup process goes crazy and suddenly
99 # consumes a *huge* amount of memory. Once the output file is closed,
99 # consumes a *huge* amount of memory. Once the output file is closed,
100 # there is no need for clean termination.
100 # there is no need for clean termination.
101 os._exit(0)
101 os._exit(0)
102
102
103 def debugsvnlog(ui, **opts):
103 def debugsvnlog(ui, **opts):
104 """Fetch SVN log in a subprocess and channel them back to parent to
104 """Fetch SVN log in a subprocess and channel them back to parent to
105 avoid memory collection issues.
105 avoid memory collection issues.
106 """
106 """
107 util.set_binary(sys.stdin)
107 util.set_binary(sys.stdin)
108 util.set_binary(sys.stdout)
108 util.set_binary(sys.stdout)
109 args = decodeargs(sys.stdin.read())
109 args = decodeargs(sys.stdin.read())
110 get_log_child(sys.stdout, *args)
110 get_log_child(sys.stdout, *args)
111
111
112 class logstream:
112 class logstream:
113 """Interruptible revision log iterator."""
113 """Interruptible revision log iterator."""
114 def __init__(self, stdout):
114 def __init__(self, stdout):
115 self._stdout = stdout
115 self._stdout = stdout
116
116
117 def __iter__(self):
117 def __iter__(self):
118 while True:
118 while True:
119 entry = pickle.load(self._stdout)
119 entry = pickle.load(self._stdout)
120 try:
120 try:
121 orig_paths, revnum, author, date, message = entry
121 orig_paths, revnum, author, date, message = entry
122 except:
122 except:
123 if entry is None:
123 if entry is None:
124 break
124 break
125 raise SubversionException("child raised exception", entry)
125 raise SubversionException("child raised exception", entry)
126 yield entry
126 yield entry
127
127
128 def close(self):
128 def close(self):
129 if self._stdout:
129 if self._stdout:
130 self._stdout.close()
130 self._stdout.close()
131 self._stdout = None
131 self._stdout = None
132
132
133 def get_log(url, paths, start, end, limit=0, discover_changed_paths=True,
133 def get_log(url, paths, start, end, limit=0, discover_changed_paths=True,
134 strict_node_history=False):
134 strict_node_history=False):
135 args = [url, paths, start, end, limit, discover_changed_paths,
135 args = [url, paths, start, end, limit, discover_changed_paths,
136 strict_node_history]
136 strict_node_history]
137 arg = encodeargs(args)
137 arg = encodeargs(args)
138 hgexe = util.hgexecutable()
138 hgexe = util.hgexecutable()
139 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
139 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
140 stdin, stdout = os.popen2(cmd, 'b')
140 stdin, stdout = os.popen2(cmd, 'b')
141 stdin.write(arg)
141 stdin.write(arg)
142 stdin.close()
142 stdin.close()
143 return logstream(stdout)
143 return logstream(stdout)
144
144
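# --- editor's illustration (not part of the original changeset): a minimal
# sketch, under stated assumptions, of how the get_log()/logstream pair above
# is consumed; it mirrors the pattern used by svn_source.latest() and
# gettags() later in this file. The url, paths and revision range are
# caller-supplied placeholders.
def _example_consume_log(url, paths, start, stop):
    stream = get_log(url, paths, start, stop)
    try:
        for changed, revnum, author, date, message in stream:
            # 'changed' maps changed repository paths to changedpath
            # objects carrying copyfrom_path, copyfrom_rev and action
            pass
    finally:
        stream.close()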
145 # SVN conversion code stolen from bzr-svn and tailor
145 # SVN conversion code stolen from bzr-svn and tailor
146 #
146 #
147 # Subversion looks like a versioned filesystem; branch structures
147 # Subversion looks like a versioned filesystem; branch structures
148 # are defined by convention and not enforced by the tool. First,
148 # are defined by convention and not enforced by the tool. First,
149 # we define the potential branches (modules) as "trunk" and "branches"
149 # we define the potential branches (modules) as "trunk" and "branches"
150 # children directories. Revisions are then identified by their
150 # children directories. Revisions are then identified by their
151 # module and revision number (and a repository identifier).
151 # module and revision number (and a repository identifier).
152 #
152 #
153 # The revision graph is really a tree (or a forest). By default, a
153 # The revision graph is really a tree (or a forest). By default, a
154 # revision parent is the previous revision in the same module. If the
154 # revision parent is the previous revision in the same module. If the
155 # module directory is copied/moved from another module then the
155 # module directory is copied/moved from another module then the
156 # revision is the module root and its parent the source revision in
156 # revision is the module root and its parent the source revision in
157 # the parent module. A revision has at most one parent.
157 # the parent module. A revision has at most one parent.
158 #
158 #
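# --- editor's note (illustration, not part of the original changeset) ---
# Concretely, a converted Subversion revision is identified by a string of
# the form built by svn_source.revid() below:
#
#     "svn:<repository-uuid><module>@<revnum>"
#     e.g. "svn:01234567-89ab-cdef-0123-456789abcdef/trunk@1234"
#     (the uuid shown is made up)
#
# and svn_source.revsplit() recovers the (uuid, module, revnum) triple by
# stripping the "svn:" prefix and splitting on "@".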
159 class svn_source(converter_source):
159 class svn_source(converter_source):
160 def __init__(self, ui, url, rev=None):
160 def __init__(self, ui, url, rev=None):
161 super(svn_source, self).__init__(ui, url, rev=rev)
161 super(svn_source, self).__init__(ui, url, rev=rev)
162
162
163 try:
163 try:
164 SubversionException
164 SubversionException
165 except NameError:
165 except NameError:
166 raise NoRepo('Subversion python bindings could not be loaded')
166 raise NoRepo('Subversion python bindings could not be loaded')
167
167
168 self.encoding = locale.getpreferredencoding()
168 self.encoding = locale.getpreferredencoding()
169 self.lastrevs = {}
169 self.lastrevs = {}
170
170
171 latest = None
171 latest = None
172 try:
172 try:
173 # Support file://path@rev syntax. Useful e.g. to convert
173 # Support file://path@rev syntax. Useful e.g. to convert
174 # deleted branches.
174 # deleted branches.
175 at = url.rfind('@')
175 at = url.rfind('@')
176 if at >= 0:
176 if at >= 0:
177 latest = int(url[at+1:])
177 latest = int(url[at+1:])
178 url = url[:at]
178 url = url[:at]
179 except ValueError, e:
179 except ValueError, e:
180 pass
180 pass
181 self.url = geturl(url)
181 self.url = geturl(url)
182 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
182 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
183 try:
183 try:
184 self.transport = transport.SvnRaTransport(url=self.url)
184 self.transport = transport.SvnRaTransport(url=self.url)
185 self.ra = self.transport.ra
185 self.ra = self.transport.ra
186 self.ctx = self.transport.client
186 self.ctx = self.transport.client
187 self.base = svn.ra.get_repos_root(self.ra)
187 self.base = svn.ra.get_repos_root(self.ra)
188 # Module is either empty or a repository path starting with
188 # Module is either empty or a repository path starting with
189 # a slash and not ending with a slash.
189 # a slash and not ending with a slash.
190 self.module = self.url[len(self.base):]
190 self.module = self.url[len(self.base):]
191 self.rootmodule = self.module
191 self.rootmodule = self.module
192 self.commits = {}
192 self.commits = {}
193 self.paths = {}
193 self.paths = {}
194 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
194 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
195 except SubversionException, e:
195 except SubversionException, e:
196 ui.print_exc()
196 ui.print_exc()
197 raise NoRepo("%s does not look like a Subversion repo" % self.url)
197 raise NoRepo("%s does not look like a Subversion repo" % self.url)
198
198
199 if rev:
199 if rev:
200 try:
200 try:
201 latest = int(rev)
201 latest = int(rev)
202 except ValueError:
202 except ValueError:
203 raise util.Abort('svn: revision %s is not an integer' % rev)
203 raise util.Abort('svn: revision %s is not an integer' % rev)
204
204
205 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
205 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
206 try:
206 try:
207 self.startrev = int(self.startrev)
207 self.startrev = int(self.startrev)
208 if self.startrev < 0:
208 if self.startrev < 0:
209 self.startrev = 0
209 self.startrev = 0
210 except ValueError:
210 except ValueError:
211 raise util.Abort(_('svn: start revision %s is not an integer')
211 raise util.Abort(_('svn: start revision %s is not an integer')
212 % self.startrev)
212 % self.startrev)
213
213
214 try:
214 try:
215 self.get_blacklist()
215 self.get_blacklist()
216 except IOError, e:
216 except IOError, e:
217 pass
217 pass
218
218
219 self.head = self.latest(self.module, latest)
219 self.head = self.latest(self.module, latest)
220 if not self.head:
220 if not self.head:
221 raise util.Abort(_('no revision found in module %s') %
221 raise util.Abort(_('no revision found in module %s') %
222 self.module.encode(self.encoding))
222 self.module.encode(self.encoding))
223 self.last_changed = self.revnum(self.head)
223 self.last_changed = self.revnum(self.head)
224
224
225 self._changescache = None
225 self._changescache = None
226
226
227 if os.path.exists(os.path.join(url, '.svn/entries')):
227 if os.path.exists(os.path.join(url, '.svn/entries')):
228 self.wc = url
228 self.wc = url
229 else:
229 else:
230 self.wc = None
230 self.wc = None
231 self.convertfp = None
231 self.convertfp = None
232
232
233 def setrevmap(self, revmap):
233 def setrevmap(self, revmap):
234 lastrevs = {}
234 lastrevs = {}
235 for revid in revmap.iterkeys():
235 for revid in revmap.iterkeys():
236 uuid, module, revnum = self.revsplit(revid)
236 uuid, module, revnum = self.revsplit(revid)
237 lastrevnum = lastrevs.setdefault(module, revnum)
237 lastrevnum = lastrevs.setdefault(module, revnum)
238 if revnum > lastrevnum:
238 if revnum > lastrevnum:
239 lastrevs[module] = revnum
239 lastrevs[module] = revnum
240 self.lastrevs = lastrevs
240 self.lastrevs = lastrevs
241
241
242 def exists(self, path, optrev):
242 def exists(self, path, optrev):
243 try:
243 try:
244 svn.client.ls(self.url.rstrip('/') + '/' + path,
244 svn.client.ls(self.url.rstrip('/') + '/' + path,
245 optrev, False, self.ctx)
245 optrev, False, self.ctx)
246 return True
246 return True
247 except SubversionException, err:
247 except SubversionException, err:
248 return False
248 return False
249
249
250 def getheads(self):
250 def getheads(self):
251
251
252 def isdir(path, revnum):
252 def isdir(path, revnum):
253 kind = svn.ra.check_path(self.ra, path, revnum)
253 kind = svn.ra.check_path(self.ra, path, revnum)
254 return kind == svn.core.svn_node_dir
254 return kind == svn.core.svn_node_dir
255
255
256 def getcfgpath(name, rev):
256 def getcfgpath(name, rev):
257 cfgpath = self.ui.config('convert', 'svn.' + name)
257 cfgpath = self.ui.config('convert', 'svn.' + name)
258 if cfgpath is not None and cfgpath.strip() == '':
258 if cfgpath is not None and cfgpath.strip() == '':
259 return None
259 return None
260 path = (cfgpath or name).strip('/')
260 path = (cfgpath or name).strip('/')
261 if not self.exists(path, rev):
261 if not self.exists(path, rev):
262 if cfgpath:
262 if cfgpath:
263 raise util.Abort(_('expected %s to be at %r, but not found')
263 raise util.Abort(_('expected %s to be at %r, but not found')
264 % (name, path))
264 % (name, path))
265 return None
265 return None
266 self.ui.note(_('found %s at %r\n') % (name, path))
266 self.ui.note(_('found %s at %r\n') % (name, path))
267 return path
267 return path
268
268
269 rev = optrev(self.last_changed)
269 rev = optrev(self.last_changed)
270 oldmodule = ''
270 oldmodule = ''
271 trunk = getcfgpath('trunk', rev)
271 trunk = getcfgpath('trunk', rev)
272 self.tags = getcfgpath('tags', rev)
272 self.tags = getcfgpath('tags', rev)
273 branches = getcfgpath('branches', rev)
273 branches = getcfgpath('branches', rev)
274
274
275 # If the project has a trunk or branches, we will extract heads
275 # If the project has a trunk or branches, we will extract heads
276 # from them. We keep the project root otherwise.
276 # from them. We keep the project root otherwise.
277 if trunk:
277 if trunk:
278 oldmodule = self.module or ''
278 oldmodule = self.module or ''
279 self.module += '/' + trunk
279 self.module += '/' + trunk
280 self.head = self.latest(self.module, self.last_changed)
280 self.head = self.latest(self.module, self.last_changed)
281 if not self.head:
281 if not self.head:
282 raise util.Abort(_('no revision found in module %s') %
282 raise util.Abort(_('no revision found in module %s') %
283 self.module.encode(self.encoding))
283 self.module.encode(self.encoding))
284
284
285 # First head in the list is the module's head
285 # First head in the list is the module's head
286 self.heads = [self.head]
286 self.heads = [self.head]
287 if self.tags is not None:
287 if self.tags is not None:
288 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
288 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
289
289
290 # Check if branches bring a few more heads to the list
290 # Check if branches bring a few more heads to the list
291 if branches:
291 if branches:
292 rpath = self.url.strip('/')
292 rpath = self.url.strip('/')
293 branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
293 branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
294 self.ctx)
294 self.ctx)
295 for branch in branchnames.keys():
295 for branch in branchnames.keys():
296 module = '%s/%s/%s' % (oldmodule, branches, branch)
296 module = '%s/%s/%s' % (oldmodule, branches, branch)
297 if not isdir(module, self.last_changed):
297 if not isdir(module, self.last_changed):
298 continue
298 continue
299 brevid = self.latest(module, self.last_changed)
299 brevid = self.latest(module, self.last_changed)
300 if not brevid:
300 if not brevid:
301 self.ui.note(_('ignoring empty branch %s\n') %
301 self.ui.note(_('ignoring empty branch %s\n') %
302 branch.encode(self.encoding))
302 branch.encode(self.encoding))
303 continue
303 continue
304 self.ui.note('found branch %s at %d\n' %
304 self.ui.note('found branch %s at %d\n' %
305 (branch, self.revnum(brevid)))
305 (branch, self.revnum(brevid)))
306 self.heads.append(brevid)
306 self.heads.append(brevid)
307
307
308 if self.startrev and self.heads:
308 if self.startrev and self.heads:
309 if len(self.heads) > 1:
309 if len(self.heads) > 1:
310 raise util.Abort(_('svn: start revision is not supported '
310 raise util.Abort(_('svn: start revision is not supported '
311 'with more than one branch'))
311 'with more than one branch'))
312 revnum = self.revnum(self.heads[0])
312 revnum = self.revnum(self.heads[0])
313 if revnum < self.startrev:
313 if revnum < self.startrev:
314 raise util.Abort(_('svn: no revision found after start revision %d')
314 raise util.Abort(_('svn: no revision found after start revision %d')
315 % self.startrev)
315 % self.startrev)
316
316
317 return self.heads
317 return self.heads
318
318
319 def getfile(self, file, rev):
319 def getfile(self, file, rev):
320 data, mode = self._getfile(file, rev)
320 data, mode = self._getfile(file, rev)
321 self.modecache[(file, rev)] = mode
321 self.modecache[(file, rev)] = mode
322 return data
322 return data
323
323
324 def getmode(self, file, rev):
324 def getmode(self, file, rev):
325 return self.modecache[(file, rev)]
325 return self.modecache[(file, rev)]
326
326
327 def getchanges(self, rev):
327 def getchanges(self, rev):
328 if self._changescache and self._changescache[0] == rev:
328 if self._changescache and self._changescache[0] == rev:
329 return self._changescache[1]
329 return self._changescache[1]
330 self._changescache = None
330 self._changescache = None
331 self.modecache = {}
331 self.modecache = {}
332 (paths, parents) = self.paths[rev]
332 (paths, parents) = self.paths[rev]
333 if parents:
333 if parents:
334 files, copies = self.expandpaths(rev, paths, parents)
334 files, copies = self.expandpaths(rev, paths, parents)
335 else:
335 else:
336 # Perform a full checkout on roots
336 # Perform a full checkout on roots
337 uuid, module, revnum = self.revsplit(rev)
337 uuid, module, revnum = self.revsplit(rev)
338 entries = svn.client.ls(self.base + module, optrev(revnum),
338 entries = svn.client.ls(self.base + module, optrev(revnum),
339 True, self.ctx)
339 True, self.ctx)
340 files = [n for n,e in entries.iteritems()
340 files = [n for n,e in entries.iteritems()
341 if e.kind == svn.core.svn_node_file]
341 if e.kind == svn.core.svn_node_file]
342 copies = {}
342 copies = {}
343
343
344 files.sort()
344 files.sort()
345 files = zip(files, [rev] * len(files))
345 files = zip(files, [rev] * len(files))
346
346
347 # caller caches the result, so free it here to release memory
347 # caller caches the result, so free it here to release memory
348 del self.paths[rev]
348 del self.paths[rev]
349 return (files, copies)
349 return (files, copies)
350
350
351 def getchangedfiles(self, rev, i):
351 def getchangedfiles(self, rev, i):
352 changes = self.getchanges(rev)
352 changes = self.getchanges(rev)
353 self._changescache = (rev, changes)
353 self._changescache = (rev, changes)
354 return [f[0] for f in changes[0]]
354 return [f[0] for f in changes[0]]
355
355
356 def getcommit(self, rev):
356 def getcommit(self, rev):
357 if rev not in self.commits:
357 if rev not in self.commits:
358 uuid, module, revnum = self.revsplit(rev)
358 uuid, module, revnum = self.revsplit(rev)
359 self.module = module
359 self.module = module
360 self.reparent(module)
360 self.reparent(module)
361 # We assume that:
361 # We assume that:
362 # - requests for revisions after "stop" come from the
362 # - requests for revisions after "stop" come from the
363 # revision graph backward traversal. Cache all of them
363 # revision graph backward traversal. Cache all of them
364 # down to stop, they will be used eventually.
364 # down to stop, they will be used eventually.
365 # - requests for revisions before "stop" come to get
365 # - requests for revisions before "stop" come to get
366 # isolated branches parents. Just fetch what is needed.
366 # isolated branches parents. Just fetch what is needed.
367 stop = self.lastrevs.get(module, 0)
367 stop = self.lastrevs.get(module, 0)
368 if revnum < stop:
368 if revnum < stop:
369 stop = revnum + 1
369 stop = revnum + 1
370 self._fetch_revisions(revnum, stop)
370 self._fetch_revisions(revnum, stop)
371 commit = self.commits[rev]
371 commit = self.commits[rev]
372 # caller caches the result, so free it here to release memory
372 # caller caches the result, so free it here to release memory
373 del self.commits[rev]
373 del self.commits[rev]
374 return commit
374 return commit
375
375
376 def gettags(self):
376 def gettags(self):
377 tags = {}
377 tags = {}
378 if self.tags is None:
378 if self.tags is None:
379 return tags
379 return tags
380
380
381 # svn tags are just a convention: project branches left in a
381 # svn tags are just a convention: project branches left in a
382 # 'tags' directory. There is no other relationship than
382 # 'tags' directory. There is no other relationship than
383 # ancestry, which is expensive to discover and makes them hard
383 # ancestry, which is expensive to discover and makes them hard
384 # to update incrementally. Worse, past revisions may be
384 # to update incrementally. Worse, past revisions may be
385 # referenced by tags far away in the future, requiring a deep
385 # referenced by tags far away in the future, requiring a deep
386 # history traversal on every calculation. Current code
386 # history traversal on every calculation. Current code
387 # performs a single backward traversal, tracking moves within
387 # performs a single backward traversal, tracking moves within
388 # the tags directory (tag renaming) and recording a new tag
388 # the tags directory (tag renaming) and recording a new tag
389 # every time a project is copied from outside the tags
389 # every time a project is copied from outside the tags
390 # directory. It also lists deleted tags; this behaviour may
390 # directory. It also lists deleted tags; this behaviour may
391 # change in the future.
391 # change in the future.
392 pendings = []
392 pendings = []
393 tagspath = self.tags
393 tagspath = self.tags
394 start = svn.ra.get_latest_revnum(self.ra)
394 start = svn.ra.get_latest_revnum(self.ra)
395 try:
395 try:
396 for entry in get_log(self.url, [self.tags], start, self.startrev):
396 for entry in get_log(self.url, [self.tags], start, self.startrev):
397 origpaths, revnum, author, date, message = entry
397 origpaths, revnum, author, date, message = entry
398 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
398 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
399 in origpaths.iteritems() if e.copyfrom_path]
399 in origpaths.iteritems() if e.copyfrom_path]
400 copies.sort()
400 copies.sort()
401 # Apply moves/copies from more specific to general
401 # Apply moves/copies from more specific to general
402 copies.reverse()
402 copies.reverse()
403
403
404 srctagspath = tagspath
404 srctagspath = tagspath
405 if copies and copies[-1][2] == tagspath:
405 if copies and copies[-1][2] == tagspath:
406 # Track tags directory moves
406 # Track tags directory moves
407 srctagspath = copies.pop()[0]
407 srctagspath = copies.pop()[0]
408
408
409 for source, sourcerev, dest in copies:
409 for source, sourcerev, dest in copies:
410 if not dest.startswith(tagspath + '/'):
410 if not dest.startswith(tagspath + '/'):
411 continue
411 continue
412 for tag in pendings:
412 for tag in pendings:
413 if tag[0].startswith(dest):
413 if tag[0].startswith(dest):
414 tagpath = source + tag[0][len(dest):]
414 tagpath = source + tag[0][len(dest):]
415 tag[:2] = [tagpath, sourcerev]
415 tag[:2] = [tagpath, sourcerev]
416 break
416 break
417 else:
417 else:
418 pendings.append([source, sourcerev, dest.split('/')[-1]])
418 pendings.append([source, sourcerev, dest.split('/')[-1]])
419
419
420 # Tell tag renamings from tag creations
420 # Tell tag renamings from tag creations
421 remainings = []
421 remainings = []
422 for source, sourcerev, tagname in pendings:
422 for source, sourcerev, tagname in pendings:
423 if source.startswith(srctagspath):
423 if source.startswith(srctagspath):
424 remainings.append([source, sourcerev, tagname])
424 remainings.append([source, sourcerev, tagname])
425 continue
425 continue
426 # From revision may be fake, get one with changes
426 # From revision may be fake, get one with changes
427 tagid = self.latest(source, sourcerev)
427 tagid = self.latest(source, sourcerev)
428 if tagid:
428 if tagid:
429 tags[tagname] = tagid
429 tags[tagname] = tagid
430 pendings = remainings
430 pendings = remainings
431 tagspath = srctagspath
431 tagspath = srctagspath
432
432
433 except SubversionException, (inst, num):
433 except SubversionException, (inst, num):
434 self.ui.note('no tags found at revision %d\n' % start)
434 self.ui.note('no tags found at revision %d\n' % start)
435 return tags
435 return tags
436
436
437 def converted(self, rev, destrev):
437 def converted(self, rev, destrev):
438 if not self.wc:
438 if not self.wc:
439 return
439 return
440 if self.convertfp is None:
440 if self.convertfp is None:
441 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
441 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
442 'a')
442 'a')
443 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
443 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
444 self.convertfp.flush()
444 self.convertfp.flush()
445
445
446 # -- helper functions --
446 # -- helper functions --
447
447
448 def revid(self, revnum, module=None):
448 def revid(self, revnum, module=None):
449 if not module:
449 if not module:
450 module = self.module
450 module = self.module
451 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
451 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
452 revnum)
452 revnum)
453
453
454 def revnum(self, rev):
454 def revnum(self, rev):
455 return int(rev.split('@')[-1])
455 return int(rev.split('@')[-1])
456
456
457 def revsplit(self, rev):
457 def revsplit(self, rev):
458 url, revnum = rev.encode(self.encoding).split('@', 1)
458 url, revnum = rev.encode(self.encoding).split('@', 1)
459 revnum = int(revnum)
459 revnum = int(revnum)
460 parts = url.split('/', 1)
460 parts = url.split('/', 1)
461 uuid = parts.pop(0)[4:]
461 uuid = parts.pop(0)[4:]
462 mod = ''
462 mod = ''
463 if parts:
463 if parts:
464 mod = '/' + parts[0]
464 mod = '/' + parts[0]
465 return uuid, mod, revnum
465 return uuid, mod, revnum
466
466
467 def latest(self, path, stop=0):
467 def latest(self, path, stop=0):
468 """Find the latest revid affecting path, up to stop. It may return
468 """Find the latest revid affecting path, up to stop. It may return
469 a revision in a different module, since a branch may be moved without
469 a revision in a different module, since a branch may be moved without
470 a change being reported. Return None if computed module does not
470 a change being reported. Return None if computed module does not
471 belong to rootmodule subtree.
471 belong to rootmodule subtree.
472 """
472 """
473 if not path.startswith(self.rootmodule):
473 if not path.startswith(self.rootmodule):
474 # Requests on foreign branches may be forbidden at server level
474 # Requests on foreign branches may be forbidden at server level
475 self.ui.debug(_('ignoring foreign branch %r\n') % path)
475 self.ui.debug(_('ignoring foreign branch %r\n') % path)
476 return None
476 return None
477
477
478 if not stop:
478 if not stop:
479 stop = svn.ra.get_latest_revnum(self.ra)
479 stop = svn.ra.get_latest_revnum(self.ra)
480 try:
480 try:
481 self.reparent('')
481 self.reparent('')
482 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
482 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
483 self.reparent(self.module)
483 self.reparent(self.module)
484 except SubversionException:
484 except SubversionException:
485 dirent = None
485 dirent = None
486 if not dirent:
486 if not dirent:
487 raise util.Abort('%s not found up to revision %d' % (path, stop))
487 raise util.Abort('%s not found up to revision %d' % (path, stop))
488
488
489 # stat() gives us the previous revision on this line of development, but
489 # stat() gives us the previous revision on this line of development, but
490 # it might be in *another module*. Fetch the log and detect renames down
490 # it might be in *another module*. Fetch the log and detect renames down
491 # to the latest revision.
491 # to the latest revision.
492 stream = get_log(self.url, [path], stop, dirent.created_rev)
492 stream = get_log(self.url, [path], stop, dirent.created_rev)
493 try:
493 try:
494 for entry in stream:
494 for entry in stream:
495 paths, revnum, author, date, message = entry
495 paths, revnum, author, date, message = entry
496 if revnum <= dirent.created_rev:
496 if revnum <= dirent.created_rev:
497 break
497 break
498
498
499 for p in paths:
499 for p in paths:
500 if not path.startswith(p) or not paths[p].copyfrom_path:
500 if not path.startswith(p) or not paths[p].copyfrom_path:
501 continue
501 continue
502 newpath = paths[p].copyfrom_path + path[len(p):]
502 newpath = paths[p].copyfrom_path + path[len(p):]
503 self.ui.debug("branch renamed from %s to %s at %d\n" %
503 self.ui.debug("branch renamed from %s to %s at %d\n" %
504 (path, newpath, revnum))
504 (path, newpath, revnum))
505 path = newpath
505 path = newpath
506 break
506 break
507 finally:
507 finally:
508 stream.close()
508 stream.close()
509
509
510 if not path.startswith(self.rootmodule):
510 if not path.startswith(self.rootmodule):
511 self.ui.debug(_('ignoring foreign branch %r\n') % path)
511 self.ui.debug(_('ignoring foreign branch %r\n') % path)
512 return None
512 return None
513 return self.revid(dirent.created_rev, path)
513 return self.revid(dirent.created_rev, path)
514
514
515 def get_blacklist(self):
515 def get_blacklist(self):
516 """Avoid certain revision numbers.
516 """Avoid certain revision numbers.
517 It is not uncommon for two nearby revisions to cancel each other
517 It is not uncommon for two nearby revisions to cancel each other
518 out, e.g. 'I copied trunk into a subdirectory of itself instead
518 out, e.g. 'I copied trunk into a subdirectory of itself instead
519 of making a branch'. The converted repository is significantly
519 of making a branch'. The converted repository is significantly
520 smaller if we ignore such revisions."""
520 smaller if we ignore such revisions."""
521 self.blacklist = util.set()
521 self.blacklist = util.set()
522 blacklist = self.blacklist
522 blacklist = self.blacklist
523 for line in file("blacklist.txt", "r"):
523 for line in file("blacklist.txt", "r"):
524 if not line.startswith("#"):
524 if not line.startswith("#"):
525 try:
525 try:
526 svn_rev = int(line.strip())
526 svn_rev = int(line.strip())
527 blacklist.add(svn_rev)
527 blacklist.add(svn_rev)
528 except ValueError, e:
528 except ValueError, e:
529 pass # not an integer or a comment
529 pass # not an integer or a comment
530
530
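# --- editor's note (illustration, not part of the original changeset) ---
# get_blacklist() above reads a plain-text "blacklist.txt" from the current
# working directory. Based on the parsing code, a file such as (revision
# numbers made up):
#
#     # revisions that cancel each other out
#     6939
#     6941
#
# makes is_blacklisted() answer True for those Subversion revision numbers.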
531 def is_blacklisted(self, svn_rev):
531 def is_blacklisted(self, svn_rev):
532 return svn_rev in self.blacklist
532 return svn_rev in self.blacklist
533
533
534 def reparent(self, module):
534 def reparent(self, module):
535 svn_url = self.base + module
535 svn_url = self.base + module
536 self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
536 self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
537 svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
537 svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
538
538
539 def expandpaths(self, rev, paths, parents):
539 def expandpaths(self, rev, paths, parents):
540 entries = []
540 entries = []
541 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
541 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
542 copies = {}
542 copies = {}
543
543
544 new_module, revnum = self.revsplit(rev)[1:]
544 new_module, revnum = self.revsplit(rev)[1:]
545 if new_module != self.module:
545 if new_module != self.module:
546 self.module = new_module
546 self.module = new_module
547 self.reparent(self.module)
547 self.reparent(self.module)
548
548
549 for path, ent in paths:
549 for path, ent in paths:
550 entrypath = self.getrelpath(path)
550 entrypath = self.getrelpath(path)
551 entry = entrypath.decode(self.encoding)
551 entry = entrypath.decode(self.encoding)
552
552
553 kind = svn.ra.check_path(self.ra, entrypath, revnum)
553 kind = svn.ra.check_path(self.ra, entrypath, revnum)
554 if kind == svn.core.svn_node_file:
554 if kind == svn.core.svn_node_file:
555 entries.append(self.recode(entry))
555 entries.append(self.recode(entry))
556 if not ent.copyfrom_path or not parents:
556 if not ent.copyfrom_path or not parents:
557 continue
557 continue
558 # Copy sources not in parent revisions cannot be represented,
558 # Copy sources not in parent revisions cannot be represented,
559 # ignore their origin for now
559 # ignore their origin for now
560 pmodule, prevnum = self.revsplit(parents[0])[1:]
560 pmodule, prevnum = self.revsplit(parents[0])[1:]
561 if ent.copyfrom_rev < prevnum:
561 if ent.copyfrom_rev < prevnum:
562 continue
562 continue
563 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
563 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
564 if not copyfrom_path:
564 if not copyfrom_path:
565 continue
565 continue
566 self.ui.debug("copied to %s from %s@%s\n" %
566 self.ui.debug("copied to %s from %s@%s\n" %
567 (entrypath, copyfrom_path, ent.copyfrom_rev))
567 (entrypath, copyfrom_path, ent.copyfrom_rev))
568 copies[self.recode(entry)] = self.recode(copyfrom_path)
568 copies[self.recode(entry)] = self.recode(copyfrom_path)
569 elif kind == 0: # gone, but had better be a deleted *file*
569 elif kind == 0: # gone, but had better be a deleted *file*
570 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
570 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
571
571
572 # if a branch is created but entries are removed in the same
572 # if a branch is created but entries are removed in the same
573 # changeset, get the right fromrev
573 # changeset, get the right fromrev
574 # parents cannot be empty here, you cannot remove things from
574 # parents cannot be empty here, you cannot remove things from
575 # a root revision.
575 # a root revision.
576 uuid, old_module, fromrev = self.revsplit(parents[0])
576 uuid, old_module, fromrev = self.revsplit(parents[0])
577
577
578 basepath = old_module + "/" + self.getrelpath(path)
578 basepath = old_module + "/" + self.getrelpath(path)
579 entrypath = basepath
579 entrypath = basepath
580
580
581 def lookup_parts(p):
581 def lookup_parts(p):
582 rc = None
582 rc = None
583 parts = p.split("/")
583 parts = p.split("/")
584 for i in range(len(parts)):
584 for i in range(len(parts)):
585 part = "/".join(parts[:i])
585 part = "/".join(parts[:i])
586 info = part, copyfrom.get(part, None)
586 info = part, copyfrom.get(part, None)
587 if info[1] is not None:
587 if info[1] is not None:
588 self.ui.debug("Found parent directory %s\n" % info[1])
588 self.ui.debug("Found parent directory %s\n" % info[1])
589 rc = info
589 rc = info
590 return rc
590 return rc
591
591
592 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
592 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
593
593
594 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
594 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
595
595
596 # need to remove fragment from lookup_parts and replace with copyfrom_path
596 # need to remove fragment from lookup_parts and replace with copyfrom_path
597 if frompath is not None:
597 if frompath is not None:
598 self.ui.debug("munge-o-matic\n")
598 self.ui.debug("munge-o-matic\n")
599 self.ui.debug(entrypath + '\n')
599 self.ui.debug(entrypath + '\n')
600 self.ui.debug(entrypath[len(frompath):] + '\n')
600 self.ui.debug(entrypath[len(frompath):] + '\n')
601 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
601 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
602 fromrev = froment.copyfrom_rev
602 fromrev = froment.copyfrom_rev
603 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
603 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
604
604
605 # We can avoid the reparent calls if the module has not changed
605 # We can avoid the reparent calls if the module has not changed
606 # but it is probably not worth the pain.
606 # but it is probably not worth the pain.
607 self.reparent('')
607 self.reparent('')
608 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
608 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
609 self.reparent(self.module)
609 self.reparent(self.module)
610
610
611 if fromkind == svn.core.svn_node_file: # a deleted file
611 if fromkind == svn.core.svn_node_file: # a deleted file
612 entries.append(self.recode(entry))
612 entries.append(self.recode(entry))
613 elif fromkind == svn.core.svn_node_dir:
613 elif fromkind == svn.core.svn_node_dir:
614 # print "Deleted/moved non-file:", revnum, path, ent
614 # print "Deleted/moved non-file:", revnum, path, ent
615 # children = self._find_children(path, revnum - 1)
615 # children = self._find_children(path, revnum - 1)
616 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
616 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
617 # Sometimes this is tricky. For example: in
617 # Sometimes this is tricky. For example: in
618 # The Subversion Repository revision 6940 a dir
618 # The Subversion Repository revision 6940 a dir
619 # was copied and one of its files was deleted
619 # was copied and one of its files was deleted
620 # from the new location in the same commit. This
620 # from the new location in the same commit. This
621 # code can't deal with that yet.
621 # code can't deal with that yet.
622 if ent.action == 'C':
622 if ent.action == 'C':
623 children = self._find_children(path, fromrev)
623 children = self._find_children(path, fromrev)
624 else:
624 else:
625 oroot = entrypath.strip('/')
625 oroot = entrypath.strip('/')
626 nroot = path.strip('/')
626 nroot = path.strip('/')
627 children = self._find_children(oroot, fromrev)
627 children = self._find_children(oroot, fromrev)
628 children = [s.replace(oroot,nroot) for s in children]
628 children = [s.replace(oroot,nroot) for s in children]
629 # Mark all [files, not directories] as deleted.
629 # Mark all [files, not directories] as deleted.
630 for child in children:
630 for child in children:
631 # Can we move a child directory and its
631 # Can we move a child directory and its
632 # parent in the same commit? (probably can). Could
632 # parent in the same commit? (probably can). Could
633 # cause problems if instead of revnum -1,
633 # cause problems if instead of revnum -1,
634 # we have to look in (copyfrom_path, revnum - 1)
634 # we have to look in (copyfrom_path, revnum - 1)
635 entrypath = self.getrelpath("/" + child, module=old_module)
635 entrypath = self.getrelpath("/" + child, module=old_module)
636 if entrypath:
636 if entrypath:
637 entry = self.recode(entrypath.decode(self.encoding))
637 entry = self.recode(entrypath.decode(self.encoding))
638 if entry in copies:
638 if entry in copies:
639 # deleted file within a copy
639 # deleted file within a copy
640 del copies[entry]
640 del copies[entry]
641 else:
641 else:
642 entries.append(entry)
642 entries.append(entry)
643 else:
643 else:
644 self.ui.debug('unknown path in revision %d: %s\n' % \
644 self.ui.debug('unknown path in revision %d: %s\n' % \
645 (revnum, path))
645 (revnum, path))
646 elif kind == svn.core.svn_node_dir:
646 elif kind == svn.core.svn_node_dir:
647 # Should probably synthesize normal file entries
647 # Should probably synthesize normal file entries
648 # and handle as above to clean up copy/rename handling.
648 # and handle as above to clean up copy/rename handling.
649
649
650 # If the directory just had a prop change,
650 # If the directory just had a prop change,
651 # then we shouldn't need to look for its children.
651 # then we shouldn't need to look for its children.
652 if ent.action == 'M':
652 if ent.action == 'M':
653 continue
653 continue
654
654
655 # Also this could create duplicate entries. Not sure
655 # Also this could create duplicate entries. Not sure
656 # whether this will matter. Maybe should make entries a set.
656 # whether this will matter. Maybe should make entries a set.
657 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
657 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
658 # This will fail if a directory was copied
658 # This will fail if a directory was copied
659 # from another branch and then some of its files
659 # from another branch and then some of its files
660 # were deleted in the same transaction.
660 # were deleted in the same transaction.
661 children = self._find_children(path, revnum)
661 children = util.sort(self._find_children(path, revnum))
662 children.sort()
663 for child in children:
662 for child in children:
664 # Can we move a child directory and its
663 # Can we move a child directory and its
665 # parent in the same commit? (probably can). Could
664 # parent in the same commit? (probably can). Could
666 # cause problems if instead of revnum -1,
665 # cause problems if instead of revnum -1,
667 # we have to look in (copyfrom_path, revnum - 1)
666 # we have to look in (copyfrom_path, revnum - 1)
668 entrypath = self.getrelpath("/" + child)
667 entrypath = self.getrelpath("/" + child)
669 # print child, self.module, entrypath
668 # print child, self.module, entrypath
670 if entrypath:
669 if entrypath:
671 # Need to filter out directories here...
670 # Need to filter out directories here...
672 kind = svn.ra.check_path(self.ra, entrypath, revnum)
671 kind = svn.ra.check_path(self.ra, entrypath, revnum)
673 if kind != svn.core.svn_node_dir:
672 if kind != svn.core.svn_node_dir:
674 entries.append(self.recode(entrypath))
673 entries.append(self.recode(entrypath))
675
674
676 # Copies here (must copy all from source)
675 # Copies here (must copy all from source)
677 # Probably not a real problem for us if
676 # Probably not a real problem for us if
678 # source does not exist
677 # source does not exist
679 if not ent.copyfrom_path or not parents:
678 if not ent.copyfrom_path or not parents:
680 continue
679 continue
681 # Copy sources not in parent revisions cannot be represented,
680 # Copy sources not in parent revisions cannot be represented,
682 # ignore their origin for now
681 # ignore their origin for now
683 pmodule, prevnum = self.revsplit(parents[0])[1:]
682 pmodule, prevnum = self.revsplit(parents[0])[1:]
684 if ent.copyfrom_rev < prevnum:
683 if ent.copyfrom_rev < prevnum:
685 continue
684 continue
686 copyfrompath = ent.copyfrom_path.decode(self.encoding)
685 copyfrompath = ent.copyfrom_path.decode(self.encoding)
687 copyfrompath = self.getrelpath(copyfrompath, pmodule)
686 copyfrompath = self.getrelpath(copyfrompath, pmodule)
688 if not copyfrompath:
687 if not copyfrompath:
689 continue
688 continue
690 copyfrom[path] = ent
689 copyfrom[path] = ent
691 self.ui.debug("mark %s came from %s:%d\n"
690 self.ui.debug("mark %s came from %s:%d\n"
692 % (path, copyfrompath, ent.copyfrom_rev))
691 % (path, copyfrompath, ent.copyfrom_rev))
693 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
692 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
694 children.sort()
693 children.sort()
695 for child in children:
694 for child in children:
696 entrypath = self.getrelpath("/" + child, pmodule)
695 entrypath = self.getrelpath("/" + child, pmodule)
697 if not entrypath:
696 if not entrypath:
698 continue
697 continue
699 entry = entrypath.decode(self.encoding)
698 entry = entrypath.decode(self.encoding)
700 copytopath = path + entry[len(copyfrompath):]
699 copytopath = path + entry[len(copyfrompath):]
701 copytopath = self.getrelpath(copytopath)
700 copytopath = self.getrelpath(copytopath)
702 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
701 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
703
702
704 return (util.unique(entries), copies)
703 return (util.unique(entries), copies)
705
704
706 def _fetch_revisions(self, from_revnum, to_revnum):
705 def _fetch_revisions(self, from_revnum, to_revnum):
707 if from_revnum < to_revnum:
706 if from_revnum < to_revnum:
708 from_revnum, to_revnum = to_revnum, from_revnum
707 from_revnum, to_revnum = to_revnum, from_revnum
709
708
710 self.child_cset = None
709 self.child_cset = None
711
710
712 def isdescendantof(parent, child):
711 def isdescendantof(parent, child):
713 if not child or not parent or not child.startswith(parent):
712 if not child or not parent or not child.startswith(parent):
714 return False
713 return False
715 subpath = child[len(parent):]
714 subpath = child[len(parent):]
716 return len(subpath) > 1 and subpath[0] == '/'
715 return len(subpath) > 1 and subpath[0] == '/'
717
716
718 def parselogentry(orig_paths, revnum, author, date, message):
717 def parselogentry(orig_paths, revnum, author, date, message):
719 """Return the parsed commit object or None, and True if
718 """Return the parsed commit object or None, and True if
720 the revision is a branch root.
719 the revision is a branch root.
721 """
720 """
722 self.ui.debug("parsing revision %d (%d changes)\n" %
721 self.ui.debug("parsing revision %d (%d changes)\n" %
723 (revnum, len(orig_paths)))
722 (revnum, len(orig_paths)))
724
723
725 branched = False
724 branched = False
726 rev = self.revid(revnum)
725 rev = self.revid(revnum)
727 # branch log might return entries for a parent we already have
726 # branch log might return entries for a parent we already have
728
727
729 if (rev in self.commits or revnum < to_revnum):
728 if (rev in self.commits or revnum < to_revnum):
730 return None, branched
729 return None, branched
731
730
732 parents = []
731 parents = []
733 # check whether this revision is the start of a branch or part
732 # check whether this revision is the start of a branch or part
734 # of a branch renaming
733 # of a branch renaming
735 orig_paths = orig_paths.items()
734 orig_paths = util.sort(orig_paths.items())
736 orig_paths.sort()
737 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
735 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
738 if root_paths:
736 if root_paths:
739 path, ent = root_paths[-1]
737 path, ent = root_paths[-1]
740 if ent.copyfrom_path:
738 if ent.copyfrom_path:
741 # If dir was moved while one of its file was removed
739 # If dir was moved while one of its file was removed
742 # the log may look like:
740 # the log may look like:
743 # A /dir (from /dir:x)
741 # A /dir (from /dir:x)
744 # A /dir/a (from /dir/a:y)
742 # A /dir/a (from /dir/a:y)
745 # A /dir/b (from /dir/b:z)
743 # A /dir/b (from /dir/b:z)
746 # ...
744 # ...
747 # for all remaining children.
745 # for all remaining children.
748 # Let's take the highest child element from rev as source.
746 # Let's take the highest child element from rev as source.
749 copies = [(p,e) for p,e in orig_paths[:-1]
747 copies = [(p,e) for p,e in orig_paths[:-1]
750 if isdescendantof(ent.copyfrom_path, e.copyfrom_path)]
748 if isdescendantof(ent.copyfrom_path, e.copyfrom_path)]
751 fromrev = max([e.copyfrom_rev for p,e in copies] + [ent.copyfrom_rev])
749 fromrev = max([e.copyfrom_rev for p,e in copies] + [ent.copyfrom_rev])
752 branched = True
750 branched = True
753 newpath = ent.copyfrom_path + self.module[len(path):]
751 newpath = ent.copyfrom_path + self.module[len(path):]
754 # ent.copyfrom_rev may not be the actual last revision
752 # ent.copyfrom_rev may not be the actual last revision
755 previd = self.latest(newpath, fromrev)
753 previd = self.latest(newpath, fromrev)
756 if previd is not None:
754 if previd is not None:
757 prevmodule, prevnum = self.revsplit(previd)[1:]
755 prevmodule, prevnum = self.revsplit(previd)[1:]
758 if prevnum >= self.startrev:
756 if prevnum >= self.startrev:
759 parents = [previd]
757 parents = [previd]
760 self.ui.note('found parent of branch %s at %d: %s\n' %
758 self.ui.note('found parent of branch %s at %d: %s\n' %
761 (self.module, prevnum, prevmodule))
759 (self.module, prevnum, prevmodule))
762 else:
760 else:
763 self.ui.debug("No copyfrom path, don't know what to do.\n")
761 self.ui.debug("No copyfrom path, don't know what to do.\n")
764
762
765 paths = []
763 paths = []
766 # filter out unrelated paths
764 # filter out unrelated paths
767 for path, ent in orig_paths:
765 for path, ent in orig_paths:
768 if self.getrelpath(path) is None:
766 if self.getrelpath(path) is None:
769 continue
767 continue
770 paths.append((path, ent))
768 paths.append((path, ent))
771
769
772 # Example SVN datetime. Includes microseconds.
770 # Example SVN datetime. Includes microseconds.
773 # ISO-8601 conformant
771 # ISO-8601 conformant
774 # '2007-01-04T17:35:00.902377Z'
772 # '2007-01-04T17:35:00.902377Z'
775 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
773 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
776
774
777 log = message and self.recode(message) or ''
775 log = message and self.recode(message) or ''
778 author = author and self.recode(author) or ''
776 author = author and self.recode(author) or ''
779 try:
777 try:
780 branch = self.module.split("/")[-1]
778 branch = self.module.split("/")[-1]
781 if branch == 'trunk':
779 if branch == 'trunk':
782 branch = ''
780 branch = ''
783 except IndexError:
781 except IndexError:
784 branch = None
782 branch = None
785
783
786 cset = commit(author=author,
784 cset = commit(author=author,
787 date=util.datestr(date),
785 date=util.datestr(date),
788 desc=log,
786 desc=log,
789 parents=parents,
787 parents=parents,
790 branch=branch,
788 branch=branch,
791 rev=rev.encode('utf-8'))
789 rev=rev.encode('utf-8'))
792
790
793 self.commits[rev] = cset
791 self.commits[rev] = cset
794 # The parents list is *shared* among self.paths and the
792 # The parents list is *shared* among self.paths and the
795 # commit object. Both will be updated below.
793 # commit object. Both will be updated below.
796 self.paths[rev] = (paths, cset.parents)
794 self.paths[rev] = (paths, cset.parents)
797 if self.child_cset and not self.child_cset.parents:
795 if self.child_cset and not self.child_cset.parents:
798 self.child_cset.parents[:] = [rev]
796 self.child_cset.parents[:] = [rev]
799 self.child_cset = cset
797 self.child_cset = cset
800 return cset, branched
798 return cset, branched
801
799
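The orig_paths handling above is where the new sort helper from this changeset first appears: the old items()/sort() two-step is collapsed into a single util.sort() call. The helper's definition is not part of this hunk; a minimal sketch of what such a sort-and-return helper presumably looks like (an assumption for illustration, not the actual mercurial/util.py code):

def sort(l):
    """Return the elements of any iterable as a sorted list."""
    if not isinstance(l, list):
        l = list(l)
    l.sort()
    return l

# With this, util.sort(orig_paths.items()) replaces the removed pair
#   orig_paths = orig_paths.items(); orig_paths.sort()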
802 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
800 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
803 (self.module, from_revnum, to_revnum))
801 (self.module, from_revnum, to_revnum))
804
802
805 try:
803 try:
806 firstcset = None
804 firstcset = None
807 lastonbranch = False
805 lastonbranch = False
808 stream = get_log(self.url, [self.module], from_revnum, to_revnum)
806 stream = get_log(self.url, [self.module], from_revnum, to_revnum)
809 try:
807 try:
810 for entry in stream:
808 for entry in stream:
811 paths, revnum, author, date, message = entry
809 paths, revnum, author, date, message = entry
812 if revnum < self.startrev:
810 if revnum < self.startrev:
813 lastonbranch = True
811 lastonbranch = True
814 break
812 break
815 if self.is_blacklisted(revnum):
813 if self.is_blacklisted(revnum):
816 self.ui.note('skipping blacklisted revision %d\n'
814 self.ui.note('skipping blacklisted revision %d\n'
817 % revnum)
815 % revnum)
818 continue
816 continue
819 if paths is None:
817 if paths is None:
820 self.ui.debug('revision %d has no entries\n' % revnum)
818 self.ui.debug('revision %d has no entries\n' % revnum)
821 continue
819 continue
822 cset, lastonbranch = parselogentry(paths, revnum, author,
820 cset, lastonbranch = parselogentry(paths, revnum, author,
823 date, message)
821 date, message)
824 if cset:
822 if cset:
825 firstcset = cset
823 firstcset = cset
826 if lastonbranch:
824 if lastonbranch:
827 break
825 break
828 finally:
826 finally:
829 stream.close()
827 stream.close()
830
828
831 if not lastonbranch and firstcset and not firstcset.parents:
829 if not lastonbranch and firstcset and not firstcset.parents:
832 # The first revision of the sequence (the last fetched one)
830 # The first revision of the sequence (the last fetched one)
833 # has invalid parents if not a branch root. Find the parent
831 # has invalid parents if not a branch root. Find the parent
834 # revision now, if any.
832 # revision now, if any.
835 try:
833 try:
836 firstrevnum = self.revnum(firstcset.rev)
834 firstrevnum = self.revnum(firstcset.rev)
837 if firstrevnum > 1:
835 if firstrevnum > 1:
838 latest = self.latest(self.module, firstrevnum - 1)
836 latest = self.latest(self.module, firstrevnum - 1)
839 if latest:
837 if latest:
840 firstcset.parents.append(latest)
838 firstcset.parents.append(latest)
841 except util.Abort:
839 except util.Abort:
842 pass
840 pass
843 except SubversionException, (inst, num):
841 except SubversionException, (inst, num):
844 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
842 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
845 raise util.Abort('svn: branch has no revision %s' % to_revnum)
843 raise util.Abort('svn: branch has no revision %s' % to_revnum)
846 raise
844 raise
847
845
848 def _getfile(self, file, rev):
846 def _getfile(self, file, rev):
849 io = StringIO()
847 io = StringIO()
850 # TODO: ra.get_file transmits the whole file instead of diffs.
848 # TODO: ra.get_file transmits the whole file instead of diffs.
851 mode = ''
849 mode = ''
852 try:
850 try:
853 new_module, revnum = self.revsplit(rev)[1:]
851 new_module, revnum = self.revsplit(rev)[1:]
854 if self.module != new_module:
852 if self.module != new_module:
855 self.module = new_module
853 self.module = new_module
856 self.reparent(self.module)
854 self.reparent(self.module)
857 info = svn.ra.get_file(self.ra, file, revnum, io)
855 info = svn.ra.get_file(self.ra, file, revnum, io)
858 if isinstance(info, list):
856 if isinstance(info, list):
859 info = info[-1]
857 info = info[-1]
860 mode = ("svn:executable" in info) and 'x' or ''
858 mode = ("svn:executable" in info) and 'x' or ''
861 mode = ("svn:special" in info) and 'l' or mode
859 mode = ("svn:special" in info) and 'l' or mode
862 except SubversionException, e:
860 except SubversionException, e:
863 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
861 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
864 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
862 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
865 if e.apr_err in notfound: # File not found
863 if e.apr_err in notfound: # File not found
866 raise IOError()
864 raise IOError()
867 raise
865 raise
868 data = io.getvalue()
866 data = io.getvalue()
869 if mode == 'l':
867 if mode == 'l':
870 link_prefix = "link "
868 link_prefix = "link "
871 if data.startswith(link_prefix):
869 if data.startswith(link_prefix):
872 data = data[len(link_prefix):]
870 data = data[len(link_prefix):]
873 return data, mode
871 return data, mode
874
872
875 def _find_children(self, path, revnum):
873 def _find_children(self, path, revnum):
876 path = path.strip('/')
874 path = path.strip('/')
877 pool = Pool()
875 pool = Pool()
878 rpath = '/'.join([self.base, path]).strip('/')
876 rpath = '/'.join([self.base, path]).strip('/')
879 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
877 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
880
878
881 def getrelpath(self, path, module=None):
879 def getrelpath(self, path, module=None):
882 if module is None:
880 if module is None:
883 module = self.module
881 module = self.module
884 # Given the repository url of this wc, say
882 # Given the repository url of this wc, say
885 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
883 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
886 # extract the "entry" portion (a relative path) from what
884 # extract the "entry" portion (a relative path) from what
887 # svn log --xml says, ie
885 # svn log --xml says, ie
888 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
886 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
889 # that is to say "tests/PloneTestCase.py"
887 # that is to say "tests/PloneTestCase.py"
890 if path.startswith(module):
888 if path.startswith(module):
891 relative = path.rstrip('/')[len(module):]
889 relative = path.rstrip('/')[len(module):]
892 if relative.startswith('/'):
890 if relative.startswith('/'):
893 return relative[1:]
891 return relative[1:]
894 elif relative == '':
892 elif relative == '':
895 return relative
893 return relative
896
894
897 # The path is outside our tracked tree...
895 # The path is outside our tracked tree...
898 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
896 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
899 return None
897 return None
900
898
901 pre_revprop_change = '''#!/bin/sh
899 pre_revprop_change = '''#!/bin/sh
902
900
903 REPOS="$1"
901 REPOS="$1"
904 REV="$2"
902 REV="$2"
905 USER="$3"
903 USER="$3"
906 PROPNAME="$4"
904 PROPNAME="$4"
907 ACTION="$5"
905 ACTION="$5"
908
906
909 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
907 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
910 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
908 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
911 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
909 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
912
910
913 echo "Changing prohibited revision property" >&2
911 echo "Changing prohibited revision property" >&2
914 exit 1
912 exit 1
915 '''
913 '''
916
914
917 class svn_sink(converter_sink, commandline):
915 class svn_sink(converter_sink, commandline):
918 commit_re = re.compile(r'Committed revision (\d+).', re.M)
916 commit_re = re.compile(r'Committed revision (\d+).', re.M)
919
917
920 def prerun(self):
918 def prerun(self):
921 if self.wc:
919 if self.wc:
922 os.chdir(self.wc)
920 os.chdir(self.wc)
923
921
924 def postrun(self):
922 def postrun(self):
925 if self.wc:
923 if self.wc:
926 os.chdir(self.cwd)
924 os.chdir(self.cwd)
927
925
928 def join(self, name):
926 def join(self, name):
929 return os.path.join(self.wc, '.svn', name)
927 return os.path.join(self.wc, '.svn', name)
930
928
931 def revmapfile(self):
929 def revmapfile(self):
932 return self.join('hg-shamap')
930 return self.join('hg-shamap')
933
931
934 def authorfile(self):
932 def authorfile(self):
935 return self.join('hg-authormap')
933 return self.join('hg-authormap')
936
934
937 def __init__(self, ui, path):
935 def __init__(self, ui, path):
938 converter_sink.__init__(self, ui, path)
936 converter_sink.__init__(self, ui, path)
939 commandline.__init__(self, ui, 'svn')
937 commandline.__init__(self, ui, 'svn')
940 self.delete = []
938 self.delete = []
941 self.setexec = []
939 self.setexec = []
942 self.delexec = []
940 self.delexec = []
943 self.copies = []
941 self.copies = []
944 self.wc = None
942 self.wc = None
945 self.cwd = os.getcwd()
943 self.cwd = os.getcwd()
946
944
947 path = os.path.realpath(path)
945 path = os.path.realpath(path)
948
946
949 created = False
947 created = False
950 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
948 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
951 self.wc = path
949 self.wc = path
952 self.run0('update')
950 self.run0('update')
953 else:
951 else:
954 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
952 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
955
953
956 if os.path.isdir(os.path.dirname(path)):
954 if os.path.isdir(os.path.dirname(path)):
957 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
955 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
958 ui.status(_('initializing svn repo %r\n') %
956 ui.status(_('initializing svn repo %r\n') %
959 os.path.basename(path))
957 os.path.basename(path))
960 commandline(ui, 'svnadmin').run0('create', path)
958 commandline(ui, 'svnadmin').run0('create', path)
961 created = path
959 created = path
962 path = util.normpath(path)
960 path = util.normpath(path)
963 if not path.startswith('/'):
961 if not path.startswith('/'):
964 path = '/' + path
962 path = '/' + path
965 path = 'file://' + path
963 path = 'file://' + path
966
964
967 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
965 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
968 self.run0('checkout', path, wcpath)
966 self.run0('checkout', path, wcpath)
969
967
970 self.wc = wcpath
968 self.wc = wcpath
971 self.opener = util.opener(self.wc)
969 self.opener = util.opener(self.wc)
972 self.wopener = util.opener(self.wc)
970 self.wopener = util.opener(self.wc)
973 self.childmap = mapfile(ui, self.join('hg-childmap'))
971 self.childmap = mapfile(ui, self.join('hg-childmap'))
974 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
972 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
975
973
976 if created:
974 if created:
977 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
975 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
978 fp = open(hook, 'w')
976 fp = open(hook, 'w')
979 fp.write(pre_revprop_change)
977 fp.write(pre_revprop_change)
980 fp.close()
978 fp.close()
981 util.set_flags(hook, "x")
979 util.set_flags(hook, "x")
982
980
983 xport = transport.SvnRaTransport(url=geturl(path))
981 xport = transport.SvnRaTransport(url=geturl(path))
984 self.uuid = svn.ra.get_uuid(xport.ra)
982 self.uuid = svn.ra.get_uuid(xport.ra)
985
983
986 def wjoin(self, *names):
984 def wjoin(self, *names):
987 return os.path.join(self.wc, *names)
985 return os.path.join(self.wc, *names)
988
986
989 def putfile(self, filename, flags, data):
987 def putfile(self, filename, flags, data):
990 if 'l' in flags:
988 if 'l' in flags:
991 self.wopener.symlink(data, filename)
989 self.wopener.symlink(data, filename)
992 else:
990 else:
993 try:
991 try:
994 if os.path.islink(self.wjoin(filename)):
992 if os.path.islink(self.wjoin(filename)):
995 os.unlink(filename)
993 os.unlink(filename)
996 except OSError:
994 except OSError:
997 pass
995 pass
998 self.wopener(filename, 'w').write(data)
996 self.wopener(filename, 'w').write(data)
999
997
1000 if self.is_exec:
998 if self.is_exec:
1001 was_exec = self.is_exec(self.wjoin(filename))
999 was_exec = self.is_exec(self.wjoin(filename))
1002 else:
1000 else:
1003 # On filesystems not supporting execute-bit, there is no way
1001 # On filesystems not supporting execute-bit, there is no way
1004 # to know if it is set except by asking subversion. Setting it
1002 # to know if it is set except by asking subversion. Setting it
1005 # systematically is just as expensive and much simpler.
1003 # systematically is just as expensive and much simpler.
1006 was_exec = 'x' not in flags
1004 was_exec = 'x' not in flags
1007
1005
1008 util.set_flags(self.wjoin(filename), flags)
1006 util.set_flags(self.wjoin(filename), flags)
1009 if was_exec:
1007 if was_exec:
1010 if 'x' not in flags:
1008 if 'x' not in flags:
1011 self.delexec.append(filename)
1009 self.delexec.append(filename)
1012 else:
1010 else:
1013 if 'x' in flags:
1011 if 'x' in flags:
1014 self.setexec.append(filename)
1012 self.setexec.append(filename)
1015
1013
1016 def _copyfile(self, source, dest):
1014 def _copyfile(self, source, dest):
1017 # SVN's copy command pukes if the destination file exists, but
1015 # SVN's copy command pukes if the destination file exists, but
1018 # our copyfile method expects to record a copy that has
1016 # our copyfile method expects to record a copy that has
1019 # already occurred. Cross the semantic gap.
1017 # already occurred. Cross the semantic gap.
1020 wdest = self.wjoin(dest)
1018 wdest = self.wjoin(dest)
1021 exists = os.path.exists(wdest)
1019 exists = os.path.exists(wdest)
1022 if exists:
1020 if exists:
1023 fd, tempname = tempfile.mkstemp(
1021 fd, tempname = tempfile.mkstemp(
1024 prefix='hg-copy-', dir=os.path.dirname(wdest))
1022 prefix='hg-copy-', dir=os.path.dirname(wdest))
1025 os.close(fd)
1023 os.close(fd)
1026 os.unlink(tempname)
1024 os.unlink(tempname)
1027 os.rename(wdest, tempname)
1025 os.rename(wdest, tempname)
1028 try:
1026 try:
1029 self.run0('copy', source, dest)
1027 self.run0('copy', source, dest)
1030 finally:
1028 finally:
1031 if exists:
1029 if exists:
1032 try:
1030 try:
1033 os.unlink(wdest)
1031 os.unlink(wdest)
1034 except OSError:
1032 except OSError:
1035 pass
1033 pass
1036 os.rename(tempname, wdest)
1034 os.rename(tempname, wdest)
1037
1035
1038 def dirs_of(self, files):
1036 def dirs_of(self, files):
1039 dirs = util.set()
1037 dirs = util.set()
1040 for f in files:
1038 for f in files:
1041 if os.path.isdir(self.wjoin(f)):
1039 if os.path.isdir(self.wjoin(f)):
1042 dirs.add(f)
1040 dirs.add(f)
1043 for i in strutil.rfindall(f, '/'):
1041 for i in strutil.rfindall(f, '/'):
1044 dirs.add(f[:i])
1042 dirs.add(f[:i])
1045 return dirs
1043 return dirs
1046
1044
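dirs_of collects every ancestor directory of the files touched by a commit so that add_dirs can "svn add" the directories before the files themselves. A stand-alone illustration, with strutil.rfindall approximated locally (the real helper lives in mercurial.strutil and is not shown in this diff):

def rfindall(haystack, needle):
    # simplified stand-in: every index where needle occurs
    return [i for i in range(len(haystack)) if haystack.startswith(needle, i)]

f = 'src/pkg/mod.py'
dirs = set()
for i in rfindall(f, '/'):
    dirs.add(f[:i])
assert dirs == set(['src', 'src/pkg'])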
1047 def add_dirs(self, files):
1045 def add_dirs(self, files):
1048 add_dirs = [d for d in self.dirs_of(files)
1046 add_dirs = [d for d in util.sort(self.dirs_of(files))
1049 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1047 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1050 if add_dirs:
1048 if add_dirs:
1051 add_dirs.sort()
1052 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1049 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1053 return add_dirs
1050 return add_dirs
1054
1051
1055 def add_files(self, files):
1052 def add_files(self, files):
1056 if files:
1053 if files:
1057 self.xargs(files, 'add', quiet=True)
1054 self.xargs(files, 'add', quiet=True)
1058 return files
1055 return files
1059
1056
1060 def tidy_dirs(self, names):
1057 def tidy_dirs(self, names):
1061 dirs = list(self.dirs_of(names))
1058 dirs = util.sort(self.dirs_of(names))
1062 dirs.sort()
1063 dirs.reverse()
1059 dirs.reverse()
1064 deleted = []
1060 deleted = []
1065 for d in dirs:
1061 for d in dirs:
1066 wd = self.wjoin(d)
1062 wd = self.wjoin(d)
1067 if os.listdir(wd) == ['.svn']:
1063 if os.listdir(wd) == ['.svn']:
1068 self.run0('delete', d)
1064 self.run0('delete', d)
1069 deleted.append(d)
1065 deleted.append(d)
1070 return deleted
1066 return deleted
1071
1067
1072 def addchild(self, parent, child):
1068 def addchild(self, parent, child):
1073 self.childmap[parent] = child
1069 self.childmap[parent] = child
1074
1070
1075 def revid(self, rev):
1071 def revid(self, rev):
1076 return u"svn:%s@%s" % (self.uuid, rev)
1072 return u"svn:%s@%s" % (self.uuid, rev)
1077
1073
1078 def putcommit(self, files, copies, parents, commit, source):
1074 def putcommit(self, files, copies, parents, commit, source):
1079 # Apply changes to working copy
1075 # Apply changes to working copy
1080 for f, v in files:
1076 for f, v in files:
1081 try:
1077 try:
1082 data = source.getfile(f, v)
1078 data = source.getfile(f, v)
1083 except IOError, inst:
1079 except IOError, inst:
1084 self.delete.append(f)
1080 self.delete.append(f)
1085 else:
1081 else:
1086 e = source.getmode(f, v)
1082 e = source.getmode(f, v)
1087 self.putfile(f, e, data)
1083 self.putfile(f, e, data)
1088 if f in copies:
1084 if f in copies:
1089 self.copies.append([copies[f], f])
1085 self.copies.append([copies[f], f])
1090 files = [f[0] for f in files]
1086 files = [f[0] for f in files]
1091
1087
1092 for parent in parents:
1088 for parent in parents:
1093 try:
1089 try:
1094 return self.revid(self.childmap[parent])
1090 return self.revid(self.childmap[parent])
1095 except KeyError:
1091 except KeyError:
1096 pass
1092 pass
1097 entries = util.set(self.delete)
1093 entries = util.set(self.delete)
1098 files = util.frozenset(files)
1094 files = util.frozenset(files)
1099 entries.update(self.add_dirs(files.difference(entries)))
1095 entries.update(self.add_dirs(files.difference(entries)))
1100 if self.copies:
1096 if self.copies:
1101 for s, d in self.copies:
1097 for s, d in self.copies:
1102 self._copyfile(s, d)
1098 self._copyfile(s, d)
1103 self.copies = []
1099 self.copies = []
1104 if self.delete:
1100 if self.delete:
1105 self.xargs(self.delete, 'delete')
1101 self.xargs(self.delete, 'delete')
1106 self.delete = []
1102 self.delete = []
1107 entries.update(self.add_files(files.difference(entries)))
1103 entries.update(self.add_files(files.difference(entries)))
1108 entries.update(self.tidy_dirs(entries))
1104 entries.update(self.tidy_dirs(entries))
1109 if self.delexec:
1105 if self.delexec:
1110 self.xargs(self.delexec, 'propdel', 'svn:executable')
1106 self.xargs(self.delexec, 'propdel', 'svn:executable')
1111 self.delexec = []
1107 self.delexec = []
1112 if self.setexec:
1108 if self.setexec:
1113 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1109 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1114 self.setexec = []
1110 self.setexec = []
1115
1111
1116 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1112 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1117 fp = os.fdopen(fd, 'w')
1113 fp = os.fdopen(fd, 'w')
1118 fp.write(commit.desc)
1114 fp.write(commit.desc)
1119 fp.close()
1115 fp.close()
1120 try:
1116 try:
1121 output = self.run0('commit',
1117 output = self.run0('commit',
1122 username=util.shortuser(commit.author),
1118 username=util.shortuser(commit.author),
1123 file=messagefile,
1119 file=messagefile,
1124 encoding='utf-8')
1120 encoding='utf-8')
1125 try:
1121 try:
1126 rev = self.commit_re.search(output).group(1)
1122 rev = self.commit_re.search(output).group(1)
1127 except AttributeError:
1123 except AttributeError:
1128 self.ui.warn(_('unexpected svn output:\n'))
1124 self.ui.warn(_('unexpected svn output:\n'))
1129 self.ui.warn(output)
1125 self.ui.warn(output)
1130 raise util.Abort(_('unable to cope with svn output'))
1126 raise util.Abort(_('unable to cope with svn output'))
1131 if commit.rev:
1127 if commit.rev:
1132 self.run('propset', 'hg:convert-rev', commit.rev,
1128 self.run('propset', 'hg:convert-rev', commit.rev,
1133 revprop=True, revision=rev)
1129 revprop=True, revision=rev)
1134 if commit.branch and commit.branch != 'default':
1130 if commit.branch and commit.branch != 'default':
1135 self.run('propset', 'hg:convert-branch', commit.branch,
1131 self.run('propset', 'hg:convert-branch', commit.branch,
1136 revprop=True, revision=rev)
1132 revprop=True, revision=rev)
1137 for parent in parents:
1133 for parent in parents:
1138 self.addchild(parent, rev)
1134 self.addchild(parent, rev)
1139 return self.revid(rev)
1135 return self.revid(rev)
1140 finally:
1136 finally:
1141 os.unlink(messagefile)
1137 os.unlink(messagefile)
1142
1138
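putcommit recovers the new revision number by matching commit_re against the output of svn commit, and aborts rather than guessing when the pattern is missing. A quick stand-alone check of that parsing (the output text is illustrative, not captured from a real run):

import re

commit_re = re.compile(r'Committed revision (\d+).', re.M)
output = ("Sending        foo.py\n"
          "Transmitting file data .\n"
          "Committed revision 42.\n")
m = commit_re.search(output)
assert m is not None and m.group(1) == '42'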
1143 def puttags(self, tags):
1139 def puttags(self, tags):
1144 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
1140 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,327 +1,326 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of
5 # This software may be used and distributed according to the terms of
6 # the GNU General Public License, incorporated herein by reference.
6 # the GNU General Public License, incorporated herein by reference.
7 '''show revision graphs in terminal windows'''
7 '''show revision graphs in terminal windows'''
8
8
9 import os
9 import os
10 import sys
10 import sys
11 from mercurial.cmdutil import revrange, show_changeset
11 from mercurial.cmdutil import revrange, show_changeset
12 from mercurial.commands import templateopts
12 from mercurial.commands import templateopts
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial.node import nullrev
14 from mercurial.node import nullrev
15 from mercurial.util import Abort, canonpath
15 from mercurial.util import Abort, canonpath
16 from mercurial import util
16
17
17 def revision_grapher(repo, start_rev, stop_rev):
18 def revision_grapher(repo, start_rev, stop_rev):
18 """incremental revision grapher
19 """incremental revision grapher
19
20
20 This generator function walks through the revision history from
21 This generator function walks through the revision history from
21 revision start_rev to revision stop_rev (which must be less than
22 revision start_rev to revision stop_rev (which must be less than
22 or equal to start_rev) and for each revision emits tuples with the
23 or equal to start_rev) and for each revision emits tuples with the
23 following elements:
24 following elements:
24
25
25 - Current revision.
26 - Current revision.
26 - Current node.
27 - Current node.
27 - Column of the current node in the set of ongoing edges.
28 - Column of the current node in the set of ongoing edges.
28 - Edges; a list of (col, next_col) indicating the edges between
29 - Edges; a list of (col, next_col) indicating the edges between
29 the current node and its parents.
30 the current node and its parents.
30 - Number of columns (ongoing edges) in the current revision.
31 - Number of columns (ongoing edges) in the current revision.
31 - The difference between the number of columns (ongoing edges)
32 - The difference between the number of columns (ongoing edges)
32 in the next revision and the number of columns (ongoing edges)
33 in the next revision and the number of columns (ongoing edges)
33 in the current revision. That is: -1 means one column removed;
34 in the current revision. That is: -1 means one column removed;
34 0 means no columns added or removed; 1 means one column added.
35 0 means no columns added or removed; 1 means one column added.
35 """
36 """
36
37
37 assert start_rev >= stop_rev
38 assert start_rev >= stop_rev
38 curr_rev = start_rev
39 curr_rev = start_rev
39 revs = []
40 revs = []
40 while curr_rev >= stop_rev:
41 while curr_rev >= stop_rev:
41 node = repo.changelog.node(curr_rev)
42 node = repo.changelog.node(curr_rev)
42
43
43 # Compute revs and next_revs.
44 # Compute revs and next_revs.
44 if curr_rev not in revs:
45 if curr_rev not in revs:
45 # New head.
46 # New head.
46 revs.append(curr_rev)
47 revs.append(curr_rev)
47 rev_index = revs.index(curr_rev)
48 rev_index = revs.index(curr_rev)
48 next_revs = revs[:]
49 next_revs = revs[:]
49
50
50 # Add parents to next_revs.
51 # Add parents to next_revs.
51 parents = get_rev_parents(repo, curr_rev)
52 parents = get_rev_parents(repo, curr_rev)
52 parents_to_add = []
53 parents_to_add = []
53 for parent in parents:
54 for parent in parents:
54 if parent not in next_revs:
55 if parent not in next_revs:
55 parents_to_add.append(parent)
56 parents_to_add.append(parent)
56 parents_to_add.sort()
57 next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add)
57 next_revs[rev_index:rev_index + 1] = parents_to_add
58
58
59 edges = []
59 edges = []
60 for parent in parents:
60 for parent in parents:
61 edges.append((rev_index, next_revs.index(parent)))
61 edges.append((rev_index, next_revs.index(parent)))
62
62
63 n_columns_diff = len(next_revs) - len(revs)
63 n_columns_diff = len(next_revs) - len(revs)
64 yield (curr_rev, node, rev_index, edges, len(revs), n_columns_diff)
64 yield (curr_rev, node, rev_index, edges, len(revs), n_columns_diff)
65
65
66 revs = next_revs
66 revs = next_revs
67 curr_rev -= 1
67 curr_rev -= 1
68
68
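Each tuple yielded by revision_grapher describes one row of the ASCII graph. A minimal consumer sketch (it assumes repo is an already-open localrepository object; the variable names are only illustrative):

start = len(repo) - 1
for rev, node, col, edges, ncols, coldiff in revision_grapher(repo, start, 0):
    # col: column of this node; edges: (col, parent_col) pairs;
    # ncols: current graph width; coldiff: width change for the next row
    print rev, col, edges, ncols, coldiff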
69 def filelog_grapher(repo, path, start_rev, stop_rev):
69 def filelog_grapher(repo, path, start_rev, stop_rev):
70 """incremental file log grapher
70 """incremental file log grapher
71
71
72 This generator function walks through the revision history of a
72 This generator function walks through the revision history of a
73 single file from revision start_rev to revision stop_rev (which must
73 single file from revision start_rev to revision stop_rev (which must
74 be less than or equal to start_rev) and for each revision emits
74 be less than or equal to start_rev) and for each revision emits
75 tuples with the following elements:
75 tuples with the following elements:
76
76
77 - Current revision.
77 - Current revision.
78 - Current node.
78 - Current node.
79 - Column of the current node in the set of ongoing edges.
79 - Column of the current node in the set of ongoing edges.
80 - Edges; a list of (col, next_col) indicating the edges between
80 - Edges; a list of (col, next_col) indicating the edges between
81 the current node and its parents.
81 the current node and its parents.
82 - Number of columns (ongoing edges) in the current revision.
82 - Number of columns (ongoing edges) in the current revision.
83 - The difference between the number of columns (ongoing edges)
83 - The difference between the number of columns (ongoing edges)
84 in the next revision and the number of columns (ongoing edges)
84 in the next revision and the number of columns (ongoing edges)
85 in the current revision. That is: -1 means one column removed;
85 in the current revision. That is: -1 means one column removed;
86 0 means no columns added or removed; 1 means one column added.
86 0 means no columns added or removed; 1 means one column added.
87 """
87 """
88
88
89 assert start_rev >= stop_rev
89 assert start_rev >= stop_rev
90 curr_rev = start_rev
90 curr_rev = start_rev
91 revs = []
91 revs = []
92 filerev = len(repo.file(path)) - 1
92 filerev = len(repo.file(path)) - 1
93 while filerev >= 0:
93 while filerev >= 0:
94 fctx = repo.filectx(path, fileid=filerev)
94 fctx = repo.filectx(path, fileid=filerev)
95
95
96 # Compute revs and next_revs.
96 # Compute revs and next_revs.
97 if filerev not in revs:
97 if filerev not in revs:
98 revs.append(filerev)
98 revs.append(filerev)
99 rev_index = revs.index(filerev)
99 rev_index = revs.index(filerev)
100 next_revs = revs[:]
100 next_revs = revs[:]
101
101
102 # Add parents to next_revs.
102 # Add parents to next_revs.
103 parents = [f.filerev() for f in fctx.parents() if f.path() == path]
103 parents = [f.filerev() for f in fctx.parents() if f.path() == path]
104 parents_to_add = []
104 parents_to_add = []
105 for parent in parents:
105 for parent in parents:
106 if parent not in next_revs:
106 if parent not in next_revs:
107 parents_to_add.append(parent)
107 parents_to_add.append(parent)
108 parents_to_add.sort()
108 next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add)
109 next_revs[rev_index:rev_index + 1] = parents_to_add
110
109
111 edges = []
110 edges = []
112 for parent in parents:
111 for parent in parents:
113 edges.append((rev_index, next_revs.index(parent)))
112 edges.append((rev_index, next_revs.index(parent)))
114
113
115 changerev = fctx.linkrev()
114 changerev = fctx.linkrev()
116 if changerev <= start_rev:
115 if changerev <= start_rev:
117 node = repo.changelog.node(changerev)
116 node = repo.changelog.node(changerev)
118 n_columns_diff = len(next_revs) - len(revs)
117 n_columns_diff = len(next_revs) - len(revs)
119 yield (changerev, node, rev_index, edges, len(revs), n_columns_diff)
118 yield (changerev, node, rev_index, edges, len(revs), n_columns_diff)
120 if changerev <= stop_rev:
119 if changerev <= stop_rev:
121 break
120 break
122 revs = next_revs
121 revs = next_revs
123 filerev -= 1
122 filerev -= 1
124
123
125 def get_rev_parents(repo, rev):
124 def get_rev_parents(repo, rev):
126 return [x for x in repo.changelog.parentrevs(rev) if x != nullrev]
125 return [x for x in repo.changelog.parentrevs(rev) if x != nullrev]
127
126
128 def fix_long_right_edges(edges):
127 def fix_long_right_edges(edges):
129 for (i, (start, end)) in enumerate(edges):
128 for (i, (start, end)) in enumerate(edges):
130 if end > start:
129 if end > start:
131 edges[i] = (start, end + 1)
130 edges[i] = (start, end + 1)
132
131
133 def draw_edges(edges, nodeline, interline):
132 def draw_edges(edges, nodeline, interline):
134 for (start, end) in edges:
133 for (start, end) in edges:
135 if start == end + 1:
134 if start == end + 1:
136 interline[2 * end + 1] = "/"
135 interline[2 * end + 1] = "/"
137 elif start == end - 1:
136 elif start == end - 1:
138 interline[2 * start + 1] = "\\"
137 interline[2 * start + 1] = "\\"
139 elif start == end:
138 elif start == end:
140 interline[2 * start] = "|"
139 interline[2 * start] = "|"
141 else:
140 else:
142 nodeline[2 * end] = "+"
141 nodeline[2 * end] = "+"
143 if start > end:
142 if start > end:
144 (start, end) = (end,start)
143 (start, end) = (end,start)
145 for i in range(2 * start + 1, 2 * end):
144 for i in range(2 * start + 1, 2 * end):
146 if nodeline[i] != "+":
145 if nodeline[i] != "+":
147 nodeline[i] = "-"
146 nodeline[i] = "-"
148
147
149 def format_line(line, level, logstr):
148 def format_line(line, level, logstr):
150 text = "%-*s %s" % (2 * level, "".join(line), logstr)
149 text = "%-*s %s" % (2 * level, "".join(line), logstr)
151 return "%s\n" % text.rstrip()
150 return "%s\n" % text.rstrip()
152
151
153 def get_nodeline_edges_tail(
152 def get_nodeline_edges_tail(
154 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
153 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
155 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
154 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
156 # Still going in the same non-vertical direction.
155 # Still going in the same non-vertical direction.
157 if n_columns_diff == -1:
156 if n_columns_diff == -1:
158 start = max(node_index + 1, p_node_index)
157 start = max(node_index + 1, p_node_index)
159 tail = ["|", " "] * (start - node_index - 1)
158 tail = ["|", " "] * (start - node_index - 1)
160 tail.extend(["/", " "] * (n_columns - start))
159 tail.extend(["/", " "] * (n_columns - start))
161 return tail
160 return tail
162 else:
161 else:
163 return ["\\", " "] * (n_columns - node_index - 1)
162 return ["\\", " "] * (n_columns - node_index - 1)
164 else:
163 else:
165 return ["|", " "] * (n_columns - node_index - 1)
164 return ["|", " "] * (n_columns - node_index - 1)
166
165
167 def get_padding_line(ni, n_columns, edges):
166 def get_padding_line(ni, n_columns, edges):
168 line = []
167 line = []
169 line.extend(["|", " "] * ni)
168 line.extend(["|", " "] * ni)
170 if (ni, ni - 1) in edges or (ni, ni) in edges:
169 if (ni, ni - 1) in edges or (ni, ni) in edges:
171 # (ni, ni - 1) (ni, ni)
170 # (ni, ni - 1) (ni, ni)
172 # | | | | | | | |
171 # | | | | | | | |
173 # +---o | | o---+
172 # +---o | | o---+
174 # | | c | | c | |
173 # | | c | | c | |
175 # | |/ / | |/ /
174 # | |/ / | |/ /
176 # | | | | | |
175 # | | | | | |
177 c = "|"
176 c = "|"
178 else:
177 else:
179 c = " "
178 c = " "
180 line.extend([c, " "])
179 line.extend([c, " "])
181 line.extend(["|", " "] * (n_columns - ni - 1))
180 line.extend(["|", " "] * (n_columns - ni - 1))
182 return line
181 return line
183
182
184 def get_limit(limit_opt):
183 def get_limit(limit_opt):
185 if limit_opt:
184 if limit_opt:
186 try:
185 try:
187 limit = int(limit_opt)
186 limit = int(limit_opt)
188 except ValueError:
187 except ValueError:
189 raise Abort(_("limit must be a positive integer"))
188 raise Abort(_("limit must be a positive integer"))
190 if limit <= 0:
189 if limit <= 0:
191 raise Abort(_("limit must be positive"))
190 raise Abort(_("limit must be positive"))
192 else:
191 else:
193 limit = sys.maxint
192 limit = sys.maxint
194 return limit
193 return limit
195
194
196 def get_revs(repo, rev_opt):
195 def get_revs(repo, rev_opt):
197 if rev_opt:
196 if rev_opt:
198 revs = revrange(repo, rev_opt)
197 revs = revrange(repo, rev_opt)
199 return (max(revs), min(revs))
198 return (max(revs), min(revs))
200 else:
199 else:
201 return (len(repo) - 1, 0)
200 return (len(repo) - 1, 0)
202
201
203 def graphlog(ui, repo, path=None, **opts):
202 def graphlog(ui, repo, path=None, **opts):
204 """show revision history alongside an ASCII revision graph
203 """show revision history alongside an ASCII revision graph
205
204
206 Print a revision history alongside a revision graph drawn with
205 Print a revision history alongside a revision graph drawn with
207 ASCII characters.
206 ASCII characters.
208
207
209 Nodes printed as an @ character are parents of the working
208 Nodes printed as an @ character are parents of the working
210 directory.
209 directory.
211 """
210 """
212
211
213 limit = get_limit(opts["limit"])
212 limit = get_limit(opts["limit"])
214 (start_rev, stop_rev) = get_revs(repo, opts["rev"])
213 (start_rev, stop_rev) = get_revs(repo, opts["rev"])
215 stop_rev = max(stop_rev, start_rev - limit + 1)
214 stop_rev = max(stop_rev, start_rev - limit + 1)
216 if start_rev == nullrev:
215 if start_rev == nullrev:
217 return
216 return
218 cs_printer = show_changeset(ui, repo, opts)
217 cs_printer = show_changeset(ui, repo, opts)
219 if path:
218 if path:
220 cpath = canonpath(repo.root, os.getcwd(), path)
219 cpath = canonpath(repo.root, os.getcwd(), path)
221 grapher = filelog_grapher(repo, cpath, start_rev, stop_rev)
220 grapher = filelog_grapher(repo, cpath, start_rev, stop_rev)
222 else:
221 else:
223 grapher = revision_grapher(repo, start_rev, stop_rev)
222 grapher = revision_grapher(repo, start_rev, stop_rev)
224 repo_parents = repo.dirstate.parents()
223 repo_parents = repo.dirstate.parents()
225 prev_n_columns_diff = 0
224 prev_n_columns_diff = 0
226 prev_node_index = 0
225 prev_node_index = 0
227
226
228 for (rev, node, node_index, edges, n_columns, n_columns_diff) in grapher:
227 for (rev, node, node_index, edges, n_columns, n_columns_diff) in grapher:
229 # log_strings is the list of all log strings to draw alongside
228 # log_strings is the list of all log strings to draw alongside
230 # the graph.
229 # the graph.
231 ui.pushbuffer()
230 ui.pushbuffer()
232 cs_printer.show(rev, node)
231 cs_printer.show(rev, node)
233 log_strings = ui.popbuffer().split("\n")[:-1]
232 log_strings = ui.popbuffer().split("\n")[:-1]
234
233
235 if n_columns_diff == -1:
234 if n_columns_diff == -1:
236 # Transform
235 # Transform
237 #
236 #
238 # | | | | | |
237 # | | | | | |
239 # o | | into o---+
238 # o | | into o---+
240 # |X / |/ /
239 # |X / |/ /
241 # | | | |
240 # | | | |
242 fix_long_right_edges(edges)
241 fix_long_right_edges(edges)
243
242
244 # add_padding_line says whether to rewrite
243 # add_padding_line says whether to rewrite
245 #
244 #
246 # | | | | | | | |
245 # | | | | | | | |
247 # | o---+ into | o---+
246 # | o---+ into | o---+
248 # | / / | | | # <--- padding line
247 # | / / | | | # <--- padding line
249 # o | | | / /
248 # o | | | / /
250 # o | |
249 # o | |
251 add_padding_line = (len(log_strings) > 2 and
250 add_padding_line = (len(log_strings) > 2 and
252 n_columns_diff == -1 and
251 n_columns_diff == -1 and
253 [x for (x, y) in edges if x + 1 < y])
252 [x for (x, y) in edges if x + 1 < y])
254
253
255 # fix_nodeline_tail says whether to rewrite
254 # fix_nodeline_tail says whether to rewrite
256 #
255 #
257 # | | o | | | | o | |
256 # | | o | | | | o | |
258 # | | |/ / | | |/ /
257 # | | |/ / | | |/ /
259 # | o | | into | o / / # <--- fixed nodeline tail
258 # | o | | into | o / / # <--- fixed nodeline tail
260 # | |/ / | |/ /
259 # | |/ / | |/ /
261 # o | | o | |
260 # o | | o | |
262 fix_nodeline_tail = len(log_strings) <= 2 and not add_padding_line
261 fix_nodeline_tail = len(log_strings) <= 2 and not add_padding_line
263
262
264 # nodeline is the line containing the node character (@ or o).
263 # nodeline is the line containing the node character (@ or o).
265 nodeline = ["|", " "] * node_index
264 nodeline = ["|", " "] * node_index
266 if node in repo_parents:
265 if node in repo_parents:
267 node_ch = "@"
266 node_ch = "@"
268 else:
267 else:
269 node_ch = "o"
268 node_ch = "o"
270 nodeline.extend([node_ch, " "])
269 nodeline.extend([node_ch, " "])
271
270
272 nodeline.extend(
271 nodeline.extend(
273 get_nodeline_edges_tail(
272 get_nodeline_edges_tail(
274 node_index, prev_node_index, n_columns, n_columns_diff,
273 node_index, prev_node_index, n_columns, n_columns_diff,
275 prev_n_columns_diff, fix_nodeline_tail))
274 prev_n_columns_diff, fix_nodeline_tail))
276
275
277 # shift_interline is the line containing the non-vertical
276 # shift_interline is the line containing the non-vertical
278 # edges between this entry and the next.
277 # edges between this entry and the next.
279 shift_interline = ["|", " "] * node_index
278 shift_interline = ["|", " "] * node_index
280 if n_columns_diff == -1:
279 if n_columns_diff == -1:
281 n_spaces = 1
280 n_spaces = 1
282 edge_ch = "/"
281 edge_ch = "/"
283 elif n_columns_diff == 0:
282 elif n_columns_diff == 0:
284 n_spaces = 2
283 n_spaces = 2
285 edge_ch = "|"
284 edge_ch = "|"
286 else:
285 else:
287 n_spaces = 3
286 n_spaces = 3
288 edge_ch = "\\"
287 edge_ch = "\\"
289 shift_interline.extend(n_spaces * [" "])
288 shift_interline.extend(n_spaces * [" "])
290 shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
289 shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
291
290
292 # Draw edges from the current node to its parents.
291 # Draw edges from the current node to its parents.
293 draw_edges(edges, nodeline, shift_interline)
292 draw_edges(edges, nodeline, shift_interline)
294
293
295 # lines is the list of all graph lines to print.
294 # lines is the list of all graph lines to print.
296 lines = [nodeline]
295 lines = [nodeline]
297 if add_padding_line:
296 if add_padding_line:
298 lines.append(get_padding_line(node_index, n_columns, edges))
297 lines.append(get_padding_line(node_index, n_columns, edges))
299 lines.append(shift_interline)
298 lines.append(shift_interline)
300
299
301 # Make sure that there are as many graph lines as there are
300 # Make sure that there are as many graph lines as there are
302 # log strings.
301 # log strings.
303 while len(log_strings) < len(lines):
302 while len(log_strings) < len(lines):
304 log_strings.append("")
303 log_strings.append("")
305 if len(lines) < len(log_strings):
304 if len(lines) < len(log_strings):
306 extra_interline = ["|", " "] * (n_columns + n_columns_diff)
305 extra_interline = ["|", " "] * (n_columns + n_columns_diff)
307 while len(lines) < len(log_strings):
306 while len(lines) < len(log_strings):
308 lines.append(extra_interline)
307 lines.append(extra_interline)
309
308
310 # Print lines.
309 # Print lines.
311 indentation_level = max(n_columns, n_columns + n_columns_diff)
310 indentation_level = max(n_columns, n_columns + n_columns_diff)
312 for (line, logstr) in zip(lines, log_strings):
311 for (line, logstr) in zip(lines, log_strings):
313 ui.write(format_line(line, indentation_level, logstr))
312 ui.write(format_line(line, indentation_level, logstr))
314
313
315 # ...and start over.
314 # ...and start over.
316 prev_node_index = node_index
315 prev_node_index = node_index
317 prev_n_columns_diff = n_columns_diff
316 prev_n_columns_diff = n_columns_diff
318
317
319 cmdtable = {
318 cmdtable = {
320 "glog":
319 "glog":
321 (graphlog,
320 (graphlog,
322 [('l', 'limit', '', _('limit number of changes displayed')),
321 [('l', 'limit', '', _('limit number of changes displayed')),
323 ('p', 'patch', False, _('show patch')),
322 ('p', 'patch', False, _('show patch')),
324 ('r', 'rev', [], _('show the specified revision or range')),
323 ('r', 'rev', [], _('show the specified revision or range')),
325 ] + templateopts,
324 ] + templateopts,
326 _('hg glog [OPTION]... [FILE]')),
325 _('hg glog [OPTION]... [FILE]')),
327 }
326 }
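For the limit handling in graphlog: get_limit turns --limit into an integer (or sys.maxint), get_revs picks the revision window, and the clamp stop_rev = max(stop_rev, start_rev - limit + 1) keeps at most limit rows. With illustrative numbers:

# 100 changesets (tip is rev 99), no -r option, --limit 10
start_rev, stop_rev, limit = 99, 0, 10
stop_rev = max(stop_rev, start_rev - limit + 1)
assert stop_rev == 90    # revisions 99 down to 90 are graphed, i.e. 10 rows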
@@ -1,717 +1,715 b''
1 # server.py - inotify status server
1 # server.py - inotify status server
2 #
2 #
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from mercurial.i18n import gettext as _
9 from mercurial.i18n import gettext as _
10 from mercurial import osutil, ui, util
10 from mercurial import osutil, ui, util
11 import common
11 import common
12 import errno, os, select, socket, stat, struct, sys, time
12 import errno, os, select, socket, stat, struct, sys, time
13
13
14 try:
14 try:
15 import hgext.inotify.linux as inotify
15 import hgext.inotify.linux as inotify
16 from hgext.inotify.linux import watcher
16 from hgext.inotify.linux import watcher
17 except ImportError:
17 except ImportError:
18 print >> sys.stderr, '*** native support is required for this extension'
18 print >> sys.stderr, '*** native support is required for this extension'
19 raise
19 raise
20
20
21 class AlreadyStartedException(Exception): pass
21 class AlreadyStartedException(Exception): pass
22
22
23 def join(a, b):
23 def join(a, b):
24 if a:
24 if a:
25 if a[-1] == '/':
25 if a[-1] == '/':
26 return a + b
26 return a + b
27 return a + '/' + b
27 return a + '/' + b
28 return b
28 return b
29
29
30 walk_ignored_errors = (errno.ENOENT, errno.ENAMETOOLONG)
30 walk_ignored_errors = (errno.ENOENT, errno.ENAMETOOLONG)
31
31
32 def walkrepodirs(repo):
32 def walkrepodirs(repo):
33 '''Iterate over all subdirectories of this repo.
33 '''Iterate over all subdirectories of this repo.
34 Exclude the .hg directory, any nested repos, and ignored dirs.'''
34 Exclude the .hg directory, any nested repos, and ignored dirs.'''
35 rootslash = repo.root + os.sep
35 rootslash = repo.root + os.sep
36 def walkit(dirname, top):
36 def walkit(dirname, top):
37 hginside = False
37 hginside = False
38 try:
38 try:
39 for name, kind in osutil.listdir(rootslash + dirname):
39 for name, kind in osutil.listdir(rootslash + dirname):
40 if kind == stat.S_IFDIR:
40 if kind == stat.S_IFDIR:
41 if name == '.hg':
41 if name == '.hg':
42 hginside = True
42 hginside = True
43 if not top: break
43 if not top: break
44 else:
44 else:
45 d = join(dirname, name)
45 d = join(dirname, name)
46 if repo.dirstate._ignore(d):
46 if repo.dirstate._ignore(d):
47 continue
47 continue
48 for subdir, hginsub in walkit(d, False):
48 for subdir, hginsub in walkit(d, False):
49 if not hginsub:
49 if not hginsub:
50 yield subdir, False
50 yield subdir, False
51 except OSError, err:
51 except OSError, err:
52 if err.errno not in walk_ignored_errors:
52 if err.errno not in walk_ignored_errors:
53 raise
53 raise
54 yield rootslash + dirname, hginside
54 yield rootslash + dirname, hginside
55 for dirname, hginside in walkit('', True):
55 for dirname, hginside in walkit('', True):
56 yield dirname
56 yield dirname
57
57
58 def walk(repo, root):
58 def walk(repo, root):
59 '''Like os.walk, but only yields regular files.'''
59 '''Like os.walk, but only yields regular files.'''
60
60
61 # This function is critical to performance during startup.
61 # This function is critical to performance during startup.
62
62
63 reporoot = root == ''
63 reporoot = root == ''
64 rootslash = repo.root + os.sep
64 rootslash = repo.root + os.sep
65
65
66 def walkit(root, reporoot):
66 def walkit(root, reporoot):
67 files, dirs = [], []
67 files, dirs = [], []
68 hginside = False
68 hginside = False
69
69
70 try:
70 try:
71 fullpath = rootslash + root
71 fullpath = rootslash + root
72 for name, kind in osutil.listdir(fullpath):
72 for name, kind in osutil.listdir(fullpath):
73 if kind == stat.S_IFDIR:
73 if kind == stat.S_IFDIR:
74 if name == '.hg':
74 if name == '.hg':
75 hginside = True
75 hginside = True
76 if reporoot:
76 if reporoot:
77 continue
77 continue
78 else:
78 else:
79 break
79 break
80 dirs.append(name)
80 dirs.append(name)
81 elif kind in (stat.S_IFREG, stat.S_IFLNK):
81 elif kind in (stat.S_IFREG, stat.S_IFLNK):
82 path = join(root, name)
82 path = join(root, name)
83 files.append((name, kind))
83 files.append((name, kind))
84
84
85 yield hginside, fullpath, dirs, files
85 yield hginside, fullpath, dirs, files
86
86
87 for subdir in dirs:
87 for subdir in dirs:
88 path = join(root, subdir)
88 path = join(root, subdir)
89 if repo.dirstate._ignore(path):
89 if repo.dirstate._ignore(path):
90 continue
90 continue
91 for result in walkit(path, False):
91 for result in walkit(path, False):
92 if not result[0]:
92 if not result[0]:
93 yield result
93 yield result
94 except OSError, err:
94 except OSError, err:
95 if err.errno not in walk_ignored_errors:
95 if err.errno not in walk_ignored_errors:
96 raise
96 raise
97 for result in walkit(root, reporoot):
97 for result in walkit(root, reporoot):
98 yield result[1:]
98 yield result[1:]
99
99
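walk() strips the hginside flag before yielding, so callers see (fullpath, dirs, files) triples in which files holds (name, kind) pairs. A minimal consumer sketch (it assumes repo is an open repository object; the formatting is only illustrative):

for fullpath, dirs, files in walk(repo, ''):
    for name, kind in files:
        print '%s/%s' % (fullpath, name)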
100 def _explain_watch_limit(ui, repo, count):
100 def _explain_watch_limit(ui, repo, count):
101 path = '/proc/sys/fs/inotify/max_user_watches'
101 path = '/proc/sys/fs/inotify/max_user_watches'
102 try:
102 try:
103 limit = int(file(path).read())
103 limit = int(file(path).read())
104 except IOError, err:
104 except IOError, err:
105 if err.errno != errno.ENOENT:
105 if err.errno != errno.ENOENT:
106 raise
106 raise
107 raise util.Abort(_('this system does not seem to '
107 raise util.Abort(_('this system does not seem to '
108 'support inotify'))
108 'support inotify'))
109 ui.warn(_('*** the current per-user limit on the number '
109 ui.warn(_('*** the current per-user limit on the number '
110 'of inotify watches is %s\n') % limit)
110 'of inotify watches is %s\n') % limit)
111 ui.warn(_('*** this limit is too low to watch every '
111 ui.warn(_('*** this limit is too low to watch every '
112 'directory in this repository\n'))
112 'directory in this repository\n'))
113 ui.warn(_('*** counting directories: '))
113 ui.warn(_('*** counting directories: '))
114 ndirs = len(list(walkrepodirs(repo)))
114 ndirs = len(list(walkrepodirs(repo)))
115 ui.warn(_('found %d\n') % ndirs)
115 ui.warn(_('found %d\n') % ndirs)
116 newlimit = min(limit, 1024)
116 newlimit = min(limit, 1024)
117 while newlimit < ((limit + ndirs) * 1.1):
117 while newlimit < ((limit + ndirs) * 1.1):
118 newlimit *= 2
118 newlimit *= 2
119 ui.warn(_('*** to raise the limit from %d to %d (run as root):\n') %
119 ui.warn(_('*** to raise the limit from %d to %d (run as root):\n') %
120 (limit, newlimit))
120 (limit, newlimit))
121 ui.warn(_('*** echo %d > %s\n') % (newlimit, path))
121 ui.warn(_('*** echo %d > %s\n') % (newlimit, path))
122 raise util.Abort(_('cannot watch %s until inotify watch limit is raised')
122 raise util.Abort(_('cannot watch %s until inotify watch limit is raised')
123 % repo.root)
123 % repo.root)
124
124
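The doubling loop in _explain_watch_limit picks a suggested limit that comfortably clears the number of directories to be watched. With illustrative numbers (a current limit of 8192 watches and 20000 repository directories):

limit, ndirs = 8192, 20000
newlimit = min(limit, 1024)
while newlimit < ((limit + ndirs) * 1.1):
    newlimit *= 2
assert newlimit == 32768
# so the hint printed above would be: echo 32768 > /proc/sys/fs/inotify/max_user_watches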
125 class Watcher(object):
125 class Watcher(object):
126 poll_events = select.POLLIN
126 poll_events = select.POLLIN
127 statuskeys = 'almr!?'
127 statuskeys = 'almr!?'
128
128
129 def __init__(self, ui, repo, master):
129 def __init__(self, ui, repo, master):
130 self.ui = ui
130 self.ui = ui
131 self.repo = repo
131 self.repo = repo
132 self.wprefix = self.repo.wjoin('')
132 self.wprefix = self.repo.wjoin('')
133 self.timeout = None
133 self.timeout = None
134 self.master = master
134 self.master = master
135 self.mask = (
135 self.mask = (
136 inotify.IN_ATTRIB |
136 inotify.IN_ATTRIB |
137 inotify.IN_CREATE |
137 inotify.IN_CREATE |
138 inotify.IN_DELETE |
138 inotify.IN_DELETE |
139 inotify.IN_DELETE_SELF |
139 inotify.IN_DELETE_SELF |
140 inotify.IN_MODIFY |
140 inotify.IN_MODIFY |
141 inotify.IN_MOVED_FROM |
141 inotify.IN_MOVED_FROM |
142 inotify.IN_MOVED_TO |
142 inotify.IN_MOVED_TO |
143 inotify.IN_MOVE_SELF |
143 inotify.IN_MOVE_SELF |
144 inotify.IN_ONLYDIR |
144 inotify.IN_ONLYDIR |
145 inotify.IN_UNMOUNT |
145 inotify.IN_UNMOUNT |
146 0)
146 0)
147 try:
147 try:
148 self.watcher = watcher.Watcher()
148 self.watcher = watcher.Watcher()
149 except OSError, err:
149 except OSError, err:
150 raise util.Abort(_('inotify service not available: %s') %
150 raise util.Abort(_('inotify service not available: %s') %
151 err.strerror)
151 err.strerror)
152 self.threshold = watcher.Threshold(self.watcher)
152 self.threshold = watcher.Threshold(self.watcher)
153 self.registered = True
153 self.registered = True
154 self.fileno = self.watcher.fileno
154 self.fileno = self.watcher.fileno
155
155
156 self.repo.dirstate.__class__.inotifyserver = True
156 self.repo.dirstate.__class__.inotifyserver = True
157
157
158 self.tree = {}
158 self.tree = {}
159 self.statcache = {}
159 self.statcache = {}
160 self.statustrees = dict([(s, {}) for s in self.statuskeys])
160 self.statustrees = dict([(s, {}) for s in self.statuskeys])
161
161
162 self.watches = 0
162 self.watches = 0
163 self.last_event = None
163 self.last_event = None
164
164
165 self.eventq = {}
165 self.eventq = {}
166 self.deferred = 0
166 self.deferred = 0
167
167
168 self.ds_info = self.dirstate_info()
168 self.ds_info = self.dirstate_info()
169 self.scan()
169 self.scan()
170
170
171 def event_time(self):
171 def event_time(self):
172 last = self.last_event
172 last = self.last_event
173 now = time.time()
173 now = time.time()
174 self.last_event = now
174 self.last_event = now
175
175
176 if last is None:
176 if last is None:
177 return 'start'
177 return 'start'
178 delta = now - last
178 delta = now - last
179 if delta < 5:
179 if delta < 5:
180 return '+%.3f' % delta
180 return '+%.3f' % delta
181 if delta < 50:
181 if delta < 50:
182 return '+%.2f' % delta
182 return '+%.2f' % delta
183 return '+%.1f' % delta
183 return '+%.1f' % delta
184
184
185 def dirstate_info(self):
185 def dirstate_info(self):
186 try:
186 try:
187 st = os.lstat(self.repo.join('dirstate'))
187 st = os.lstat(self.repo.join('dirstate'))
188 return st.st_mtime, st.st_ino
188 return st.st_mtime, st.st_ino
189 except OSError, err:
189 except OSError, err:
190 if err.errno != errno.ENOENT:
190 if err.errno != errno.ENOENT:
191 raise
191 raise
192 return 0, 0
192 return 0, 0
193
193
194 def add_watch(self, path, mask):
194 def add_watch(self, path, mask):
195 if not path:
195 if not path:
196 return
196 return
197 if self.watcher.path(path) is None:
197 if self.watcher.path(path) is None:
198 if self.ui.debugflag:
198 if self.ui.debugflag:
199 self.ui.note(_('watching %r\n') % path[len(self.wprefix):])
199 self.ui.note(_('watching %r\n') % path[len(self.wprefix):])
200 try:
200 try:
201 self.watcher.add(path, mask)
201 self.watcher.add(path, mask)
202 self.watches += 1
202 self.watches += 1
203 except OSError, err:
203 except OSError, err:
204 if err.errno in (errno.ENOENT, errno.ENOTDIR):
204 if err.errno in (errno.ENOENT, errno.ENOTDIR):
205 return
205 return
206 if err.errno != errno.ENOSPC:
206 if err.errno != errno.ENOSPC:
207 raise
207 raise
208 _explain_watch_limit(self.ui, self.repo, self.watches)
208 _explain_watch_limit(self.ui, self.repo, self.watches)
209
209
210 def setup(self):
210 def setup(self):
211 self.ui.note(_('watching directories under %r\n') % self.repo.root)
211 self.ui.note(_('watching directories under %r\n') % self.repo.root)
212 self.add_watch(self.repo.path, inotify.IN_DELETE)
212 self.add_watch(self.repo.path, inotify.IN_DELETE)
213 self.check_dirstate()
213 self.check_dirstate()
214
214
215 def wpath(self, evt):
215 def wpath(self, evt):
216 path = evt.fullpath
216 path = evt.fullpath
217 if path == self.repo.root:
217 if path == self.repo.root:
218 return ''
218 return ''
219 if path.startswith(self.wprefix):
219 if path.startswith(self.wprefix):
220 return path[len(self.wprefix):]
220 return path[len(self.wprefix):]
221 raise 'wtf? ' + path
221 raise 'wtf? ' + path
222
222
223 def dir(self, tree, path):
223 def dir(self, tree, path):
224 if path:
224 if path:
225 for name in path.split('/'):
225 for name in path.split('/'):
226 tree.setdefault(name, {})
226 tree.setdefault(name, {})
227 tree = tree[name]
227 tree = tree[name]
228 return tree
228 return tree
229
229
230 def lookup(self, path, tree):
230 def lookup(self, path, tree):
231 if path:
231 if path:
232 try:
232 try:
233 for name in path.split('/'):
233 for name in path.split('/'):
234 tree = tree[name]
234 tree = tree[name]
235 except KeyError:
235 except KeyError:
236 return 'x'
236 return 'x'
237 except TypeError:
237 except TypeError:
238 return 'd'
238 return 'd'
239 return tree
239 return tree
240
240
241 def split(self, path):
241 def split(self, path):
242 c = path.rfind('/')
242 c = path.rfind('/')
243 if c == -1:
243 if c == -1:
244 return '', path
244 return '', path
245 return path[:c], path[c+1:]
245 return path[:c], path[c+1:]
246
246
247 def filestatus(self, fn, st):
247 def filestatus(self, fn, st):
248 try:
248 try:
249 type_, mode, size, time = self.repo.dirstate._map[fn][:4]
249 type_, mode, size, time = self.repo.dirstate._map[fn][:4]
250 except KeyError:
250 except KeyError:
251 type_ = '?'
251 type_ = '?'
252 if type_ == 'n':
252 if type_ == 'n':
253 if not st:
253 if not st:
254 return '!'
254 return '!'
255 st_mode, st_size, st_mtime = st
255 st_mode, st_size, st_mtime = st
256 if size and (size != st_size or (mode ^ st_mode) & 0100):
256 if size and (size != st_size or (mode ^ st_mode) & 0100):
257 return 'm'
257 return 'm'
258 if time != int(st_mtime):
258 if time != int(st_mtime):
259 return 'l'
259 return 'l'
260 return 'n'
260 return 'n'
261 if type_ in 'ma' and not st:
261 if type_ in 'ma' and not st:
262 return '!'
262 return '!'
263 if type_ == '?' and self.repo.dirstate._ignore(fn):
263 if type_ == '?' and self.repo.dirstate._ignore(fn):
264 return 'i'
264 return 'i'
265 return type_
265 return type_
266
266
267 def updatestatus(self, wfn, st=None, status=None, oldstatus=None):
267 def updatestatus(self, wfn, st=None, status=None, oldstatus=None):
268 if st:
268 if st:
269 status = self.filestatus(wfn, st)
269 status = self.filestatus(wfn, st)
270 else:
270 else:
271 self.statcache.pop(wfn, None)
271 self.statcache.pop(wfn, None)
272 root, fn = self.split(wfn)
272 root, fn = self.split(wfn)
273 d = self.dir(self.tree, root)
273 d = self.dir(self.tree, root)
274 if oldstatus is None:
274 if oldstatus is None:
275 oldstatus = d.get(fn)
275 oldstatus = d.get(fn)
276 isdir = False
276 isdir = False
277 if oldstatus:
277 if oldstatus:
278 try:
278 try:
279 if not status:
279 if not status:
280 if oldstatus in 'almn':
280 if oldstatus in 'almn':
281 status = '!'
281 status = '!'
282 elif oldstatus == 'r':
282 elif oldstatus == 'r':
283 status = 'r'
283 status = 'r'
284 except TypeError:
284 except TypeError:
285 # oldstatus may be a dict left behind by a deleted
285 # oldstatus may be a dict left behind by a deleted
286 # directory
286 # directory
287 isdir = True
287 isdir = True
288 else:
288 else:
289 if oldstatus in self.statuskeys and oldstatus != status:
289 if oldstatus in self.statuskeys and oldstatus != status:
290 del self.dir(self.statustrees[oldstatus], root)[fn]
290 del self.dir(self.statustrees[oldstatus], root)[fn]
291 if self.ui.debugflag and oldstatus != status:
291 if self.ui.debugflag and oldstatus != status:
292 if isdir:
292 if isdir:
293 self.ui.note('status: %r dir(%d) -> %s\n' %
293 self.ui.note('status: %r dir(%d) -> %s\n' %
294 (wfn, len(oldstatus), status))
294 (wfn, len(oldstatus), status))
295 else:
295 else:
296 self.ui.note('status: %r %s -> %s\n' %
296 self.ui.note('status: %r %s -> %s\n' %
297 (wfn, oldstatus, status))
297 (wfn, oldstatus, status))
298 if not isdir:
298 if not isdir:
299 if status and status != 'i':
299 if status and status != 'i':
300 d[fn] = status
300 d[fn] = status
301 if status in self.statuskeys:
301 if status in self.statuskeys:
302 dd = self.dir(self.statustrees[status], root)
302 dd = self.dir(self.statustrees[status], root)
303 if oldstatus != status or fn not in dd:
303 if oldstatus != status or fn not in dd:
304 dd[fn] = status
304 dd[fn] = status
305 else:
305 else:
306 d.pop(fn, None)
306 d.pop(fn, None)
307
307
308 def check_deleted(self, key):
308 def check_deleted(self, key):
309 # Files that had been deleted but were present in the dirstate
309 # Files that had been deleted but were present in the dirstate
310 # may have vanished from the dirstate; we must clean them up.
310 # may have vanished from the dirstate; we must clean them up.
311 nuke = []
311 nuke = []
312 for wfn, ignore in self.walk(key, self.statustrees[key]):
312 for wfn, ignore in self.walk(key, self.statustrees[key]):
313 if wfn not in self.repo.dirstate:
313 if wfn not in self.repo.dirstate:
314 nuke.append(wfn)
314 nuke.append(wfn)
315 for wfn in nuke:
315 for wfn in nuke:
316 root, fn = self.split(wfn)
316 root, fn = self.split(wfn)
317 del self.dir(self.statustrees[key], root)[fn]
317 del self.dir(self.statustrees[key], root)[fn]
318 del self.dir(self.tree, root)[fn]
318 del self.dir(self.tree, root)[fn]
319
319
320 def scan(self, topdir=''):
320 def scan(self, topdir=''):
321 self.handle_timeout()
321 self.handle_timeout()
322 ds = self.repo.dirstate._map.copy()
322 ds = self.repo.dirstate._map.copy()
323 self.add_watch(join(self.repo.root, topdir), self.mask)
323 self.add_watch(join(self.repo.root, topdir), self.mask)
324 for root, dirs, entries in walk(self.repo, topdir):
324 for root, dirs, entries in walk(self.repo, topdir):
325 for d in dirs:
325 for d in dirs:
326 self.add_watch(join(root, d), self.mask)
326 self.add_watch(join(root, d), self.mask)
327 wroot = root[len(self.wprefix):]
327 wroot = root[len(self.wprefix):]
328 d = self.dir(self.tree, wroot)
328 d = self.dir(self.tree, wroot)
329 for fn, kind in entries:
329 for fn, kind in entries:
330 wfn = join(wroot, fn)
330 wfn = join(wroot, fn)
331 self.updatestatus(wfn, self.getstat(wfn))
331 self.updatestatus(wfn, self.getstat(wfn))
332 ds.pop(wfn, None)
332 ds.pop(wfn, None)
333 wtopdir = topdir
333 wtopdir = topdir
334 if wtopdir and wtopdir[-1] != '/':
334 if wtopdir and wtopdir[-1] != '/':
335 wtopdir += '/'
335 wtopdir += '/'
336 for wfn, state in ds.iteritems():
336 for wfn, state in ds.iteritems():
337 if not wfn.startswith(wtopdir):
337 if not wfn.startswith(wtopdir):
338 continue
338 continue
339 status = state[0]
339 status = state[0]
340 st = self.getstat(wfn)
340 st = self.getstat(wfn)
341 if status == 'r' and not st:
341 if status == 'r' and not st:
342 self.updatestatus(wfn, st, status=status)
342 self.updatestatus(wfn, st, status=status)
343 else:
343 else:
344 self.updatestatus(wfn, st, oldstatus=status)
344 self.updatestatus(wfn, st, oldstatus=status)
345 self.check_deleted('!')
345 self.check_deleted('!')
346 self.check_deleted('r')
346 self.check_deleted('r')
347
347
348 def check_dirstate(self):
348 def check_dirstate(self):
349 ds_info = self.dirstate_info()
349 ds_info = self.dirstate_info()
350 if ds_info == self.ds_info:
350 if ds_info == self.ds_info:
351 return
351 return
352 self.ds_info = ds_info
352 self.ds_info = ds_info
353 if not self.ui.debugflag:
353 if not self.ui.debugflag:
354 self.last_event = None
354 self.last_event = None
355 self.ui.note(_('%s dirstate reload\n') % self.event_time())
355 self.ui.note(_('%s dirstate reload\n') % self.event_time())
356 self.repo.dirstate.invalidate()
356 self.repo.dirstate.invalidate()
357 self.scan()
357 self.scan()
358 self.ui.note(_('%s end dirstate reload\n') % self.event_time())
358 self.ui.note(_('%s end dirstate reload\n') % self.event_time())
359
359
360 def walk(self, states, tree, prefix=''):
360 def walk(self, states, tree, prefix=''):
361 # This is the "inner loop" when talking to the client.
361 # This is the "inner loop" when talking to the client.
362
362
363 for name, val in tree.iteritems():
363 for name, val in tree.iteritems():
364 path = join(prefix, name)
364 path = join(prefix, name)
365 try:
365 try:
366 if val in states:
366 if val in states:
367 yield path, val
367 yield path, val
368 except TypeError:
368 except TypeError:
369 for p in self.walk(states, val, path):
369 for p in self.walk(states, val, path):
370 yield p
370 yield p
371
371
372 def update_hgignore(self):
372 def update_hgignore(self):
373 # An update of the ignore file can potentially change the
373 # An update of the ignore file can potentially change the
374 # states of all unknown and ignored files.
374 # states of all unknown and ignored files.
375
375
376 # XXX If the user has other ignore files outside the repo, or
376 # XXX If the user has other ignore files outside the repo, or
377 # changes their list of ignore files at run time, we'll
377 # changes their list of ignore files at run time, we'll
378 # potentially never see changes to them. We could get the
378 # potentially never see changes to them. We could get the
379 # client to report to us what ignore data they're using.
379 # client to report to us what ignore data they're using.
380 # But it's easier to do nothing than to open that can of
380 # But it's easier to do nothing than to open that can of
381 # worms.
381 # worms.
382
382
383 if self.repo.dirstate.ignorefunc is not None:
383 if self.repo.dirstate.ignorefunc is not None:
384 self.repo.dirstate.ignorefunc = None
384 self.repo.dirstate.ignorefunc = None
385 self.ui.note('rescanning due to .hgignore change\n')
385 self.ui.note('rescanning due to .hgignore change\n')
386 self.scan()
386 self.scan()
387
387
388 def getstat(self, wpath):
388 def getstat(self, wpath):
389 try:
389 try:
390 return self.statcache[wpath]
390 return self.statcache[wpath]
391 except KeyError:
391 except KeyError:
392 try:
392 try:
393 return self.stat(wpath)
393 return self.stat(wpath)
394 except OSError, err:
394 except OSError, err:
395 if err.errno != errno.ENOENT:
395 if err.errno != errno.ENOENT:
396 raise
396 raise
397
397
398 def stat(self, wpath):
398 def stat(self, wpath):
399 try:
399 try:
400 st = os.lstat(join(self.wprefix, wpath))
400 st = os.lstat(join(self.wprefix, wpath))
401 ret = st.st_mode, st.st_size, st.st_mtime
401 ret = st.st_mode, st.st_size, st.st_mtime
402 self.statcache[wpath] = ret
402 self.statcache[wpath] = ret
403 return ret
403 return ret
404 except OSError, err:
404 except OSError, err:
405 self.statcache.pop(wpath, None)
405 self.statcache.pop(wpath, None)
406 raise
406 raise
407
407
408 def created(self, wpath):
408 def created(self, wpath):
409 if wpath == '.hgignore':
409 if wpath == '.hgignore':
410 self.update_hgignore()
410 self.update_hgignore()
411 try:
411 try:
412 st = self.stat(wpath)
412 st = self.stat(wpath)
413 if stat.S_ISREG(st[0]):
413 if stat.S_ISREG(st[0]):
414 self.updatestatus(wpath, st)
414 self.updatestatus(wpath, st)
415 except OSError, err:
415 except OSError, err:
416 pass
416 pass
417
417
418 def modified(self, wpath):
418 def modified(self, wpath):
419 if wpath == '.hgignore':
419 if wpath == '.hgignore':
420 self.update_hgignore()
420 self.update_hgignore()
421 try:
421 try:
422 st = self.stat(wpath)
422 st = self.stat(wpath)
423 if stat.S_ISREG(st[0]):
423 if stat.S_ISREG(st[0]):
424 if self.repo.dirstate[wpath] in 'lmn':
424 if self.repo.dirstate[wpath] in 'lmn':
425 self.updatestatus(wpath, st)
425 self.updatestatus(wpath, st)
426 except OSError:
426 except OSError:
427 pass
427 pass
428
428
429 def deleted(self, wpath):
429 def deleted(self, wpath):
430 if wpath == '.hgignore':
430 if wpath == '.hgignore':
431 self.update_hgignore()
431 self.update_hgignore()
432 elif wpath.startswith('.hg/'):
432 elif wpath.startswith('.hg/'):
433 if wpath == '.hg/wlock':
433 if wpath == '.hg/wlock':
434 self.check_dirstate()
434 self.check_dirstate()
435 return
435 return
436
436
437 self.updatestatus(wpath, None)
437 self.updatestatus(wpath, None)
438
438
439 def schedule_work(self, wpath, evt):
439 def schedule_work(self, wpath, evt):
440 self.eventq.setdefault(wpath, [])
440 self.eventq.setdefault(wpath, [])
441 prev = self.eventq[wpath]
441 prev = self.eventq[wpath]
442 try:
442 try:
443 if prev and evt == 'm' and prev[-1] in 'cm':
443 if prev and evt == 'm' and prev[-1] in 'cm':
444 return
444 return
445 self.eventq[wpath].append(evt)
445 self.eventq[wpath].append(evt)
446 finally:
446 finally:
447 self.deferred += 1
447 self.deferred += 1
448 self.timeout = 250
448 self.timeout = 250
449
449
450 def deferred_event(self, wpath, evt):
450 def deferred_event(self, wpath, evt):
451 if evt == 'c':
451 if evt == 'c':
452 self.created(wpath)
452 self.created(wpath)
453 elif evt == 'm':
453 elif evt == 'm':
454 self.modified(wpath)
454 self.modified(wpath)
455 elif evt == 'd':
455 elif evt == 'd':
456 self.deleted(wpath)
456 self.deleted(wpath)
457
457
458 def process_create(self, wpath, evt):
458 def process_create(self, wpath, evt):
459 if self.ui.debugflag:
459 if self.ui.debugflag:
460 self.ui.note(_('%s event: created %s\n') %
460 self.ui.note(_('%s event: created %s\n') %
461 (self.event_time(), wpath))
461 (self.event_time(), wpath))
462
462
463 if evt.mask & inotify.IN_ISDIR:
463 if evt.mask & inotify.IN_ISDIR:
464 self.scan(wpath)
464 self.scan(wpath)
465 else:
465 else:
466 self.schedule_work(wpath, 'c')
466 self.schedule_work(wpath, 'c')
467
467
468 def process_delete(self, wpath, evt):
468 def process_delete(self, wpath, evt):
469 if self.ui.debugflag:
469 if self.ui.debugflag:
470 self.ui.note(('%s event: deleted %s\n') %
470 self.ui.note(('%s event: deleted %s\n') %
471 (self.event_time(), wpath))
471 (self.event_time(), wpath))
472
472
473 if evt.mask & inotify.IN_ISDIR:
473 if evt.mask & inotify.IN_ISDIR:
474 self.scan(wpath)
474 self.scan(wpath)
475 else:
475 else:
476 self.schedule_work(wpath, 'd')
476 self.schedule_work(wpath, 'd')
477
477
478 def process_modify(self, wpath, evt):
478 def process_modify(self, wpath, evt):
479 if self.ui.debugflag:
479 if self.ui.debugflag:
480 self.ui.note(_('%s event: modified %s\n') %
480 self.ui.note(_('%s event: modified %s\n') %
481 (self.event_time(), wpath))
481 (self.event_time(), wpath))
482
482
483 if not (evt.mask & inotify.IN_ISDIR):
483 if not (evt.mask & inotify.IN_ISDIR):
484 self.schedule_work(wpath, 'm')
484 self.schedule_work(wpath, 'm')
485
485
486 def process_unmount(self, evt):
486 def process_unmount(self, evt):
487 self.ui.warn(_('filesystem containing %s was unmounted\n') %
487 self.ui.warn(_('filesystem containing %s was unmounted\n') %
488 evt.fullpath)
488 evt.fullpath)
489 sys.exit(0)
489 sys.exit(0)
490
490
491 def handle_event(self, fd, event):
491 def handle_event(self, fd, event):
492 if self.ui.debugflag:
492 if self.ui.debugflag:
493 self.ui.note('%s readable: %d bytes\n' %
493 self.ui.note('%s readable: %d bytes\n' %
494 (self.event_time(), self.threshold.readable()))
494 (self.event_time(), self.threshold.readable()))
495 if not self.threshold():
495 if not self.threshold():
496 if self.registered:
496 if self.registered:
497 if self.ui.debugflag:
497 if self.ui.debugflag:
498 self.ui.note('%s below threshold - unhooking\n' %
498 self.ui.note('%s below threshold - unhooking\n' %
499 (self.event_time()))
499 (self.event_time()))
500 self.master.poll.unregister(fd)
500 self.master.poll.unregister(fd)
501 self.registered = False
501 self.registered = False
502 self.timeout = 250
502 self.timeout = 250
503 else:
503 else:
504 self.read_events()
504 self.read_events()
505
505
506 def read_events(self, bufsize=None):
506 def read_events(self, bufsize=None):
507 events = self.watcher.read(bufsize)
507 events = self.watcher.read(bufsize)
508 if self.ui.debugflag:
508 if self.ui.debugflag:
509 self.ui.note('%s reading %d events\n' %
509 self.ui.note('%s reading %d events\n' %
510 (self.event_time(), len(events)))
510 (self.event_time(), len(events)))
511 for evt in events:
511 for evt in events:
512 wpath = self.wpath(evt)
512 wpath = self.wpath(evt)
513 if evt.mask & inotify.IN_UNMOUNT:
513 if evt.mask & inotify.IN_UNMOUNT:
514 self.process_unmount(wpath, evt)
514 self.process_unmount(wpath, evt)
515 elif evt.mask & (inotify.IN_MODIFY | inotify.IN_ATTRIB):
515 elif evt.mask & (inotify.IN_MODIFY | inotify.IN_ATTRIB):
516 self.process_modify(wpath, evt)
516 self.process_modify(wpath, evt)
517 elif evt.mask & (inotify.IN_DELETE | inotify.IN_DELETE_SELF |
517 elif evt.mask & (inotify.IN_DELETE | inotify.IN_DELETE_SELF |
518 inotify.IN_MOVED_FROM):
518 inotify.IN_MOVED_FROM):
519 self.process_delete(wpath, evt)
519 self.process_delete(wpath, evt)
520 elif evt.mask & (inotify.IN_CREATE | inotify.IN_MOVED_TO):
520 elif evt.mask & (inotify.IN_CREATE | inotify.IN_MOVED_TO):
521 self.process_create(wpath, evt)
521 self.process_create(wpath, evt)
522
522
523 def handle_timeout(self):
523 def handle_timeout(self):
524 if not self.registered:
524 if not self.registered:
525 if self.ui.debugflag:
525 if self.ui.debugflag:
526 self.ui.note('%s hooking back up with %d bytes readable\n' %
526 self.ui.note('%s hooking back up with %d bytes readable\n' %
527 (self.event_time(), self.threshold.readable()))
527 (self.event_time(), self.threshold.readable()))
528 self.read_events(0)
528 self.read_events(0)
529 self.master.poll.register(self, select.POLLIN)
529 self.master.poll.register(self, select.POLLIN)
530 self.registered = True
530 self.registered = True
531
531
532 if self.eventq:
532 if self.eventq:
533 if self.ui.debugflag:
533 if self.ui.debugflag:
534 self.ui.note('%s processing %d deferred events as %d\n' %
534 self.ui.note('%s processing %d deferred events as %d\n' %
535 (self.event_time(), self.deferred,
535 (self.event_time(), self.deferred,
536 len(self.eventq)))
536 len(self.eventq)))
537 eventq = self.eventq.items()
537 for wpath, evts in util.sort(self.eventq.items()):
538 eventq.sort()
539 for wpath, evts in eventq:
540 for evt in evts:
538 for evt in evts:
541 self.deferred_event(wpath, evt)
539 self.deferred_event(wpath, evt)
542 self.eventq.clear()
540 self.eventq.clear()
543 self.deferred = 0
541 self.deferred = 0
544 self.timeout = None
542 self.timeout = None
545
543
546 def shutdown(self):
544 def shutdown(self):
547 self.watcher.close()
545 self.watcher.close()
548
546
549 class Server(object):
547 class Server(object):
550 poll_events = select.POLLIN
548 poll_events = select.POLLIN
551
549
552 def __init__(self, ui, repo, watcher, timeout):
550 def __init__(self, ui, repo, watcher, timeout):
553 self.ui = ui
551 self.ui = ui
554 self.repo = repo
552 self.repo = repo
555 self.watcher = watcher
553 self.watcher = watcher
556 self.timeout = timeout
554 self.timeout = timeout
557 self.sock = socket.socket(socket.AF_UNIX)
555 self.sock = socket.socket(socket.AF_UNIX)
558 self.sockpath = self.repo.join('inotify.sock')
556 self.sockpath = self.repo.join('inotify.sock')
559 try:
557 try:
560 self.sock.bind(self.sockpath)
558 self.sock.bind(self.sockpath)
561 except socket.error, err:
559 except socket.error, err:
562 if err[0] == errno.EADDRINUSE:
560 if err[0] == errno.EADDRINUSE:
563 raise AlreadyStartedException(_('could not start server: %s') \
561 raise AlreadyStartedException(_('could not start server: %s') \
564 % err[1])
562 % err[1])
565 raise
563 raise
566 self.sock.listen(5)
564 self.sock.listen(5)
567 self.fileno = self.sock.fileno
565 self.fileno = self.sock.fileno
568
566
569 def handle_timeout(self):
567 def handle_timeout(self):
570 pass
568 pass
571
569
572 def handle_event(self, fd, event):
570 def handle_event(self, fd, event):
573 sock, addr = self.sock.accept()
571 sock, addr = self.sock.accept()
574
572
575 cs = common.recvcs(sock)
573 cs = common.recvcs(sock)
576 version = ord(cs.read(1))
574 version = ord(cs.read(1))
577
575
578 sock.sendall(chr(common.version))
576 sock.sendall(chr(common.version))
579
577
580 if version != common.version:
578 if version != common.version:
581 self.ui.warn(_('received query from incompatible client '
579 self.ui.warn(_('received query from incompatible client '
582 'version %d\n') % version)
580 'version %d\n') % version)
583 return
581 return
584
582
585 names = cs.read().split('\0')
583 names = cs.read().split('\0')
586
584
587 states = names.pop()
585 states = names.pop()
588
586
589 self.ui.note(_('answering query for %r\n') % states)
587 self.ui.note(_('answering query for %r\n') % states)
590
588
591 if self.watcher.timeout:
589 if self.watcher.timeout:
592 # We got a query while a rescan is pending. Make sure we
590 # We got a query while a rescan is pending. Make sure we
593 # rescan before responding, or we could give back a wrong
591 # rescan before responding, or we could give back a wrong
594 # answer.
592 # answer.
595 self.watcher.handle_timeout()
593 self.watcher.handle_timeout()
596
594
597 if not names:
595 if not names:
598 def genresult(states, tree):
596 def genresult(states, tree):
599 for fn, state in self.watcher.walk(states, tree):
597 for fn, state in self.watcher.walk(states, tree):
600 yield fn
598 yield fn
601 else:
599 else:
602 def genresult(states, tree):
600 def genresult(states, tree):
603 for fn in names:
601 for fn in names:
604 l = self.watcher.lookup(fn, tree)
602 l = self.watcher.lookup(fn, tree)
605 try:
603 try:
606 if l in states:
604 if l in states:
607 yield fn
605 yield fn
608 except TypeError:
606 except TypeError:
609 for f, s in self.watcher.walk(states, l, fn):
607 for f, s in self.watcher.walk(states, l, fn):
610 yield f
608 yield f
611
609
612 results = ['\0'.join(r) for r in [
610 results = ['\0'.join(r) for r in [
613 genresult('l', self.watcher.statustrees['l']),
611 genresult('l', self.watcher.statustrees['l']),
614 genresult('m', self.watcher.statustrees['m']),
612 genresult('m', self.watcher.statustrees['m']),
615 genresult('a', self.watcher.statustrees['a']),
613 genresult('a', self.watcher.statustrees['a']),
616 genresult('r', self.watcher.statustrees['r']),
614 genresult('r', self.watcher.statustrees['r']),
617 genresult('!', self.watcher.statustrees['!']),
615 genresult('!', self.watcher.statustrees['!']),
618 '?' in states and genresult('?', self.watcher.statustrees['?']) or [],
616 '?' in states and genresult('?', self.watcher.statustrees['?']) or [],
619 [],
617 [],
620 'c' in states and genresult('n', self.watcher.tree) or [],
618 'c' in states and genresult('n', self.watcher.tree) or [],
621 ]]
619 ]]
622
620
623 try:
621 try:
624 try:
622 try:
625 sock.sendall(struct.pack(common.resphdrfmt,
623 sock.sendall(struct.pack(common.resphdrfmt,
626 *map(len, results)))
624 *map(len, results)))
627 sock.sendall(''.join(results))
625 sock.sendall(''.join(results))
628 finally:
626 finally:
629 sock.shutdown(socket.SHUT_WR)
627 sock.shutdown(socket.SHUT_WR)
630 except socket.error, err:
628 except socket.error, err:
631 if err[0] != errno.EPIPE:
629 if err[0] != errno.EPIPE:
632 raise
630 raise
633
631
634 def shutdown(self):
632 def shutdown(self):
635 self.sock.close()
633 self.sock.close()
636 try:
634 try:
637 os.unlink(self.sockpath)
635 os.unlink(self.sockpath)
638 except OSError, err:
636 except OSError, err:
639 if err.errno != errno.ENOENT:
637 if err.errno != errno.ENOENT:
640 raise
638 raise
641
639
642 class Master(object):
640 class Master(object):
643 def __init__(self, ui, repo, timeout=None):
641 def __init__(self, ui, repo, timeout=None):
644 self.ui = ui
642 self.ui = ui
645 self.repo = repo
643 self.repo = repo
646 self.poll = select.poll()
644 self.poll = select.poll()
647 self.watcher = Watcher(ui, repo, self)
645 self.watcher = Watcher(ui, repo, self)
648 self.server = Server(ui, repo, self.watcher, timeout)
646 self.server = Server(ui, repo, self.watcher, timeout)
649 self.table = {}
647 self.table = {}
650 for obj in (self.watcher, self.server):
648 for obj in (self.watcher, self.server):
651 fd = obj.fileno()
649 fd = obj.fileno()
652 self.table[fd] = obj
650 self.table[fd] = obj
653 self.poll.register(fd, obj.poll_events)
651 self.poll.register(fd, obj.poll_events)
654
652
655 def register(self, fd, mask):
653 def register(self, fd, mask):
656 self.poll.register(fd, mask)
654 self.poll.register(fd, mask)
657
655
658 def shutdown(self):
656 def shutdown(self):
659 for obj in self.table.itervalues():
657 for obj in self.table.itervalues():
660 obj.shutdown()
658 obj.shutdown()
661
659
662 def run(self):
660 def run(self):
663 self.watcher.setup()
661 self.watcher.setup()
664 self.ui.note(_('finished setup\n'))
662 self.ui.note(_('finished setup\n'))
665 if os.getenv('TIME_STARTUP'):
663 if os.getenv('TIME_STARTUP'):
666 sys.exit(0)
664 sys.exit(0)
667 while True:
665 while True:
668 timeout = None
666 timeout = None
669 timeobj = None
667 timeobj = None
670 for obj in self.table.itervalues():
668 for obj in self.table.itervalues():
671 if obj.timeout is not None and (timeout is None or obj.timeout < timeout):
669 if obj.timeout is not None and (timeout is None or obj.timeout < timeout):
672 timeout, timeobj = obj.timeout, obj
670 timeout, timeobj = obj.timeout, obj
673 try:
671 try:
674 if self.ui.debugflag:
672 if self.ui.debugflag:
675 if timeout is None:
673 if timeout is None:
676 self.ui.note('polling: no timeout\n')
674 self.ui.note('polling: no timeout\n')
677 else:
675 else:
678 self.ui.note('polling: %sms timeout\n' % timeout)
676 self.ui.note('polling: %sms timeout\n' % timeout)
679 events = self.poll.poll(timeout)
677 events = self.poll.poll(timeout)
680 except select.error, err:
678 except select.error, err:
681 if err[0] == errno.EINTR:
679 if err[0] == errno.EINTR:
682 continue
680 continue
683 raise
681 raise
684 if events:
682 if events:
685 for fd, event in events:
683 for fd, event in events:
686 self.table[fd].handle_event(fd, event)
684 self.table[fd].handle_event(fd, event)
687 elif timeobj:
685 elif timeobj:
688 timeobj.handle_timeout()
686 timeobj.handle_timeout()
689
687
690 def start(ui, repo):
688 def start(ui, repo):
691 m = Master(ui, repo)
689 m = Master(ui, repo)
692 sys.stdout.flush()
690 sys.stdout.flush()
693 sys.stderr.flush()
691 sys.stderr.flush()
694
692
695 pid = os.fork()
693 pid = os.fork()
696 if pid:
694 if pid:
697 return pid
695 return pid
698
696
699 os.setsid()
697 os.setsid()
700
698
701 fd = os.open('/dev/null', os.O_RDONLY)
699 fd = os.open('/dev/null', os.O_RDONLY)
702 os.dup2(fd, 0)
700 os.dup2(fd, 0)
703 if fd > 0:
701 if fd > 0:
704 os.close(fd)
702 os.close(fd)
705
703
706 fd = os.open(ui.config('inotify', 'log', '/dev/null'),
704 fd = os.open(ui.config('inotify', 'log', '/dev/null'),
707 os.O_RDWR | os.O_CREAT | os.O_TRUNC)
705 os.O_RDWR | os.O_CREAT | os.O_TRUNC)
708 os.dup2(fd, 1)
706 os.dup2(fd, 1)
709 os.dup2(fd, 2)
707 os.dup2(fd, 2)
710 if fd > 2:
708 if fd > 2:
711 os.close(fd)
709 os.close(fd)
712
710
713 try:
711 try:
714 m.run()
712 m.run()
715 finally:
713 finally:
716 m.shutdown()
714 m.shutdown()
717 os._exit(0)
715 os._exit(0)
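
The pattern this changeset keeps replacing is copy, sort, iterate; the new util.sort() helper collapses it into one call, as in the handle_timeout hunk above and the keyword.py hunks below. The helper itself lives in mercurial/util.py, which is not among the hunks shown here, so the following is only a sketch inferred from the call sites util.sort(self.eventq.items()) and util.sort(modified + added + clean + unknown):

def sort(l):
    # Sketch, not the code from this changeset: accept any iterable,
    # copy it into a list if needed, sort it, and hand it back so a
    # caller can write "for k, v in util.sort(d.items()):" in one line.
    if not isinstance(l, list):
        l = list(l)
    l.sort()
    return l

Call sites that previously needed three statements (copy the items, sort the copy, loop over the result) shrink to a single for statement, which is the point of the helper.
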
@@ -1,564 +1,562 b''
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a DSCM
10 # Keyword expansion hack against the grain of a DSCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an audience
14 # files (like LaTeX packages), that are mostly addressed to an audience
15 # not running a version control system.
15 # not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Setup in hgrc:
24 # Setup in hgrc:
25 #
25 #
26 # [extensions]
26 # [extensions]
27 # # enable extension
27 # # enable extension
28 # hgext.keyword =
28 # hgext.keyword =
29 #
29 #
30 # Files to act upon/ignore are specified in the [keyword] section.
30 # Files to act upon/ignore are specified in the [keyword] section.
31 # Customized keyword template mappings in the [keywordmaps] section.
31 # Customized keyword template mappings in the [keywordmaps] section.
32 #
32 #
33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34
34
35 '''keyword expansion in local repositories
35 '''keyword expansion in local repositories
36
36
37 This extension expands RCS/CVS-like or self-customized $Keywords$
37 This extension expands RCS/CVS-like or self-customized $Keywords$
38 in tracked text files selected by your configuration.
38 in tracked text files selected by your configuration.
39
39
40 Keywords are only expanded in local repositories and not stored in
40 Keywords are only expanded in local repositories and not stored in
41 the change history. The mechanism can be regarded as a convenience
41 the change history. The mechanism can be regarded as a convenience
42 for the current user or for archive distribution.
42 for the current user or for archive distribution.
43
43
44 Configuration is done in the [keyword] and [keywordmaps] sections
44 Configuration is done in the [keyword] and [keywordmaps] sections
45 of hgrc files.
45 of hgrc files.
46
46
47 Example:
47 Example:
48
48
49 [keyword]
49 [keyword]
50 # expand keywords in every python file except those matching "x*"
50 # expand keywords in every python file except those matching "x*"
51 **.py =
51 **.py =
52 x* = ignore
52 x* = ignore
53
53
54 Note: the more specific your filename patterns are,
54 Note: the more specific your filename patterns are,
55 the less speed you lose in huge repos.
55 the less speed you lose in huge repos.
56
56
57 For [keywordmaps] template mapping and expansion demonstration and
57 For [keywordmaps] template mapping and expansion demonstration and
58 control run "hg kwdemo".
58 control run "hg kwdemo".
59
59
60 An additional date template filter {date|utcdate} is provided.
60 An additional date template filter {date|utcdate} is provided.
61
61
62 The default template mappings (view with "hg kwdemo -d") can be replaced
62 The default template mappings (view with "hg kwdemo -d") can be replaced
63 with customized keywords and templates.
63 with customized keywords and templates.
64 Again, run "hg kwdemo" to control the results of your config changes.
64 Again, run "hg kwdemo" to control the results of your config changes.
65
65
66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 the risk of inadvertently storing expanded keywords in the change history.
67 the risk of inadvertently storing expanded keywords in the change history.
68
68
69 To force expansion after enabling it, or a configuration change, run
69 To force expansion after enabling it, or a configuration change, run
70 "hg kwexpand".
70 "hg kwexpand".
71
71
72 Also, when committing with the record extension or using mq's qrecord, be aware
72 Also, when committing with the record extension or using mq's qrecord, be aware
73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
74 question to update keyword expansions after all changes have been checked in.
74 question to update keyword expansions after all changes have been checked in.
75
75
76 Expansions spanning more than one line and incremental expansions,
76 Expansions spanning more than one line and incremental expansions,
77 like CVS' $Log$, are not supported. A keyword template map
77 like CVS' $Log$, are not supported. A keyword template map
78 "Log = {desc}" expands to the first line of the changeset description.
78 "Log = {desc}" expands to the first line of the changeset description.
79 '''
79 '''
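
As a rough illustration of the mechanism the docstring describes (this sketch is not part of the changeset; the real extension derives each replacement from Mercurial's changeset templater rather than a static dict), keyword expansion amounts to a regex substitution over the configured keyword names:

import re

# Illustrative sketch with made-up values: the extension fills these in from
# [keywordmaps] templates and the current changeset instead of a literal dict.
templates = {'Revision': 'abcdef123456', 'Author': 'alice'}
kwpat = re.compile(r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(map(re.escape, templates)))

def expand(text):
    # replace "$Keyword$" or an already expanded "$Keyword: old value $"
    return kwpat.sub(lambda m: '$%s: %s $' % (m.group(1), templates[m.group(1)]), text)

def shrink(text):
    # undo expansion, as "hg kwshrink" does for tracked files
    return kwpat.sub(r'$\1$', text)

print(expand('$Revision$ committed by $Author$'))
# -> $Revision: abcdef123456 $ committed by $Author: alice $
print(shrink(expand('$Revision$')))
# -> $Revision$
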
80
80
81 from mercurial import commands, cmdutil, dispatch, filelog, revlog
81 from mercurial import commands, cmdutil, dispatch, filelog, revlog
82 from mercurial import patch, localrepo, templater, templatefilters, util
82 from mercurial import patch, localrepo, templater, templatefilters, util
83 from mercurial.hgweb import webcommands
83 from mercurial.hgweb import webcommands
84 from mercurial.node import nullid, hex
84 from mercurial.node import nullid, hex
85 from mercurial.i18n import _
85 from mercurial.i18n import _
86 import re, shutil, tempfile, time
86 import re, shutil, tempfile, time
87
87
88 commands.optionalrepo += ' kwdemo'
88 commands.optionalrepo += ' kwdemo'
89
89
90 # hg commands that do not act on keywords
90 # hg commands that do not act on keywords
91 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
91 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
92 ' log outgoing push rename rollback tip'
92 ' log outgoing push rename rollback tip'
93 ' convert email glog')
93 ' convert email glog')
94
94
95 # hg commands that trigger expansion only when writing to working dir,
95 # hg commands that trigger expansion only when writing to working dir,
96 # not when reading filelog, and unexpand when reading from working dir
96 # not when reading filelog, and unexpand when reading from working dir
97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
98
98
99 def utcdate(date):
99 def utcdate(date):
100 '''Returns hgdate in cvs-like UTC format.'''
100 '''Returns hgdate in cvs-like UTC format.'''
101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
102
102
103 # make keyword tools accessible
103 # make keyword tools accessible
104 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
104 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
105
105
106
106
107 class kwtemplater(object):
107 class kwtemplater(object):
108 '''
108 '''
109 Sets up keyword templates, corresponding keyword regex, and
109 Sets up keyword templates, corresponding keyword regex, and
110 provides keyword substitution functions.
110 provides keyword substitution functions.
111 '''
111 '''
112 templates = {
112 templates = {
113 'Revision': '{node|short}',
113 'Revision': '{node|short}',
114 'Author': '{author|user}',
114 'Author': '{author|user}',
115 'Date': '{date|utcdate}',
115 'Date': '{date|utcdate}',
116 'RCSFile': '{file|basename},v',
116 'RCSFile': '{file|basename},v',
117 'Source': '{root}/{file},v',
117 'Source': '{root}/{file},v',
118 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
118 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
119 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
119 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
120 }
120 }
121
121
122 def __init__(self, ui, repo):
122 def __init__(self, ui, repo):
123 self.ui = ui
123 self.ui = ui
124 self.repo = repo
124 self.repo = repo
125 self.matcher = util.matcher(repo.root,
125 self.matcher = util.matcher(repo.root,
126 inc=kwtools['inc'], exc=kwtools['exc'])[1]
126 inc=kwtools['inc'], exc=kwtools['exc'])[1]
127 self.restrict = kwtools['hgcmd'] in restricted.split()
127 self.restrict = kwtools['hgcmd'] in restricted.split()
128
128
129 kwmaps = self.ui.configitems('keywordmaps')
129 kwmaps = self.ui.configitems('keywordmaps')
130 if kwmaps: # override default templates
130 if kwmaps: # override default templates
131 kwmaps = [(k, templater.parsestring(v, False))
131 kwmaps = [(k, templater.parsestring(v, False))
132 for (k, v) in kwmaps]
132 for (k, v) in kwmaps]
133 self.templates = dict(kwmaps)
133 self.templates = dict(kwmaps)
134 escaped = map(re.escape, self.templates.keys())
134 escaped = map(re.escape, self.templates.keys())
135 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
135 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
136 self.re_kw = re.compile(kwpat)
136 self.re_kw = re.compile(kwpat)
137
137
138 templatefilters.filters['utcdate'] = utcdate
138 templatefilters.filters['utcdate'] = utcdate
139 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
139 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
140 False, '', False)
140 False, '', False)
141
141
142 def getnode(self, path, fnode):
142 def getnode(self, path, fnode):
143 '''Derives changenode from file path and filenode.'''
143 '''Derives changenode from file path and filenode.'''
144 # used by kwfilelog.read and kwexpand
144 # used by kwfilelog.read and kwexpand
145 c = self.repo.filectx(path, fileid=fnode)
145 c = self.repo.filectx(path, fileid=fnode)
146 return c.node()
146 return c.node()
147
147
148 def substitute(self, data, path, node, subfunc):
148 def substitute(self, data, path, node, subfunc):
149 '''Replaces keywords in data with expanded template.'''
149 '''Replaces keywords in data with expanded template.'''
150 def kwsub(mobj):
150 def kwsub(mobj):
151 kw = mobj.group(1)
151 kw = mobj.group(1)
152 self.ct.use_template(self.templates[kw])
152 self.ct.use_template(self.templates[kw])
153 self.ui.pushbuffer()
153 self.ui.pushbuffer()
154 self.ct.show(changenode=node, root=self.repo.root, file=path)
154 self.ct.show(changenode=node, root=self.repo.root, file=path)
155 ekw = templatefilters.firstline(self.ui.popbuffer())
155 ekw = templatefilters.firstline(self.ui.popbuffer())
156 return '$%s: %s $' % (kw, ekw)
156 return '$%s: %s $' % (kw, ekw)
157 return subfunc(kwsub, data)
157 return subfunc(kwsub, data)
158
158
159 def expand(self, path, node, data):
159 def expand(self, path, node, data):
160 '''Returns data with keywords expanded.'''
160 '''Returns data with keywords expanded.'''
161 if not self.restrict and self.matcher(path) and not util.binary(data):
161 if not self.restrict and self.matcher(path) and not util.binary(data):
162 changenode = self.getnode(path, node)
162 changenode = self.getnode(path, node)
163 return self.substitute(data, path, changenode, self.re_kw.sub)
163 return self.substitute(data, path, changenode, self.re_kw.sub)
164 return data
164 return data
165
165
166 def iskwfile(self, path, flagfunc):
166 def iskwfile(self, path, flagfunc):
167 '''Returns true if path matches [keyword] pattern
167 '''Returns true if path matches [keyword] pattern
168 and is not a symbolic link.
168 and is not a symbolic link.
169 Caveat: localrepository._link fails on Windows.'''
169 Caveat: localrepository._link fails on Windows.'''
170 return self.matcher(path) and not 'l' in flagfunc(path)
170 return self.matcher(path) and not 'l' in flagfunc(path)
171
171
172 def overwrite(self, node, expand, files):
172 def overwrite(self, node, expand, files):
173 '''Overwrites selected files expanding/shrinking keywords.'''
173 '''Overwrites selected files expanding/shrinking keywords.'''
174 if node is not None: # commit
174 if node is not None: # commit
175 ctx = self.repo[node]
175 ctx = self.repo[node]
176 mf = ctx.manifest()
176 mf = ctx.manifest()
177 files = [f for f in ctx.files() if f in mf]
177 files = [f for f in ctx.files() if f in mf]
178 notify = self.ui.debug
178 notify = self.ui.debug
179 else: # kwexpand/kwshrink
179 else: # kwexpand/kwshrink
180 ctx = self.repo['.']
180 ctx = self.repo['.']
181 mf = ctx.manifest()
181 mf = ctx.manifest()
182 notify = self.ui.note
182 notify = self.ui.note
183 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
183 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
184 if candidates:
184 if candidates:
185 self.restrict = True # do not expand when reading
185 self.restrict = True # do not expand when reading
186 candidates.sort()
187 action = expand and 'expanding' or 'shrinking'
186 action = expand and 'expanding' or 'shrinking'
188 for f in candidates:
187 for f in candidates:
189 fp = self.repo.file(f)
188 fp = self.repo.file(f)
190 data = fp.read(mf[f])
189 data = fp.read(mf[f])
191 if util.binary(data):
190 if util.binary(data):
192 continue
191 continue
193 if expand:
192 if expand:
194 changenode = node or self.getnode(f, mf[f])
193 changenode = node or self.getnode(f, mf[f])
195 data, found = self.substitute(data, f, changenode,
194 data, found = self.substitute(data, f, changenode,
196 self.re_kw.subn)
195 self.re_kw.subn)
197 else:
196 else:
198 found = self.re_kw.search(data)
197 found = self.re_kw.search(data)
199 if found:
198 if found:
200 notify(_('overwriting %s %s keywords\n') % (f, action))
199 notify(_('overwriting %s %s keywords\n') % (f, action))
201 self.repo.wwrite(f, data, mf.flags(f))
200 self.repo.wwrite(f, data, mf.flags(f))
202 self.repo.dirstate.normal(f)
201 self.repo.dirstate.normal(f)
203 self.restrict = False
202 self.restrict = False
204
203
205 def shrinktext(self, text):
204 def shrinktext(self, text):
206 '''Unconditionally removes all keyword substitutions from text.'''
205 '''Unconditionally removes all keyword substitutions from text.'''
207 return self.re_kw.sub(r'$\1$', text)
206 return self.re_kw.sub(r'$\1$', text)
208
207
209 def shrink(self, fname, text):
208 def shrink(self, fname, text):
210 '''Returns text with all keyword substitutions removed.'''
209 '''Returns text with all keyword substitutions removed.'''
211 if self.matcher(fname) and not util.binary(text):
210 if self.matcher(fname) and not util.binary(text):
212 return self.shrinktext(text)
211 return self.shrinktext(text)
213 return text
212 return text
214
213
215 def shrinklines(self, fname, lines):
214 def shrinklines(self, fname, lines):
216 '''Returns lines with keyword substitutions removed.'''
215 '''Returns lines with keyword substitutions removed.'''
217 if self.matcher(fname):
216 if self.matcher(fname):
218 text = ''.join(lines)
217 text = ''.join(lines)
219 if not util.binary(text):
218 if not util.binary(text):
220 return self.shrinktext(text).splitlines(True)
219 return self.shrinktext(text).splitlines(True)
221 return lines
220 return lines
222
221
223 def wread(self, fname, data):
222 def wread(self, fname, data):
224 '''If in restricted mode returns data read from wdir with
223 '''If in restricted mode returns data read from wdir with
225 keyword substitutions removed.'''
224 keyword substitutions removed.'''
226 return self.restrict and self.shrink(fname, data) or data
225 return self.restrict and self.shrink(fname, data) or data
227
226
228 class kwfilelog(filelog.filelog):
227 class kwfilelog(filelog.filelog):
229 '''
228 '''
230 Subclass of filelog to hook into its read, add, cmp methods.
229 Subclass of filelog to hook into its read, add, cmp methods.
231 Keywords are "stored" unexpanded, and processed on reading.
230 Keywords are "stored" unexpanded, and processed on reading.
232 '''
231 '''
233 def __init__(self, opener, kwt, path):
232 def __init__(self, opener, kwt, path):
234 super(kwfilelog, self).__init__(opener, path)
233 super(kwfilelog, self).__init__(opener, path)
235 self.kwt = kwt
234 self.kwt = kwt
236 self.path = path
235 self.path = path
237
236
238 def read(self, node):
237 def read(self, node):
239 '''Expands keywords when reading filelog.'''
238 '''Expands keywords when reading filelog.'''
240 data = super(kwfilelog, self).read(node)
239 data = super(kwfilelog, self).read(node)
241 return self.kwt.expand(self.path, node, data)
240 return self.kwt.expand(self.path, node, data)
242
241
243 def add(self, text, meta, tr, link, p1=None, p2=None):
242 def add(self, text, meta, tr, link, p1=None, p2=None):
244 '''Removes keyword substitutions when adding to filelog.'''
243 '''Removes keyword substitutions when adding to filelog.'''
245 text = self.kwt.shrink(self.path, text)
244 text = self.kwt.shrink(self.path, text)
246 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
245 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
247
246
248 def cmp(self, node, text):
247 def cmp(self, node, text):
249 '''Removes keyword substitutions for comparison.'''
248 '''Removes keyword substitutions for comparison.'''
250 text = self.kwt.shrink(self.path, text)
249 text = self.kwt.shrink(self.path, text)
251 if self.renamed(node):
250 if self.renamed(node):
252 t2 = super(kwfilelog, self).read(node)
251 t2 = super(kwfilelog, self).read(node)
253 return t2 != text
252 return t2 != text
254 return revlog.revlog.cmp(self, node, text)
253 return revlog.revlog.cmp(self, node, text)
255
254
256 def _status(ui, repo, kwt, unknown, *pats, **opts):
255 def _status(ui, repo, kwt, unknown, *pats, **opts):
257 '''Bails out if [keyword] configuration is not active.
256 '''Bails out if [keyword] configuration is not active.
258 Returns status of working directory.'''
257 Returns status of working directory.'''
259 if kwt:
258 if kwt:
260 matcher = cmdutil.match(repo, pats, opts)
259 matcher = cmdutil.match(repo, pats, opts)
261 return repo.status(match=matcher, unknown=unknown, clean=True)
260 return repo.status(match=matcher, unknown=unknown, clean=True)
262 if ui.configitems('keyword'):
261 if ui.configitems('keyword'):
263 raise util.Abort(_('[keyword] patterns cannot match'))
262 raise util.Abort(_('[keyword] patterns cannot match'))
264 raise util.Abort(_('no [keyword] patterns configured'))
263 raise util.Abort(_('no [keyword] patterns configured'))
265
264
266 def _kwfwrite(ui, repo, expand, *pats, **opts):
265 def _kwfwrite(ui, repo, expand, *pats, **opts):
267 '''Selects files and passes them to kwtemplater.overwrite.'''
266 '''Selects files and passes them to kwtemplater.overwrite.'''
268 if repo.dirstate.parents()[1] != nullid:
267 if repo.dirstate.parents()[1] != nullid:
269 raise util.Abort(_('outstanding uncommitted merge'))
268 raise util.Abort(_('outstanding uncommitted merge'))
270 kwt = kwtools['templater']
269 kwt = kwtools['templater']
271 status = _status(ui, repo, kwt, False, *pats, **opts)
270 status = _status(ui, repo, kwt, False, *pats, **opts)
272 modified, added, removed, deleted = status[:4]
271 modified, added, removed, deleted = status[:4]
273 if modified or added or removed or deleted:
272 if modified or added or removed or deleted:
274 raise util.Abort(_('outstanding uncommitted changes'))
273 raise util.Abort(_('outstanding uncommitted changes'))
275 wlock = lock = None
274 wlock = lock = None
276 try:
275 try:
277 wlock = repo.wlock()
276 wlock = repo.wlock()
278 lock = repo.lock()
277 lock = repo.lock()
279 kwt.overwrite(None, expand, status[6])
278 kwt.overwrite(None, expand, status[6])
280 finally:
279 finally:
281 del wlock, lock
280 del wlock, lock
282
281
283
282
284 def demo(ui, repo, *args, **opts):
283 def demo(ui, repo, *args, **opts):
285 '''print [keywordmaps] configuration and an expansion example
284 '''print [keywordmaps] configuration and an expansion example
286
285
287 Show current, custom, or default keyword template maps
286 Show current, custom, or default keyword template maps
288 and their expansion.
287 and their expansion.
289
288
290 Extend current configuration by specifying maps as arguments
289 Extend current configuration by specifying maps as arguments
291 and optionally by reading from an additional hgrc file.
290 and optionally by reading from an additional hgrc file.
292
291
293 Override current keyword template maps with "default" option.
292 Override current keyword template maps with "default" option.
294 '''
293 '''
295 def demostatus(stat):
294 def demostatus(stat):
296 ui.status(_('\n\t%s\n') % stat)
295 ui.status(_('\n\t%s\n') % stat)
297
296
298 def demoitems(section, items):
297 def demoitems(section, items):
299 ui.write('[%s]\n' % section)
298 ui.write('[%s]\n' % section)
300 for k, v in items:
299 for k, v in items:
301 ui.write('%s = %s\n' % (k, v))
300 ui.write('%s = %s\n' % (k, v))
302
301
303 msg = 'hg keyword config and expansion example'
302 msg = 'hg keyword config and expansion example'
304 kwstatus = 'current'
303 kwstatus = 'current'
305 fn = 'demo.txt'
304 fn = 'demo.txt'
306 branchname = 'demobranch'
305 branchname = 'demobranch'
307 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
306 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
308 ui.note(_('creating temporary repo at %s\n') % tmpdir)
307 ui.note(_('creating temporary repo at %s\n') % tmpdir)
309 repo = localrepo.localrepository(ui, tmpdir, True)
308 repo = localrepo.localrepository(ui, tmpdir, True)
310 ui.setconfig('keyword', fn, '')
309 ui.setconfig('keyword', fn, '')
311 if args or opts.get('rcfile'):
310 if args or opts.get('rcfile'):
312 kwstatus = 'custom'
311 kwstatus = 'custom'
313 if opts.get('rcfile'):
312 if opts.get('rcfile'):
314 ui.readconfig(opts.get('rcfile'))
313 ui.readconfig(opts.get('rcfile'))
315 if opts.get('default'):
314 if opts.get('default'):
316 kwstatus = 'default'
315 kwstatus = 'default'
317 kwmaps = kwtemplater.templates
316 kwmaps = kwtemplater.templates
318 if ui.configitems('keywordmaps'):
317 if ui.configitems('keywordmaps'):
319 # override maps from optional rcfile
318 # override maps from optional rcfile
320 for k, v in kwmaps.iteritems():
319 for k, v in kwmaps.iteritems():
321 ui.setconfig('keywordmaps', k, v)
320 ui.setconfig('keywordmaps', k, v)
322 elif args:
321 elif args:
323 # simulate hgrc parsing
322 # simulate hgrc parsing
324 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
323 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
325 fp = repo.opener('hgrc', 'w')
324 fp = repo.opener('hgrc', 'w')
326 fp.writelines(rcmaps)
325 fp.writelines(rcmaps)
327 fp.close()
326 fp.close()
328 ui.readconfig(repo.join('hgrc'))
327 ui.readconfig(repo.join('hgrc'))
329 if not opts.get('default'):
328 if not opts.get('default'):
330 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
329 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
331 uisetup(ui)
330 uisetup(ui)
332 reposetup(ui, repo)
331 reposetup(ui, repo)
333 for k, v in ui.configitems('extensions'):
332 for k, v in ui.configitems('extensions'):
334 if k.endswith('keyword'):
333 if k.endswith('keyword'):
335 extension = '%s = %s' % (k, v)
334 extension = '%s = %s' % (k, v)
336 break
335 break
337 demostatus('config using %s keyword template maps' % kwstatus)
336 demostatus('config using %s keyword template maps' % kwstatus)
338 ui.write('[extensions]\n%s\n' % extension)
337 ui.write('[extensions]\n%s\n' % extension)
339 demoitems('keyword', ui.configitems('keyword'))
338 demoitems('keyword', ui.configitems('keyword'))
340 demoitems('keywordmaps', kwmaps.iteritems())
339 demoitems('keywordmaps', kwmaps.iteritems())
341 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
340 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
342 repo.wopener(fn, 'w').write(keywords)
341 repo.wopener(fn, 'w').write(keywords)
343 repo.add([fn])
342 repo.add([fn])
344 path = repo.wjoin(fn)
343 path = repo.wjoin(fn)
345 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
344 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
346 ui.note(keywords)
345 ui.note(keywords)
347 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
346 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
348 # silence branch command if not verbose
347 # silence branch command if not verbose
349 quiet = ui.quiet
348 quiet = ui.quiet
350 ui.quiet = not ui.verbose
349 ui.quiet = not ui.verbose
351 commands.branch(ui, repo, branchname)
350 commands.branch(ui, repo, branchname)
352 ui.quiet = quiet
351 ui.quiet = quiet
353 for name, cmd in ui.configitems('hooks'):
352 for name, cmd in ui.configitems('hooks'):
354 if name.split('.', 1)[0].find('commit') > -1:
353 if name.split('.', 1)[0].find('commit') > -1:
355 repo.ui.setconfig('hooks', name, '')
354 repo.ui.setconfig('hooks', name, '')
356 ui.note(_('unhooked all commit hooks\n'))
355 ui.note(_('unhooked all commit hooks\n'))
357 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
356 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
358 repo.commit(text=msg)
357 repo.commit(text=msg)
359 format = ui.verbose and ' in %s' % path or ''
358 format = ui.verbose and ' in %s' % path or ''
360 demostatus('%s keywords expanded%s' % (kwstatus, format))
359 demostatus('%s keywords expanded%s' % (kwstatus, format))
361 ui.write(repo.wread(fn))
360 ui.write(repo.wread(fn))
362 ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
361 ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
363 shutil.rmtree(tmpdir, ignore_errors=True)
362 shutil.rmtree(tmpdir, ignore_errors=True)
364
363
365 def expand(ui, repo, *pats, **opts):
364 def expand(ui, repo, *pats, **opts):
366 '''expand keywords in working directory
365 '''expand keywords in working directory
367
366
368 Run after (re)enabling keyword expansion.
367 Run after (re)enabling keyword expansion.
369
368
370 kwexpand refuses to run if given files contain local changes.
369 kwexpand refuses to run if given files contain local changes.
371 '''
370 '''
372 # 3rd argument sets expansion to True
371 # 3rd argument sets expansion to True
373 _kwfwrite(ui, repo, True, *pats, **opts)
372 _kwfwrite(ui, repo, True, *pats, **opts)
374
373
375 def files(ui, repo, *pats, **opts):
374 def files(ui, repo, *pats, **opts):
376 '''print files currently configured for keyword expansion
375 '''print files currently configured for keyword expansion
377
376
378 Crosscheck which files in working directory are potential targets for
377 Crosscheck which files in working directory are potential targets for
379 keyword expansion.
378 keyword expansion.
380 That is, files matched by [keyword] config patterns but not symlinks.
379 That is, files matched by [keyword] config patterns but not symlinks.
381 '''
380 '''
382 kwt = kwtools['templater']
381 kwt = kwtools['templater']
383 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
382 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
384 modified, added, removed, deleted, unknown, ignored, clean = status
383 modified, added, removed, deleted, unknown, ignored, clean = status
385 files = modified + added + clean + unknown
384 files = util.sort(modified + added + clean + unknown)
386 files.sort()
387 wctx = repo[None]
385 wctx = repo[None]
388 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
386 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
389 cwd = pats and repo.getcwd() or ''
387 cwd = pats and repo.getcwd() or ''
390 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
388 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
391 if opts.get('all') or opts.get('ignore'):
389 if opts.get('all') or opts.get('ignore'):
392 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
390 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
393 for char, filenames in kwfstats:
391 for char, filenames in kwfstats:
394 format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
392 format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
395 for f in filenames:
393 for f in filenames:
396 ui.write(format % repo.pathto(f, cwd))
394 ui.write(format % repo.pathto(f, cwd))
397
395
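To illustrate the output loop above: with --all (or -v) each path is written with its status letter, so a hypothetical run over three configured files might print the lines below; without those flags only the bare paths of the 'K' files appear (file names are invented for illustration).

    K docs/readme.txt
    K src/version.py
    I src/binary.dat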
398 def shrink(ui, repo, *pats, **opts):
396 def shrink(ui, repo, *pats, **opts):
399 '''revert expanded keywords in working directory
397 '''revert expanded keywords in working directory
400
398
401 Run before changing/disabling active keywords
399 Run before changing/disabling active keywords
402 or if you experience problems with "hg import" or "hg merge".
400 or if you experience problems with "hg import" or "hg merge".
403
401
404 kwshrink refuses to run if given files contain local changes.
402 kwshrink refuses to run if given files contain local changes.
405 '''
403 '''
406 # 3rd argument sets expansion to False
404 # 3rd argument sets expansion to False
407 _kwfwrite(ui, repo, False, *pats, **opts)
405 _kwfwrite(ui, repo, False, *pats, **opts)
408
406
409
407
410 def uisetup(ui):
408 def uisetup(ui):
411 '''Collects [keyword] config in kwtools.
409 '''Collects [keyword] config in kwtools.
412 Monkeypatches dispatch._parse if needed.'''
410 Monkeypatches dispatch._parse if needed.'''
413
411
414 for pat, opt in ui.configitems('keyword'):
412 for pat, opt in ui.configitems('keyword'):
415 if opt != 'ignore':
413 if opt != 'ignore':
416 kwtools['inc'].append(pat)
414 kwtools['inc'].append(pat)
417 else:
415 else:
418 kwtools['exc'].append(pat)
416 kwtools['exc'].append(pat)
419
417
420 if kwtools['inc']:
418 if kwtools['inc']:
421 def kwdispatch_parse(ui, args):
419 def kwdispatch_parse(ui, args):
422 '''Monkeypatch dispatch._parse to obtain running hg command.'''
420 '''Monkeypatch dispatch._parse to obtain running hg command.'''
423 cmd, func, args, options, cmdoptions = dispatch_parse(ui, args)
421 cmd, func, args, options, cmdoptions = dispatch_parse(ui, args)
424 kwtools['hgcmd'] = cmd
422 kwtools['hgcmd'] = cmd
425 return cmd, func, args, options, cmdoptions
423 return cmd, func, args, options, cmdoptions
426
424
427 dispatch_parse = dispatch._parse
425 dispatch_parse = dispatch._parse
428 dispatch._parse = kwdispatch_parse
426 dispatch._parse = kwdispatch_parse
429
427
430 def reposetup(ui, repo):
428 def reposetup(ui, repo):
431 '''Sets up repo as kwrepo for keyword substitution.
429 '''Sets up repo as kwrepo for keyword substitution.
432 Overrides the file method to return kwfilelog instead of filelog
430 Overrides the file method to return kwfilelog instead of filelog
433 if the file matches user configuration.
431 if the file matches user configuration.
434 Wraps commit to overwrite configured files with updated
432 Wraps commit to overwrite configured files with updated
435 keyword substitutions.
433 keyword substitutions.
436 Monkeypatches patch and webcommands.'''
434 Monkeypatches patch and webcommands.'''
437
435
438 try:
436 try:
439 if (not repo.local() or not kwtools['inc']
437 if (not repo.local() or not kwtools['inc']
440 or kwtools['hgcmd'] in nokwcommands.split()
438 or kwtools['hgcmd'] in nokwcommands.split()
441 or '.hg' in util.splitpath(repo.root)
439 or '.hg' in util.splitpath(repo.root)
442 or repo._url.startswith('bundle:')):
440 or repo._url.startswith('bundle:')):
443 return
441 return
444 except AttributeError:
442 except AttributeError:
445 pass
443 pass
446
444
447 kwtools['templater'] = kwt = kwtemplater(ui, repo)
445 kwtools['templater'] = kwt = kwtemplater(ui, repo)
448
446
449 class kwrepo(repo.__class__):
447 class kwrepo(repo.__class__):
450 def file(self, f):
448 def file(self, f):
451 if f[0] == '/':
449 if f[0] == '/':
452 f = f[1:]
450 f = f[1:]
453 return kwfilelog(self.sopener, kwt, f)
451 return kwfilelog(self.sopener, kwt, f)
454
452
455 def wread(self, filename):
453 def wread(self, filename):
456 data = super(kwrepo, self).wread(filename)
454 data = super(kwrepo, self).wread(filename)
457 return kwt.wread(filename, data)
455 return kwt.wread(filename, data)
458
456
459 def commit(self, files=None, text='', user=None, date=None,
457 def commit(self, files=None, text='', user=None, date=None,
460 match=None, force=False, force_editor=False,
458 match=None, force=False, force_editor=False,
461 p1=None, p2=None, extra={}, empty_ok=False):
459 p1=None, p2=None, extra={}, empty_ok=False):
462 wlock = lock = None
460 wlock = lock = None
463 _p1 = _p2 = None
461 _p1 = _p2 = None
464 try:
462 try:
465 wlock = self.wlock()
463 wlock = self.wlock()
466 lock = self.lock()
464 lock = self.lock()
467 # store and postpone commit hooks
465 # store and postpone commit hooks
468 commithooks = {}
466 commithooks = {}
469 for name, cmd in ui.configitems('hooks'):
467 for name, cmd in ui.configitems('hooks'):
470 if name.split('.', 1)[0] == 'commit':
468 if name.split('.', 1)[0] == 'commit':
471 commithooks[name] = cmd
469 commithooks[name] = cmd
472 ui.setconfig('hooks', name, None)
470 ui.setconfig('hooks', name, None)
473 if commithooks:
471 if commithooks:
474 # store parents for commit hook environment
472 # store parents for commit hook environment
475 if p1 is None:
473 if p1 is None:
476 _p1, _p2 = repo.dirstate.parents()
474 _p1, _p2 = repo.dirstate.parents()
477 else:
475 else:
478 _p1, _p2 = p1, p2 or nullid
476 _p1, _p2 = p1, p2 or nullid
479 _p1 = hex(_p1)
477 _p1 = hex(_p1)
480 if _p2 == nullid:
478 if _p2 == nullid:
481 _p2 = ''
479 _p2 = ''
482 else:
480 else:
483 _p2 = hex(_p2)
481 _p2 = hex(_p2)
484
482
485 n = super(kwrepo, self).commit(files, text, user, date, match,
483 n = super(kwrepo, self).commit(files, text, user, date, match,
486 force, force_editor, p1, p2,
484 force, force_editor, p1, p2,
487 extra, empty_ok)
485 extra, empty_ok)
488
486
489 # restore commit hooks
487 # restore commit hooks
490 for name, cmd in commithooks.iteritems():
488 for name, cmd in commithooks.iteritems():
491 ui.setconfig('hooks', name, cmd)
489 ui.setconfig('hooks', name, cmd)
492 if n is not None:
490 if n is not None:
493 kwt.overwrite(n, True, None)
491 kwt.overwrite(n, True, None)
494 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
492 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
495 return n
493 return n
496 finally:
494 finally:
497 del wlock, lock
495 del wlock, lock
498
496
499 # monkeypatches
497 # monkeypatches
500 def kwpatchfile_init(self, ui, fname, missing=False):
498 def kwpatchfile_init(self, ui, fname, missing=False):
501 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
499 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
502 rejects or conflicts due to expanded keywords in working dir.'''
500 rejects or conflicts due to expanded keywords in working dir.'''
503 patchfile_init(self, ui, fname, missing)
501 patchfile_init(self, ui, fname, missing)
504 # shrink keywords read from working dir
502 # shrink keywords read from working dir
505 self.lines = kwt.shrinklines(self.fname, self.lines)
503 self.lines = kwt.shrinklines(self.fname, self.lines)
506
504
507 def kw_diff(repo, node1=None, node2=None, match=None,
505 def kw_diff(repo, node1=None, node2=None, match=None,
508 fp=None, changes=None, opts=None):
506 fp=None, changes=None, opts=None):
509 '''Monkeypatch patch.diff to avoid expansion except when
507 '''Monkeypatch patch.diff to avoid expansion except when
510 comparing against working dir.'''
508 comparing against working dir.'''
511 if node2 is not None:
509 if node2 is not None:
512 kwt.matcher = util.never
510 kwt.matcher = util.never
513 elif node1 is not None and node1 != repo['.'].node():
511 elif node1 is not None and node1 != repo['.'].node():
514 kwt.restrict = True
512 kwt.restrict = True
515 patch_diff(repo, node1, node2, match, fp, changes, opts)
513 patch_diff(repo, node1, node2, match, fp, changes, opts)
516
514
517 def kwweb_annotate(web, req, tmpl):
515 def kwweb_annotate(web, req, tmpl):
518 '''Wraps webcommands.annotate turning off keyword expansion.'''
516 '''Wraps webcommands.annotate turning off keyword expansion.'''
519 kwt.matcher = util.never
517 kwt.matcher = util.never
520 return webcommands_annotate(web, req, tmpl)
518 return webcommands_annotate(web, req, tmpl)
521
519
522 def kwweb_changeset(web, req, tmpl):
520 def kwweb_changeset(web, req, tmpl):
523 '''Wraps webcommands.changeset turning off keyword expansion.'''
521 '''Wraps webcommands.changeset turning off keyword expansion.'''
524 kwt.matcher = util.never
522 kwt.matcher = util.never
525 return webcommands_changeset(web, req, tmpl)
523 return webcommands_changeset(web, req, tmpl)
526
524
527 def kwweb_filediff(web, req, tmpl):
525 def kwweb_filediff(web, req, tmpl):
528 '''Wraps webcommands.filediff turning off keyword expansion.'''
526 '''Wraps webcommands.filediff turning off keyword expansion.'''
529 kwt.matcher = util.never
527 kwt.matcher = util.never
530 return webcommands_filediff(web, req, tmpl)
528 return webcommands_filediff(web, req, tmpl)
531
529
532 repo.__class__ = kwrepo
530 repo.__class__ = kwrepo
533
531
534 patchfile_init = patch.patchfile.__init__
532 patchfile_init = patch.patchfile.__init__
535 patch_diff = patch.diff
533 patch_diff = patch.diff
536 webcommands_annotate = webcommands.annotate
534 webcommands_annotate = webcommands.annotate
537 webcommands_changeset = webcommands.changeset
535 webcommands_changeset = webcommands.changeset
538 webcommands_filediff = webcommands.filediff
536 webcommands_filediff = webcommands.filediff
539
537
540 patch.patchfile.__init__ = kwpatchfile_init
538 patch.patchfile.__init__ = kwpatchfile_init
541 patch.diff = kw_diff
539 patch.diff = kw_diff
542 webcommands.annotate = kwweb_annotate
540 webcommands.annotate = kwweb_annotate
543 webcommands.changeset = webcommands.rev = kwweb_changeset
541 webcommands.changeset = webcommands.rev = kwweb_changeset
544 webcommands.filediff = webcommands.diff = kwweb_filediff
542 webcommands.filediff = webcommands.diff = kwweb_filediff
545
543
546
544
547 cmdtable = {
545 cmdtable = {
548 'kwdemo':
546 'kwdemo':
549 (demo,
547 (demo,
550 [('d', 'default', None, _('show default keyword template maps')),
548 [('d', 'default', None, _('show default keyword template maps')),
551 ('f', 'rcfile', [], _('read maps from rcfile'))],
549 ('f', 'rcfile', [], _('read maps from rcfile'))],
552 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
550 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
553 'kwexpand': (expand, commands.walkopts,
551 'kwexpand': (expand, commands.walkopts,
554 _('hg kwexpand [OPTION]... [FILE]...')),
552 _('hg kwexpand [OPTION]... [FILE]...')),
555 'kwfiles':
553 'kwfiles':
556 (files,
554 (files,
557 [('a', 'all', None, _('show keyword status flags of all files')),
555 [('a', 'all', None, _('show keyword status flags of all files')),
558 ('i', 'ignore', None, _('show files excluded from expansion')),
556 ('i', 'ignore', None, _('show files excluded from expansion')),
559 ('u', 'untracked', None, _('additionally show untracked files')),
557 ('u', 'untracked', None, _('additionally show untracked files')),
560 ] + commands.walkopts,
558 ] + commands.walkopts,
561 _('hg kwfiles [OPTION]... [FILE]...')),
559 _('hg kwfiles [OPTION]... [FILE]...')),
562 'kwshrink': (shrink, commands.walkopts,
560 'kwshrink': (shrink, commands.walkopts,
563 _('hg kwshrink [OPTION]... [FILE]...')),
561 _('hg kwshrink [OPTION]... [FILE]...')),
564 }
562 }
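The hunks above also show what this changeset ("util: add sort helper") is about: the build-a-list-then-list.sort() pattern, e.g. the kwfiles change from 'files = modified + added + clean + unknown; files.sort()' to 'files = util.sort(...)', is collapsed into one call. The helper itself lives in mercurial/util.py and is not part of this diff; a minimal sketch, assuming it simply returns any iterable as a sorted list:

    def sort(l):
        # accept any iterable, sort in place once it is a list
        if not isinstance(l, list):
            l = list(l)
        l.sort()
        return l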
@@ -1,2458 +1,2455 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial.node import bin, hex, short
33 from mercurial.node import bin, hex, short
34 from mercurial.repo import RepoError
34 from mercurial.repo import RepoError
35 from mercurial import commands, cmdutil, hg, patch, revlog, util
35 from mercurial import commands, cmdutil, hg, patch, revlog, util
36 from mercurial import repair
36 from mercurial import repair
37 import os, sys, re, errno
37 import os, sys, re, errno
38
38
39 commands.norepo += " qclone"
39 commands.norepo += " qclone"
40
40
41 # Patch names look like Unix file names.
41 # Patch names look like Unix file names.
42 # They must be joinable with queue directory and result in the patch path.
42 # They must be joinable with queue directory and result in the patch path.
43 normname = util.normpath
43 normname = util.normpath
44
44
45 class statusentry:
45 class statusentry:
46 def __init__(self, rev, name=None):
46 def __init__(self, rev, name=None):
47 if not name:
47 if not name:
48 fields = rev.split(':', 1)
48 fields = rev.split(':', 1)
49 if len(fields) == 2:
49 if len(fields) == 2:
50 self.rev, self.name = fields
50 self.rev, self.name = fields
51 else:
51 else:
52 self.rev, self.name = None, None
52 self.rev, self.name = None, None
53 else:
53 else:
54 self.rev, self.name = rev, name
54 self.rev, self.name = rev, name
55
55
56 def __str__(self):
56 def __str__(self):
57 return self.rev + ':' + self.name
57 return self.rev + ':' + self.name
58
58
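Each line of the status file has the form '<changeset hex>:<patch name>'; statusentry splits on the first colon only, so the name part may itself contain colons. A small sketch with an invented hash and name:

    entry = statusentry('0123456789abcdef0123456789abcdef01234567:fix-build.patch')
    # entry.rev  == '0123456789abcdef0123456789abcdef01234567'
    # entry.name == 'fix-build.patch'
    # str(entry) rebuilds the original 'rev:name' line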
59 class queue:
59 class queue:
60 def __init__(self, ui, path, patchdir=None):
60 def __init__(self, ui, path, patchdir=None):
61 self.basepath = path
61 self.basepath = path
62 self.path = patchdir or os.path.join(path, "patches")
62 self.path = patchdir or os.path.join(path, "patches")
63 self.opener = util.opener(self.path)
63 self.opener = util.opener(self.path)
64 self.ui = ui
64 self.ui = ui
65 self.applied = []
65 self.applied = []
66 self.full_series = []
66 self.full_series = []
67 self.applied_dirty = 0
67 self.applied_dirty = 0
68 self.series_dirty = 0
68 self.series_dirty = 0
69 self.series_path = "series"
69 self.series_path = "series"
70 self.status_path = "status"
70 self.status_path = "status"
71 self.guards_path = "guards"
71 self.guards_path = "guards"
72 self.active_guards = None
72 self.active_guards = None
73 self.guards_dirty = False
73 self.guards_dirty = False
74 self._diffopts = None
74 self._diffopts = None
75
75
76 if os.path.exists(self.join(self.series_path)):
76 if os.path.exists(self.join(self.series_path)):
77 self.full_series = self.opener(self.series_path).read().splitlines()
77 self.full_series = self.opener(self.series_path).read().splitlines()
78 self.parse_series()
78 self.parse_series()
79
79
80 if os.path.exists(self.join(self.status_path)):
80 if os.path.exists(self.join(self.status_path)):
81 lines = self.opener(self.status_path).read().splitlines()
81 lines = self.opener(self.status_path).read().splitlines()
82 self.applied = [statusentry(l) for l in lines]
82 self.applied = [statusentry(l) for l in lines]
83
83
84 def diffopts(self):
84 def diffopts(self):
85 if self._diffopts is None:
85 if self._diffopts is None:
86 self._diffopts = patch.diffopts(self.ui)
86 self._diffopts = patch.diffopts(self.ui)
87 return self._diffopts
87 return self._diffopts
88
88
89 def join(self, *p):
89 def join(self, *p):
90 return os.path.join(self.path, *p)
90 return os.path.join(self.path, *p)
91
91
92 def find_series(self, patch):
92 def find_series(self, patch):
93 pre = re.compile("(\s*)([^#]+)")
93 pre = re.compile("(\s*)([^#]+)")
94 index = 0
94 index = 0
95 for l in self.full_series:
95 for l in self.full_series:
96 m = pre.match(l)
96 m = pre.match(l)
97 if m:
97 if m:
98 s = m.group(2)
98 s = m.group(2)
99 s = s.rstrip()
99 s = s.rstrip()
100 if s == patch:
100 if s == patch:
101 return index
101 return index
102 index += 1
102 index += 1
103 return None
103 return None
104
104
105 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
105 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
106
106
107 def parse_series(self):
107 def parse_series(self):
108 self.series = []
108 self.series = []
109 self.series_guards = []
109 self.series_guards = []
110 for l in self.full_series:
110 for l in self.full_series:
111 h = l.find('#')
111 h = l.find('#')
112 if h == -1:
112 if h == -1:
113 patch = l
113 patch = l
114 comment = ''
114 comment = ''
115 elif h == 0:
115 elif h == 0:
116 continue
116 continue
117 else:
117 else:
118 patch = l[:h]
118 patch = l[:h]
119 comment = l[h:]
119 comment = l[h:]
120 patch = patch.strip()
120 patch = patch.strip()
121 if patch:
121 if patch:
122 if patch in self.series:
122 if patch in self.series:
123 raise util.Abort(_('%s appears more than once in %s') %
123 raise util.Abort(_('%s appears more than once in %s') %
124 (patch, self.join(self.series_path)))
124 (patch, self.join(self.series_path)))
125 self.series.append(patch)
125 self.series.append(patch)
126 self.series_guards.append(self.guard_re.findall(comment))
126 self.series_guards.append(self.guard_re.findall(comment))
127
127
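A series file line is a patch name optionally followed by a '#' comment, and guards are '#+name' / '#-name' tokens inside that comment, picked out by guard_re. A worked example with invented names:

    line = 'fix-build.patch #+experimental #-stable'
    h = line.find('#')
    name = line[:h].strip()                     # 'fix-build.patch'
    guards = queue.guard_re.findall(line[h:])   # ['+experimental', '-stable']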
128 def check_guard(self, guard):
128 def check_guard(self, guard):
129 if not guard:
129 if not guard:
130 return _('guard cannot be an empty string')
130 return _('guard cannot be an empty string')
131 bad_chars = '# \t\r\n\f'
131 bad_chars = '# \t\r\n\f'
132 first = guard[0]
132 first = guard[0]
133 for c in '-+':
133 for c in '-+':
134 if first == c:
134 if first == c:
135 return (_('guard %r starts with invalid character: %r') %
135 return (_('guard %r starts with invalid character: %r') %
136 (guard, c))
136 (guard, c))
137 for c in bad_chars:
137 for c in bad_chars:
138 if c in guard:
138 if c in guard:
139 return _('invalid character in guard %r: %r') % (guard, c)
139 return _('invalid character in guard %r: %r') % (guard, c)
140
140
141 def set_active(self, guards):
141 def set_active(self, guards):
142 for guard in guards:
142 for guard in guards:
143 bad = self.check_guard(guard)
143 bad = self.check_guard(guard)
144 if bad:
144 if bad:
145 raise util.Abort(bad)
145 raise util.Abort(bad)
146 guards = dict.fromkeys(guards).keys()
146 guards = util.sort(util.unique(guards))
147 guards.sort()
148 self.ui.debug('active guards: %s\n' % ' '.join(guards))
147 self.ui.debug('active guards: %s\n' % ' '.join(guards))
149 self.active_guards = guards
148 self.active_guards = guards
150 self.guards_dirty = True
149 self.guards_dirty = True
151
150
152 def active(self):
151 def active(self):
153 if self.active_guards is None:
152 if self.active_guards is None:
154 self.active_guards = []
153 self.active_guards = []
155 try:
154 try:
156 guards = self.opener(self.guards_path).read().split()
155 guards = self.opener(self.guards_path).read().split()
157 except IOError, err:
156 except IOError, err:
158 if err.errno != errno.ENOENT: raise
157 if err.errno != errno.ENOENT: raise
159 guards = []
158 guards = []
160 for i, guard in enumerate(guards):
159 for i, guard in enumerate(guards):
161 bad = self.check_guard(guard)
160 bad = self.check_guard(guard)
162 if bad:
161 if bad:
163 self.ui.warn('%s:%d: %s\n' %
162 self.ui.warn('%s:%d: %s\n' %
164 (self.join(self.guards_path), i + 1, bad))
163 (self.join(self.guards_path), i + 1, bad))
165 else:
164 else:
166 self.active_guards.append(guard)
165 self.active_guards.append(guard)
167 return self.active_guards
166 return self.active_guards
168
167
169 def set_guards(self, idx, guards):
168 def set_guards(self, idx, guards):
170 for g in guards:
169 for g in guards:
171 if len(g) < 2:
170 if len(g) < 2:
172 raise util.Abort(_('guard %r too short') % g)
171 raise util.Abort(_('guard %r too short') % g)
173 if g[0] not in '-+':
172 if g[0] not in '-+':
174 raise util.Abort(_('guard %r starts with invalid char') % g)
173 raise util.Abort(_('guard %r starts with invalid char') % g)
175 bad = self.check_guard(g[1:])
174 bad = self.check_guard(g[1:])
176 if bad:
175 if bad:
177 raise util.Abort(bad)
176 raise util.Abort(bad)
178 drop = self.guard_re.sub('', self.full_series[idx])
177 drop = self.guard_re.sub('', self.full_series[idx])
179 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
178 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
180 self.parse_series()
179 self.parse_series()
181 self.series_dirty = True
180 self.series_dirty = True
182
181
183 def pushable(self, idx):
182 def pushable(self, idx):
184 if isinstance(idx, str):
183 if isinstance(idx, str):
185 idx = self.series.index(idx)
184 idx = self.series.index(idx)
186 patchguards = self.series_guards[idx]
185 patchguards = self.series_guards[idx]
187 if not patchguards:
186 if not patchguards:
188 return True, None
187 return True, None
189 default = False
188 default = False
190 guards = self.active()
189 guards = self.active()
191 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
190 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
192 if exactneg:
191 if exactneg:
193 return False, exactneg[0]
192 return False, exactneg[0]
194 pos = [g for g in patchguards if g[0] == '+']
193 pos = [g for g in patchguards if g[0] == '+']
195 exactpos = [g for g in pos if g[1:] in guards]
194 exactpos = [g for g in pos if g[1:] in guards]
196 if pos:
195 if pos:
197 if exactpos:
196 if exactpos:
198 return True, exactpos[0]
197 return True, exactpos[0]
199 return False, pos
198 return False, pos
200 return True, ''
199 return True, ''
201
200
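How the matching above plays out, assuming the active guards are just ['stable'] (patch and guard names invented):

    #   'a.patch #-stable'  ->  (False, '-stable')    exact negative match wins
    #   'b.patch #+stable'  ->  (True,  '+stable')    exact positive match
    #   'c.patch #+devel'   ->  (False, ['+devel'])   has positive guards, none active
    #   'd.patch'           ->  (True,  None)         unguarded, always pushable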
202 def explain_pushable(self, idx, all_patches=False):
201 def explain_pushable(self, idx, all_patches=False):
203 write = all_patches and self.ui.write or self.ui.warn
202 write = all_patches and self.ui.write or self.ui.warn
204 if all_patches or self.ui.verbose:
203 if all_patches or self.ui.verbose:
205 if isinstance(idx, str):
204 if isinstance(idx, str):
206 idx = self.series.index(idx)
205 idx = self.series.index(idx)
207 pushable, why = self.pushable(idx)
206 pushable, why = self.pushable(idx)
208 if all_patches and pushable:
207 if all_patches and pushable:
209 if why is None:
208 if why is None:
210 write(_('allowing %s - no guards in effect\n') %
209 write(_('allowing %s - no guards in effect\n') %
211 self.series[idx])
210 self.series[idx])
212 else:
211 else:
213 if not why:
212 if not why:
214 write(_('allowing %s - no matching negative guards\n') %
213 write(_('allowing %s - no matching negative guards\n') %
215 self.series[idx])
214 self.series[idx])
216 else:
215 else:
217 write(_('allowing %s - guarded by %r\n') %
216 write(_('allowing %s - guarded by %r\n') %
218 (self.series[idx], why))
217 (self.series[idx], why))
219 if not pushable:
218 if not pushable:
220 if why:
219 if why:
221 write(_('skipping %s - guarded by %r\n') %
220 write(_('skipping %s - guarded by %r\n') %
222 (self.series[idx], why))
221 (self.series[idx], why))
223 else:
222 else:
224 write(_('skipping %s - no matching guards\n') %
223 write(_('skipping %s - no matching guards\n') %
225 self.series[idx])
224 self.series[idx])
226
225
227 def save_dirty(self):
226 def save_dirty(self):
228 def write_list(items, path):
227 def write_list(items, path):
229 fp = self.opener(path, 'w')
228 fp = self.opener(path, 'w')
230 for i in items:
229 for i in items:
231 fp.write("%s\n" % i)
230 fp.write("%s\n" % i)
232 fp.close()
231 fp.close()
233 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
232 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
234 if self.series_dirty: write_list(self.full_series, self.series_path)
233 if self.series_dirty: write_list(self.full_series, self.series_path)
235 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
234 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
236
235
237 def readheaders(self, patch):
236 def readheaders(self, patch):
238 def eatdiff(lines):
237 def eatdiff(lines):
239 while lines:
238 while lines:
240 l = lines[-1]
239 l = lines[-1]
241 if (l.startswith("diff -") or
240 if (l.startswith("diff -") or
242 l.startswith("Index:") or
241 l.startswith("Index:") or
243 l.startswith("===========")):
242 l.startswith("===========")):
244 del lines[-1]
243 del lines[-1]
245 else:
244 else:
246 break
245 break
247 def eatempty(lines):
246 def eatempty(lines):
248 while lines:
247 while lines:
249 l = lines[-1]
248 l = lines[-1]
250 if re.match('\s*$', l):
249 if re.match('\s*$', l):
251 del lines[-1]
250 del lines[-1]
252 else:
251 else:
253 break
252 break
254
253
255 pf = self.join(patch)
254 pf = self.join(patch)
256 message = []
255 message = []
257 comments = []
256 comments = []
258 user = None
257 user = None
259 date = None
258 date = None
260 format = None
259 format = None
261 subject = None
260 subject = None
262 diffstart = 0
261 diffstart = 0
263
262
264 for line in file(pf):
263 for line in file(pf):
265 line = line.rstrip()
264 line = line.rstrip()
266 if line.startswith('diff --git'):
265 if line.startswith('diff --git'):
267 diffstart = 2
266 diffstart = 2
268 break
267 break
269 if diffstart:
268 if diffstart:
270 if line.startswith('+++ '):
269 if line.startswith('+++ '):
271 diffstart = 2
270 diffstart = 2
272 break
271 break
273 if line.startswith("--- "):
272 if line.startswith("--- "):
274 diffstart = 1
273 diffstart = 1
275 continue
274 continue
276 elif format == "hgpatch":
275 elif format == "hgpatch":
277 # parse values when importing the result of an hg export
276 # parse values when importing the result of an hg export
278 if line.startswith("# User "):
277 if line.startswith("# User "):
279 user = line[7:]
278 user = line[7:]
280 elif line.startswith("# Date "):
279 elif line.startswith("# Date "):
281 date = line[7:]
280 date = line[7:]
282 elif not line.startswith("# ") and line:
281 elif not line.startswith("# ") and line:
283 message.append(line)
282 message.append(line)
284 format = None
283 format = None
285 elif line == '# HG changeset patch':
284 elif line == '# HG changeset patch':
286 format = "hgpatch"
285 format = "hgpatch"
287 elif (format != "tagdone" and (line.startswith("Subject: ") or
286 elif (format != "tagdone" and (line.startswith("Subject: ") or
288 line.startswith("subject: "))):
287 line.startswith("subject: "))):
289 subject = line[9:]
288 subject = line[9:]
290 format = "tag"
289 format = "tag"
291 elif (format != "tagdone" and (line.startswith("From: ") or
290 elif (format != "tagdone" and (line.startswith("From: ") or
292 line.startswith("from: "))):
291 line.startswith("from: "))):
293 user = line[6:]
292 user = line[6:]
294 format = "tag"
293 format = "tag"
295 elif format == "tag" and line == "":
294 elif format == "tag" and line == "":
296 # when looking for tags (subject: from: etc) they
295 # when looking for tags (subject: from: etc) they
297 # end once you find a blank line in the source
296 # end once you find a blank line in the source
298 format = "tagdone"
297 format = "tagdone"
299 elif message or line:
298 elif message or line:
300 message.append(line)
299 message.append(line)
301 comments.append(line)
300 comments.append(line)
302
301
303 eatdiff(message)
302 eatdiff(message)
304 eatdiff(comments)
303 eatdiff(comments)
305 eatempty(message)
304 eatempty(message)
306 eatempty(comments)
305 eatempty(comments)
307
306
308 # make sure message isn't empty
307 # make sure message isn't empty
309 if format and format.startswith("tag") and subject:
308 if format and format.startswith("tag") and subject:
310 message.insert(0, "")
309 message.insert(0, "")
311 message.insert(0, subject)
310 message.insert(0, subject)
312 return (message, comments, user, date, diffstart > 1)
311 return (message, comments, user, date, diffstart > 1)
313
312
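For a patch written by 'hg export' the parsing above works out as: '# HG changeset patch' switches to hgpatch mode, '# User' and '# Date' fill in user and date, the remaining non-'#' header lines become the message, and patchfound becomes true once a '+++ ' or 'diff --git' line is reached. A hypothetical header and the values readheaders() would return:

    #   # HG changeset patch
    #   # User Jane Doe <jane@example.com>
    #   # Date 1200000000 0
    #   fix the frobnicator
    #
    #   diff -r 000000000000 -r 111111111111 frob.py
    #   --- a/frob.py ...
    #   +++ b/frob.py ...
    #
    # user='Jane Doe <jane@example.com>', date='1200000000 0',
    # message=['fix the frobnicator'], patchfound=True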
314 def removeundo(self, repo):
313 def removeundo(self, repo):
315 undo = repo.sjoin('undo')
314 undo = repo.sjoin('undo')
316 if not os.path.exists(undo):
315 if not os.path.exists(undo):
317 return
316 return
318 try:
317 try:
319 os.unlink(undo)
318 os.unlink(undo)
320 except OSError, inst:
319 except OSError, inst:
321 self.ui.warn('error removing undo: %s\n' % str(inst))
320 self.ui.warn('error removing undo: %s\n' % str(inst))
322
321
323 def printdiff(self, repo, node1, node2=None, files=None,
322 def printdiff(self, repo, node1, node2=None, files=None,
324 fp=None, changes=None, opts={}):
323 fp=None, changes=None, opts={}):
325 m = cmdutil.match(repo, files, opts)
324 m = cmdutil.match(repo, files, opts)
326 patch.diff(repo, node1, node2, m, fp, changes, self.diffopts())
325 patch.diff(repo, node1, node2, m, fp, changes, self.diffopts())
327
326
328 def mergeone(self, repo, mergeq, head, patch, rev):
327 def mergeone(self, repo, mergeq, head, patch, rev):
329 # first try just applying the patch
328 # first try just applying the patch
330 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 (err, n) = self.apply(repo, [ patch ], update_status=False,
331 strict=True, merge=rev)
330 strict=True, merge=rev)
332
331
333 if err == 0:
332 if err == 0:
334 return (err, n)
333 return (err, n)
335
334
336 if n is None:
335 if n is None:
337 raise util.Abort(_("apply failed for patch %s") % patch)
336 raise util.Abort(_("apply failed for patch %s") % patch)
338
337
339 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 self.ui.warn("patch didn't work out, merging %s\n" % patch)
340
339
341 # apply failed, strip away that rev and merge.
340 # apply failed, strip away that rev and merge.
342 hg.clean(repo, head)
341 hg.clean(repo, head)
343 self.strip(repo, n, update=False, backup='strip')
342 self.strip(repo, n, update=False, backup='strip')
344
343
345 ctx = repo[rev]
344 ctx = repo[rev]
346 ret = hg.merge(repo, rev)
345 ret = hg.merge(repo, rev)
347 if ret:
346 if ret:
348 raise util.Abort(_("update returned %d") % ret)
347 raise util.Abort(_("update returned %d") % ret)
349 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
348 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
350 if n == None:
349 if n == None:
351 raise util.Abort(_("repo commit failed"))
350 raise util.Abort(_("repo commit failed"))
352 try:
351 try:
353 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
354 except:
353 except:
355 raise util.Abort(_("unable to read %s") % patch)
354 raise util.Abort(_("unable to read %s") % patch)
356
355
357 patchf = self.opener(patch, "w")
356 patchf = self.opener(patch, "w")
358 if comments:
357 if comments:
359 comments = "\n".join(comments) + '\n\n'
358 comments = "\n".join(comments) + '\n\n'
360 patchf.write(comments)
359 patchf.write(comments)
361 self.printdiff(repo, head, n, fp=patchf)
360 self.printdiff(repo, head, n, fp=patchf)
362 patchf.close()
361 patchf.close()
363 self.removeundo(repo)
362 self.removeundo(repo)
364 return (0, n)
363 return (0, n)
365
364
366 def qparents(self, repo, rev=None):
365 def qparents(self, repo, rev=None):
367 if rev is None:
366 if rev is None:
368 (p1, p2) = repo.dirstate.parents()
367 (p1, p2) = repo.dirstate.parents()
369 if p2 == revlog.nullid:
368 if p2 == revlog.nullid:
370 return p1
369 return p1
371 if len(self.applied) == 0:
370 if len(self.applied) == 0:
372 return None
371 return None
373 return revlog.bin(self.applied[-1].rev)
372 return revlog.bin(self.applied[-1].rev)
374 pp = repo.changelog.parents(rev)
373 pp = repo.changelog.parents(rev)
375 if pp[1] != revlog.nullid:
374 if pp[1] != revlog.nullid:
376 arevs = [ x.rev for x in self.applied ]
375 arevs = [ x.rev for x in self.applied ]
377 p0 = revlog.hex(pp[0])
376 p0 = revlog.hex(pp[0])
378 p1 = revlog.hex(pp[1])
377 p1 = revlog.hex(pp[1])
379 if p0 in arevs:
378 if p0 in arevs:
380 return pp[0]
379 return pp[0]
381 if p1 in arevs:
380 if p1 in arevs:
382 return pp[1]
381 return pp[1]
383 return pp[0]
382 return pp[0]
384
383
385 def mergepatch(self, repo, mergeq, series):
384 def mergepatch(self, repo, mergeq, series):
386 if len(self.applied) == 0:
385 if len(self.applied) == 0:
387 # each of the patches merged in will have two parents. This
386 # each of the patches merged in will have two parents. This
388 # can confuse the qrefresh, qdiff, and strip code because it
387 # can confuse the qrefresh, qdiff, and strip code because it
389 # needs to know which parent is actually in the patch queue.
388 # needs to know which parent is actually in the patch queue.
390 # so, we insert a merge marker with only one parent. This way
389 # so, we insert a merge marker with only one parent. This way
391 # the first patch in the queue is never a merge patch
390 # the first patch in the queue is never a merge patch
392 #
391 #
393 pname = ".hg.patches.merge.marker"
392 pname = ".hg.patches.merge.marker"
394 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
395 self.removeundo(repo)
394 self.removeundo(repo)
396 self.applied.append(statusentry(revlog.hex(n), pname))
395 self.applied.append(statusentry(revlog.hex(n), pname))
397 self.applied_dirty = 1
396 self.applied_dirty = 1
398
397
399 head = self.qparents(repo)
398 head = self.qparents(repo)
400
399
401 for patch in series:
400 for patch in series:
402 patch = mergeq.lookup(patch, strict=True)
401 patch = mergeq.lookup(patch, strict=True)
403 if not patch:
402 if not patch:
404 self.ui.warn("patch %s does not exist\n" % patch)
403 self.ui.warn("patch %s does not exist\n" % patch)
405 return (1, None)
404 return (1, None)
406 pushable, reason = self.pushable(patch)
405 pushable, reason = self.pushable(patch)
407 if not pushable:
406 if not pushable:
408 self.explain_pushable(patch, all_patches=True)
407 self.explain_pushable(patch, all_patches=True)
409 continue
408 continue
410 info = mergeq.isapplied(patch)
409 info = mergeq.isapplied(patch)
411 if not info:
410 if not info:
412 self.ui.warn("patch %s is not applied\n" % patch)
411 self.ui.warn("patch %s is not applied\n" % patch)
413 return (1, None)
412 return (1, None)
414 rev = revlog.bin(info[1])
413 rev = revlog.bin(info[1])
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
414 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
416 if head:
415 if head:
417 self.applied.append(statusentry(revlog.hex(head), patch))
416 self.applied.append(statusentry(revlog.hex(head), patch))
418 self.applied_dirty = 1
417 self.applied_dirty = 1
419 if err:
418 if err:
420 return (err, head)
419 return (err, head)
421 self.save_dirty()
420 self.save_dirty()
422 return (0, head)
421 return (0, head)
423
422
424 def patch(self, repo, patchfile):
423 def patch(self, repo, patchfile):
425 '''Apply patchfile to the working directory.
424 '''Apply patchfile to the working directory.
426 patchfile: file name of patch'''
425 patchfile: file name of patch'''
427 files = {}
426 files = {}
428 try:
427 try:
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
428 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 files=files)
429 files=files)
431 except Exception, inst:
430 except Exception, inst:
432 self.ui.note(str(inst) + '\n')
431 self.ui.note(str(inst) + '\n')
433 if not self.ui.verbose:
432 if not self.ui.verbose:
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
433 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 return (False, files, False)
434 return (False, files, False)
436
435
437 return (True, files, fuzz)
436 return (True, files, fuzz)
438
437
439 def apply(self, repo, series, list=False, update_status=True,
438 def apply(self, repo, series, list=False, update_status=True,
440 strict=False, patchdir=None, merge=None, all_files={}):
439 strict=False, patchdir=None, merge=None, all_files={}):
441 wlock = lock = tr = None
440 wlock = lock = tr = None
442 try:
441 try:
443 wlock = repo.wlock()
442 wlock = repo.wlock()
444 lock = repo.lock()
443 lock = repo.lock()
445 tr = repo.transaction()
444 tr = repo.transaction()
446 try:
445 try:
447 ret = self._apply(repo, series, list, update_status,
446 ret = self._apply(repo, series, list, update_status,
448 strict, patchdir, merge, all_files=all_files)
447 strict, patchdir, merge, all_files=all_files)
449 tr.close()
448 tr.close()
450 self.save_dirty()
449 self.save_dirty()
451 return ret
450 return ret
452 except:
451 except:
453 try:
452 try:
454 tr.abort()
453 tr.abort()
455 finally:
454 finally:
456 repo.invalidate()
455 repo.invalidate()
457 repo.dirstate.invalidate()
456 repo.dirstate.invalidate()
458 raise
457 raise
459 finally:
458 finally:
460 del tr, lock, wlock
459 del tr, lock, wlock
461 self.removeundo(repo)
460 self.removeundo(repo)
462
461
463 def _apply(self, repo, series, list=False, update_status=True,
462 def _apply(self, repo, series, list=False, update_status=True,
464 strict=False, patchdir=None, merge=None, all_files={}):
463 strict=False, patchdir=None, merge=None, all_files={}):
465 # TODO unify with commands.py
464 # TODO unify with commands.py
466 if not patchdir:
465 if not patchdir:
467 patchdir = self.path
466 patchdir = self.path
468 err = 0
467 err = 0
469 n = None
468 n = None
470 for patchname in series:
469 for patchname in series:
471 pushable, reason = self.pushable(patchname)
470 pushable, reason = self.pushable(patchname)
472 if not pushable:
471 if not pushable:
473 self.explain_pushable(patchname, all_patches=True)
472 self.explain_pushable(patchname, all_patches=True)
474 continue
473 continue
475 self.ui.warn("applying %s\n" % patchname)
474 self.ui.warn("applying %s\n" % patchname)
476 pf = os.path.join(patchdir, patchname)
475 pf = os.path.join(patchdir, patchname)
477
476
478 try:
477 try:
479 message, comments, user, date, patchfound = self.readheaders(patchname)
478 message, comments, user, date, patchfound = self.readheaders(patchname)
480 except:
479 except:
481 self.ui.warn("Unable to read %s\n" % patchname)
480 self.ui.warn("Unable to read %s\n" % patchname)
482 err = 1
481 err = 1
483 break
482 break
484
483
485 if not message:
484 if not message:
486 message = "imported patch %s\n" % patchname
485 message = "imported patch %s\n" % patchname
487 else:
486 else:
488 if list:
487 if list:
489 message.append("\nimported patch %s" % patchname)
488 message.append("\nimported patch %s" % patchname)
490 message = '\n'.join(message)
489 message = '\n'.join(message)
491
490
492 (patcherr, files, fuzz) = self.patch(repo, pf)
491 (patcherr, files, fuzz) = self.patch(repo, pf)
493 all_files.update(files)
492 all_files.update(files)
494 patcherr = not patcherr
493 patcherr = not patcherr
495
494
496 if merge and files:
495 if merge and files:
497 # Mark as removed/merged and update dirstate parent info
496 # Mark as removed/merged and update dirstate parent info
498 removed = []
497 removed = []
499 merged = []
498 merged = []
500 for f in files:
499 for f in files:
501 if os.path.exists(repo.wjoin(f)):
500 if os.path.exists(repo.wjoin(f)):
502 merged.append(f)
501 merged.append(f)
503 else:
502 else:
504 removed.append(f)
503 removed.append(f)
505 for f in removed:
504 for f in removed:
506 repo.dirstate.remove(f)
505 repo.dirstate.remove(f)
507 for f in merged:
506 for f in merged:
508 repo.dirstate.merge(f)
507 repo.dirstate.merge(f)
509 p1, p2 = repo.dirstate.parents()
508 p1, p2 = repo.dirstate.parents()
510 repo.dirstate.setparents(p1, merge)
509 repo.dirstate.setparents(p1, merge)
511
510
512 files = patch.updatedir(self.ui, repo, files)
511 files = patch.updatedir(self.ui, repo, files)
513 match = cmdutil.matchfiles(repo, files or [])
512 match = cmdutil.matchfiles(repo, files or [])
514 n = repo.commit(files, message, user, date, match=match,
513 n = repo.commit(files, message, user, date, match=match,
515 force=True)
514 force=True)
516
515
517 if n == None:
516 if n == None:
518 raise util.Abort(_("repo commit failed"))
517 raise util.Abort(_("repo commit failed"))
519
518
520 if update_status:
519 if update_status:
521 self.applied.append(statusentry(revlog.hex(n), patchname))
520 self.applied.append(statusentry(revlog.hex(n), patchname))
522
521
523 if patcherr:
522 if patcherr:
524 if not patchfound:
523 if not patchfound:
525 self.ui.warn("patch %s is empty\n" % patchname)
524 self.ui.warn("patch %s is empty\n" % patchname)
526 err = 0
525 err = 0
527 else:
526 else:
528 self.ui.warn("patch failed, rejects left in working dir\n")
527 self.ui.warn("patch failed, rejects left in working dir\n")
529 err = 1
528 err = 1
530 break
529 break
531
530
532 if fuzz and strict:
531 if fuzz and strict:
533 self.ui.warn("fuzz found when applying patch, stopping\n")
532 self.ui.warn("fuzz found when applying patch, stopping\n")
534 err = 1
533 err = 1
535 break
534 break
536 return (err, n)
535 return (err, n)
537
536
538 def _clean_series(self, patches):
537 def _clean_series(self, patches):
539 indices = [self.find_series(p) for p in patches]
538 indices = util.sort([self.find_series(p) for p in patches])
540 indices.sort()
541 for i in indices[-1::-1]:
539 for i in indices[-1::-1]:
542 del self.full_series[i]
540 del self.full_series[i]
543 self.parse_series()
541 self.parse_series()
544 self.series_dirty = 1
542 self.series_dirty = 1
545
543
546 def finish(self, repo, revs):
544 def finish(self, repo, revs):
547 revs.sort()
545 revs.sort()
548 firstrev = repo.changelog.rev(revlog.bin(self.applied[0].rev))
546 firstrev = repo[self.applied[0].rev].rev()
549 appliedbase = 0
547 appliedbase = 0
550 patches = []
548 patches = []
551 for rev in revs:
549 for rev in util.sort(revs):
552 if rev < firstrev:
550 if rev < firstrev:
553 raise util.Abort(_('revision %d is not managed') % rev)
551 raise util.Abort(_('revision %d is not managed') % rev)
554 base = revlog.bin(self.applied[appliedbase].rev)
552 base = revlog.bin(self.applied[appliedbase].rev)
555 node = repo.changelog.node(rev)
553 node = repo.changelog.node(rev)
556 if node != base:
554 if node != base:
557 raise util.Abort(_('cannot delete revision %d above '
555 raise util.Abort(_('cannot delete revision %d above '
558 'applied patches') % rev)
556 'applied patches') % rev)
559 patches.append(self.applied[appliedbase].name)
557 patches.append(self.applied[appliedbase].name)
560 appliedbase += 1
558 appliedbase += 1
561
559
562 r = self.qrepo()
560 r = self.qrepo()
563 if r:
561 if r:
564 r.remove(patches, True)
562 r.remove(patches, True)
565 else:
563 else:
566 for p in patches:
564 for p in patches:
567 os.unlink(self.join(p))
565 os.unlink(self.join(p))
568
566
569 del self.applied[:appliedbase]
567 del self.applied[:appliedbase]
570 self.applied_dirty = 1
568 self.applied_dirty = 1
571 self._clean_series(patches)
569 self._clean_series(patches)
572
570
573 def delete(self, repo, patches, opts):
571 def delete(self, repo, patches, opts):
574 if not patches and not opts.get('rev'):
572 if not patches and not opts.get('rev'):
575 raise util.Abort(_('qdelete requires at least one revision or '
573 raise util.Abort(_('qdelete requires at least one revision or '
576 'patch name'))
574 'patch name'))
577
575
578 realpatches = []
576 realpatches = []
579 for patch in patches:
577 for patch in patches:
580 patch = self.lookup(patch, strict=True)
578 patch = self.lookup(patch, strict=True)
581 info = self.isapplied(patch)
579 info = self.isapplied(patch)
582 if info:
580 if info:
583 raise util.Abort(_("cannot delete applied patch %s") % patch)
581 raise util.Abort(_("cannot delete applied patch %s") % patch)
584 if patch not in self.series:
582 if patch not in self.series:
585 raise util.Abort(_("patch %s not in series file") % patch)
583 raise util.Abort(_("patch %s not in series file") % patch)
586 realpatches.append(patch)
584 realpatches.append(patch)
587
585
588 appliedbase = 0
586 appliedbase = 0
589 if opts.get('rev'):
587 if opts.get('rev'):
590 if not self.applied:
588 if not self.applied:
591 raise util.Abort(_('no patches applied'))
589 raise util.Abort(_('no patches applied'))
592 revs = cmdutil.revrange(repo, opts['rev'])
590 revs = cmdutil.revrange(repo, opts['rev'])
593 if len(revs) > 1 and revs[0] > revs[1]:
591 if len(revs) > 1 and revs[0] > revs[1]:
594 revs.reverse()
592 revs.reverse()
595 for rev in revs:
593 for rev in revs:
596 if appliedbase >= len(self.applied):
594 if appliedbase >= len(self.applied):
597 raise util.Abort(_("revision %d is not managed") % rev)
595 raise util.Abort(_("revision %d is not managed") % rev)
598
596
599 base = revlog.bin(self.applied[appliedbase].rev)
597 base = revlog.bin(self.applied[appliedbase].rev)
600 node = repo.changelog.node(rev)
598 node = repo.changelog.node(rev)
601 if node != base:
599 if node != base:
602 raise util.Abort(_("cannot delete revision %d above "
600 raise util.Abort(_("cannot delete revision %d above "
603 "applied patches") % rev)
601 "applied patches") % rev)
604 realpatches.append(self.applied[appliedbase].name)
602 realpatches.append(self.applied[appliedbase].name)
605 appliedbase += 1
603 appliedbase += 1
606
604
607 if not opts.get('keep'):
605 if not opts.get('keep'):
608 r = self.qrepo()
606 r = self.qrepo()
609 if r:
607 if r:
610 r.remove(realpatches, True)
608 r.remove(realpatches, True)
611 else:
609 else:
612 for p in realpatches:
610 for p in realpatches:
613 os.unlink(self.join(p))
611 os.unlink(self.join(p))
614
612
615 if appliedbase:
613 if appliedbase:
616 del self.applied[:appliedbase]
614 del self.applied[:appliedbase]
617 self.applied_dirty = 1
615 self.applied_dirty = 1
618 self._clean_series(realpatches)
616 self._clean_series(realpatches)
619
617
620 def check_toppatch(self, repo):
618 def check_toppatch(self, repo):
621 if len(self.applied) > 0:
619 if len(self.applied) > 0:
622 top = revlog.bin(self.applied[-1].rev)
620 top = revlog.bin(self.applied[-1].rev)
623 pp = repo.dirstate.parents()
621 pp = repo.dirstate.parents()
624 if top not in pp:
622 if top not in pp:
625 raise util.Abort(_("working directory revision is not qtip"))
623 raise util.Abort(_("working directory revision is not qtip"))
626 return top
624 return top
627 return None
625 return None
628 def check_localchanges(self, repo, force=False, refresh=True):
626 def check_localchanges(self, repo, force=False, refresh=True):
629 m, a, r, d = repo.status()[:4]
627 m, a, r, d = repo.status()[:4]
630 if m or a or r or d:
628 if m or a or r or d:
631 if not force:
629 if not force:
632 if refresh:
630 if refresh:
633 raise util.Abort(_("local changes found, refresh first"))
631 raise util.Abort(_("local changes found, refresh first"))
634 else:
632 else:
635 raise util.Abort(_("local changes found"))
633 raise util.Abort(_("local changes found"))
636 return m, a, r, d
634 return m, a, r, d
637
635
638 _reserved = ('series', 'status', 'guards')
636 _reserved = ('series', 'status', 'guards')
639 def check_reserved_name(self, name):
637 def check_reserved_name(self, name):
640 if (name in self._reserved or name.startswith('.hg')
638 if (name in self._reserved or name.startswith('.hg')
641 or name.startswith('.mq')):
639 or name.startswith('.mq')):
642 raise util.Abort(_('"%s" cannot be used as the name of a patch')
640 raise util.Abort(_('"%s" cannot be used as the name of a patch')
643 % name)
641 % name)
644
642
645 def new(self, repo, patch, *pats, **opts):
643 def new(self, repo, patch, *pats, **opts):
646 msg = opts.get('msg')
644 msg = opts.get('msg')
647 force = opts.get('force')
645 force = opts.get('force')
648 user = opts.get('user')
646 user = opts.get('user')
649 date = opts.get('date')
647 date = opts.get('date')
650 if date:
648 if date:
651 date = util.parsedate(date)
649 date = util.parsedate(date)
652 self.check_reserved_name(patch)
650 self.check_reserved_name(patch)
653 if os.path.exists(self.join(patch)):
651 if os.path.exists(self.join(patch)):
654 raise util.Abort(_('patch "%s" already exists') % patch)
652 raise util.Abort(_('patch "%s" already exists') % patch)
655 if opts.get('include') or opts.get('exclude') or pats:
653 if opts.get('include') or opts.get('exclude') or pats:
656 match = cmdutil.match(repo, pats, opts)
654 match = cmdutil.match(repo, pats, opts)
657 m, a, r, d = repo.status(match=match)[:4]
655 m, a, r, d = repo.status(match=match)[:4]
658 else:
656 else:
659 m, a, r, d = self.check_localchanges(repo, force)
657 m, a, r, d = self.check_localchanges(repo, force)
660 match = cmdutil.match(repo, m + a + r)
658 match = cmdutil.match(repo, m + a + r)
661 commitfiles = m + a + r
659 commitfiles = m + a + r
662 self.check_toppatch(repo)
660 self.check_toppatch(repo)
663 wlock = repo.wlock()
661 wlock = repo.wlock()
664 try:
662 try:
665 insert = self.full_series_end()
663 insert = self.full_series_end()
666 commitmsg = msg and msg or ("[mq]: %s" % patch)
664 commitmsg = msg and msg or ("[mq]: %s" % patch)
667 n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
665 n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
668 if n == None:
666 if n == None:
669 raise util.Abort(_("repo commit failed"))
667 raise util.Abort(_("repo commit failed"))
670 self.full_series[insert:insert] = [patch]
668 self.full_series[insert:insert] = [patch]
671 self.applied.append(statusentry(revlog.hex(n), patch))
669 self.applied.append(statusentry(revlog.hex(n), patch))
672 self.parse_series()
670 self.parse_series()
673 self.series_dirty = 1
671 self.series_dirty = 1
674 self.applied_dirty = 1
672 self.applied_dirty = 1
675 p = self.opener(patch, "w")
673 p = self.opener(patch, "w")
676 if date:
674 if date:
677 p.write("# HG changeset patch\n")
675 p.write("# HG changeset patch\n")
678 if user:
676 if user:
679 p.write("# User " + user + "\n")
677 p.write("# User " + user + "\n")
680 p.write("# Date %d %d\n" % date)
678 p.write("# Date %d %d\n" % date)
681 p.write("\n")
679 p.write("\n")
682 elif user:
680 elif user:
683 p.write("From: " + user + "\n")
681 p.write("From: " + user + "\n")
684 p.write("\n")
682 p.write("\n")
685 if msg:
683 if msg:
686 msg = msg + "\n"
684 msg = msg + "\n"
687 p.write(msg)
685 p.write(msg)
688 p.close()
686 p.close()
689 wlock = None
687 wlock = None
690 r = self.qrepo()
688 r = self.qrepo()
691 if r: r.add([patch])
689 if r: r.add([patch])
692 if commitfiles:
690 if commitfiles:
693 self.refresh(repo, short=True, git=opts.get('git'))
691 self.refresh(repo, short=True, git=opts.get('git'))
694 self.removeundo(repo)
692 self.removeundo(repo)
695 finally:
693 finally:
696 del wlock
694 del wlock
697
695
698 def strip(self, repo, rev, update=True, backup="all", force=None):
696 def strip(self, repo, rev, update=True, backup="all", force=None):
699 wlock = lock = None
697 wlock = lock = None
700 try:
698 try:
701 wlock = repo.wlock()
699 wlock = repo.wlock()
702 lock = repo.lock()
700 lock = repo.lock()
703
701
704 if update:
702 if update:
705 self.check_localchanges(repo, force=force, refresh=False)
703 self.check_localchanges(repo, force=force, refresh=False)
706 urev = self.qparents(repo, rev)
704 urev = self.qparents(repo, rev)
707 hg.clean(repo, urev)
705 hg.clean(repo, urev)
708 repo.dirstate.write()
706 repo.dirstate.write()
709
707
710 self.removeundo(repo)
708 self.removeundo(repo)
711 repair.strip(self.ui, repo, rev, backup)
709 repair.strip(self.ui, repo, rev, backup)
712 # strip may have unbundled a set of backed up revisions after
710 # strip may have unbundled a set of backed up revisions after
713 # the actual strip
711 # the actual strip
714 self.removeundo(repo)
712 self.removeundo(repo)
715 finally:
713 finally:
716 del lock, wlock
714 del lock, wlock
717
715
718 def isapplied(self, patch):
716 def isapplied(self, patch):
719 """returns (index, rev, patch)"""
717 """returns (index, rev, patch)"""
720 for i in xrange(len(self.applied)):
718 for i in xrange(len(self.applied)):
721 a = self.applied[i]
719 a = self.applied[i]
722 if a.name == patch:
720 if a.name == patch:
723 return (i, a.rev, a.name)
721 return (i, a.rev, a.name)
724 return None
722 return None
725
723
726 # if the exact patch name does not exist, we try a few
724 # if the exact patch name does not exist, we try a few
727 # variations. If strict is passed, we try only #1
725 # variations. If strict is passed, we try only #1
728 #
726 #
729 # 1) a number to indicate an offset in the series file
727 # 1) a number to indicate an offset in the series file
730 # 2) a unique substring of the patch name was given
728 # 2) a unique substring of the patch name was given
731 # 3) patchname[-+]num to indicate an offset in the series file
729 # 3) patchname[-+]num to indicate an offset in the series file
732 def lookup(self, patch, strict=False):
730 def lookup(self, patch, strict=False):
733 patch = patch and str(patch)
731 patch = patch and str(patch)
734
732
735 def partial_name(s):
733 def partial_name(s):
736 if s in self.series:
734 if s in self.series:
737 return s
735 return s
738 matches = [x for x in self.series if s in x]
736 matches = [x for x in self.series if s in x]
739 if len(matches) > 1:
737 if len(matches) > 1:
740 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
738 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
741 for m in matches:
739 for m in matches:
742 self.ui.warn(' %s\n' % m)
740 self.ui.warn(' %s\n' % m)
743 return None
741 return None
744 if matches:
742 if matches:
745 return matches[0]
743 return matches[0]
746 if len(self.series) > 0 and len(self.applied) > 0:
744 if len(self.series) > 0 and len(self.applied) > 0:
747 if s == 'qtip':
745 if s == 'qtip':
748 return self.series[self.series_end(True)-1]
746 return self.series[self.series_end(True)-1]
749 if s == 'qbase':
747 if s == 'qbase':
750 return self.series[0]
748 return self.series[0]
751 return None
749 return None
752 if patch == None:
750 if patch == None:
753 return None
751 return None
754
752
755 # we don't want to return a partial match until we make
753 # we don't want to return a partial match until we make
756 # sure the file name passed in does not exist (checked below)
754 # sure the file name passed in does not exist (checked below)
757 res = partial_name(patch)
755 res = partial_name(patch)
758 if res and res == patch:
756 if res and res == patch:
759 return res
757 return res
760
758
761 if not os.path.isfile(self.join(patch)):
759 if not os.path.isfile(self.join(patch)):
762 try:
760 try:
763 sno = int(patch)
761 sno = int(patch)
764 except(ValueError, OverflowError):
762 except(ValueError, OverflowError):
765 pass
763 pass
766 else:
764 else:
767 if sno < len(self.series):
765 if sno < len(self.series):
768 return self.series[sno]
766 return self.series[sno]
769 if not strict:
767 if not strict:
770 # return any partial match made above
768 # return any partial match made above
771 if res:
769 if res:
772 return res
770 return res
773 minus = patch.rfind('-')
771 minus = patch.rfind('-')
774 if minus >= 0:
772 if minus >= 0:
775 res = partial_name(patch[:minus])
773 res = partial_name(patch[:minus])
776 if res:
774 if res:
777 i = self.series.index(res)
775 i = self.series.index(res)
778 try:
776 try:
779 off = int(patch[minus+1:] or 1)
777 off = int(patch[minus+1:] or 1)
780 except(ValueError, OverflowError):
778 except(ValueError, OverflowError):
781 pass
779 pass
782 else:
780 else:
783 if i - off >= 0:
781 if i - off >= 0:
784 return self.series[i - off]
782 return self.series[i - off]
785 plus = patch.rfind('+')
783 plus = patch.rfind('+')
786 if plus >= 0:
784 if plus >= 0:
787 res = partial_name(patch[:plus])
785 res = partial_name(patch[:plus])
788 if res:
786 if res:
789 i = self.series.index(res)
787 i = self.series.index(res)
790 try:
788 try:
791 off = int(patch[plus+1:] or 1)
789 off = int(patch[plus+1:] or 1)
792 except(ValueError, OverflowError):
790 except(ValueError, OverflowError):
793 pass
791 pass
794 else:
792 else:
795 if i + off < len(self.series):
793 if i + off < len(self.series):
796 return self.series[i + off]
794 return self.series[i + off]
797 raise util.Abort(_("patch %s not in series") % patch)
795 raise util.Abort(_("patch %s not in series") % patch)
798
796
799 def push(self, repo, patch=None, force=False, list=False,
797 def push(self, repo, patch=None, force=False, list=False,
800 mergeq=None):
798 mergeq=None):
801 wlock = repo.wlock()
799 wlock = repo.wlock()
802 if repo.dirstate.parents()[0] != repo.changelog.tip():
800 if repo.dirstate.parents()[0] != repo.changelog.tip():
803 self.ui.status(_("(working directory not at tip)\n"))
801 self.ui.status(_("(working directory not at tip)\n"))
804
802
805 try:
803 try:
806 patch = self.lookup(patch)
804 patch = self.lookup(patch)
807 # Suppose our series file is: A B C and the current 'top'
805 # Suppose our series file is: A B C and the current 'top'
808 # patch is B. qpush C should be performed (moving forward),
806 # patch is B. qpush C should be performed (moving forward),
809 # qpush B is a no-op (no change), and qpush A is an error (can't
807 # qpush B is a no-op (no change), and qpush A is an error (can't
810 # go backwards with qpush)
808 # go backwards with qpush)
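A plain-Python sketch of the rule those checks enforce, using made-up series/applied lists rather than mq's internals:

series = ['A', 'B', 'C']
applied = ['A', 'B']                          # the current top is B

def classify(target):
    # mirrors the index comparisons below, on plain lists
    idx = series.index(target)
    if target in applied and idx < len(applied) - 1:
        return 'error: cannot push to a previous patch'
    if target in applied:
        return 'no-op: already at the top'
    return 'push'

assert classify('C') == 'push'
assert classify('B') == 'no-op: already at the top'
assert classify('A') == 'error: cannot push to a previous patch'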
811 if patch:
809 if patch:
812 info = self.isapplied(patch)
810 info = self.isapplied(patch)
813 if info:
811 if info:
814 if info[0] < len(self.applied) - 1:
812 if info[0] < len(self.applied) - 1:
815 raise util.Abort(
813 raise util.Abort(
816 _("cannot push to a previous patch: %s") % patch)
814 _("cannot push to a previous patch: %s") % patch)
817 if info[0] < len(self.series) - 1:
815 if info[0] < len(self.series) - 1:
818 self.ui.warn(
816 self.ui.warn(
819 _('qpush: %s is already at the top\n') % patch)
817 _('qpush: %s is already at the top\n') % patch)
820 else:
818 else:
821 self.ui.warn(_('all patches are currently applied\n'))
819 self.ui.warn(_('all patches are currently applied\n'))
822 return
820 return
823
821
824 # Following the above example, starting at 'top' of B:
822 # Following the above example, starting at 'top' of B:
825 # qpush should be performed (pushes C), but a subsequent
823 # qpush should be performed (pushes C), but a subsequent
826 # qpush without an argument is an error (nothing to
824 # qpush without an argument is an error (nothing to
827 # apply). This allows a loop of "...while hg qpush..." to
825 # apply). This allows a loop of "...while hg qpush..." to
828 # work as it detects an error when done
826 # work as it detects an error when done
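A minimal sketch of that push-until-error pattern, with a stand-in push function in place of mq's real one:

def make_queue(series):
    # stand-in for mq's push(): returns 0 while there is something to apply,
    # and a non-zero value once the series is fully applied
    state = {'applied': 0}
    def push():
        if state['applied'] == len(series):
            return 1
        state['applied'] += 1
        return 0
    return push

push = make_queue(['A', 'B', 'C'])
pushes = 0
while push() == 0:                # the Python analogue of "...while hg qpush..."
    pushes += 1
assert pushes == 3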
829 if self.series_end() == len(self.series):
827 if self.series_end() == len(self.series):
830 self.ui.warn(_('patch series already fully applied\n'))
828 self.ui.warn(_('patch series already fully applied\n'))
831 return 1
829 return 1
832 if not force:
830 if not force:
833 self.check_localchanges(repo)
831 self.check_localchanges(repo)
834
832
835 self.applied_dirty = 1;
833 self.applied_dirty = 1;
836 start = self.series_end()
834 start = self.series_end()
837 if start > 0:
835 if start > 0:
838 self.check_toppatch(repo)
836 self.check_toppatch(repo)
839 if not patch:
837 if not patch:
840 patch = self.series[start]
838 patch = self.series[start]
841 end = start + 1
839 end = start + 1
842 else:
840 else:
843 end = self.series.index(patch, start) + 1
841 end = self.series.index(patch, start) + 1
844 s = self.series[start:end]
842 s = self.series[start:end]
845 all_files = {}
843 all_files = {}
846 try:
844 try:
847 if mergeq:
845 if mergeq:
848 ret = self.mergepatch(repo, mergeq, s)
846 ret = self.mergepatch(repo, mergeq, s)
849 else:
847 else:
850 ret = self.apply(repo, s, list, all_files=all_files)
848 ret = self.apply(repo, s, list, all_files=all_files)
851 except:
849 except:
852 self.ui.warn(_('cleaning up working directory...'))
850 self.ui.warn(_('cleaning up working directory...'))
853 node = repo.dirstate.parents()[0]
851 node = repo.dirstate.parents()[0]
854 hg.revert(repo, node, None)
852 hg.revert(repo, node, None)
855 unknown = repo.status(unknown=True)[4]
853 unknown = repo.status(unknown=True)[4]
856 # only remove unknown files that we know we touched or
854 # only remove unknown files that we know we touched or
857 # created while patching
855 # created while patching
858 for f in unknown:
856 for f in unknown:
859 if f in all_files:
857 if f in all_files:
860 util.unlink(repo.wjoin(f))
858 util.unlink(repo.wjoin(f))
861 self.ui.warn(_('done\n'))
859 self.ui.warn(_('done\n'))
862 raise
860 raise
863 top = self.applied[-1].name
861 top = self.applied[-1].name
864 if ret[0]:
862 if ret[0]:
865 self.ui.write(
863 self.ui.write(
866 "Errors during apply, please fix and refresh %s\n" % top)
864 "Errors during apply, please fix and refresh %s\n" % top)
867 else:
865 else:
868 self.ui.write("Now at: %s\n" % top)
866 self.ui.write("Now at: %s\n" % top)
869 return ret[0]
867 return ret[0]
870 finally:
868 finally:
871 del wlock
869 del wlock
872
870
873 def pop(self, repo, patch=None, force=False, update=True, all=False):
871 def pop(self, repo, patch=None, force=False, update=True, all=False):
874 def getfile(f, rev, flags):
872 def getfile(f, rev, flags):
875 t = repo.file(f).read(rev)
873 t = repo.file(f).read(rev)
876 repo.wwrite(f, t, flags)
874 repo.wwrite(f, t, flags)
877
875
878 wlock = repo.wlock()
876 wlock = repo.wlock()
879 try:
877 try:
880 if patch:
878 if patch:
881 # index, rev, patch
879 # index, rev, patch
882 info = self.isapplied(patch)
880 info = self.isapplied(patch)
883 if not info:
881 if not info:
884 patch = self.lookup(patch)
882 patch = self.lookup(patch)
885 info = self.isapplied(patch)
883 info = self.isapplied(patch)
886 if not info:
884 if not info:
887 raise util.Abort(_("patch %s is not applied") % patch)
885 raise util.Abort(_("patch %s is not applied") % patch)
888
886
889 if len(self.applied) == 0:
887 if len(self.applied) == 0:
890 # Allow qpop -a to work repeatedly,
888 # Allow qpop -a to work repeatedly,
891 # but not qpop without an argument
889 # but not qpop without an argument
892 self.ui.warn(_("no patches applied\n"))
890 self.ui.warn(_("no patches applied\n"))
893 return not all
891 return not all
894
892
895 if not update:
893 if not update:
896 parents = repo.dirstate.parents()
894 parents = repo.dirstate.parents()
897 rr = [ revlog.bin(x.rev) for x in self.applied ]
895 rr = [ revlog.bin(x.rev) for x in self.applied ]
898 for p in parents:
896 for p in parents:
899 if p in rr:
897 if p in rr:
900 self.ui.warn("qpop: forcing dirstate update\n")
898 self.ui.warn("qpop: forcing dirstate update\n")
901 update = True
899 update = True
902
900
903 if not force and update:
901 if not force and update:
904 self.check_localchanges(repo)
902 self.check_localchanges(repo)
905
903
906 self.applied_dirty = 1;
904 self.applied_dirty = 1;
907 end = len(self.applied)
905 end = len(self.applied)
908 if not patch:
906 if not patch:
909 if all:
907 if all:
910 popi = 0
908 popi = 0
911 else:
909 else:
912 popi = len(self.applied) - 1
910 popi = len(self.applied) - 1
913 else:
911 else:
914 popi = info[0] + 1
912 popi = info[0] + 1
915 if popi >= end:
913 if popi >= end:
916 self.ui.warn("qpop: %s is already at the top\n" % patch)
914 self.ui.warn("qpop: %s is already at the top\n" % patch)
917 return
915 return
918 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
916 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
919
917
920 start = info[0]
918 start = info[0]
921 rev = revlog.bin(info[1])
919 rev = revlog.bin(info[1])
922
920
923 if update:
921 if update:
924 top = self.check_toppatch(repo)
922 top = self.check_toppatch(repo)
925
923
926 if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
924 if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
927 raise util.Abort("popping would remove a revision not "
925 raise util.Abort("popping would remove a revision not "
928 "managed by this patch queue")
926 "managed by this patch queue")
929
927
930 # we know there are no local changes, so we can make a simplified
928 # we know there are no local changes, so we can make a simplified
931 # form of hg.update.
929 # form of hg.update.
932 if update:
930 if update:
933 qp = self.qparents(repo, rev)
931 qp = self.qparents(repo, rev)
934 changes = repo.changelog.read(qp)
932 changes = repo.changelog.read(qp)
935 mmap = repo.manifest.read(changes[0])
933 mmap = repo.manifest.read(changes[0])
936 m, a, r, d = repo.status(qp, top)[:4]
934 m, a, r, d = repo.status(qp, top)[:4]
937 if d:
935 if d:
938 raise util.Abort("deletions found between repo revs")
936 raise util.Abort("deletions found between repo revs")
939 for f in m:
937 for f in m:
940 getfile(f, mmap[f], mmap.flags(f))
938 getfile(f, mmap[f], mmap.flags(f))
941 for f in r:
939 for f in r:
942 getfile(f, mmap[f], mmap.flags(f))
940 getfile(f, mmap[f], mmap.flags(f))
943 for f in m + r:
941 for f in m + r:
944 repo.dirstate.normal(f)
942 repo.dirstate.normal(f)
945 for f in a:
943 for f in a:
946 try:
944 try:
947 os.unlink(repo.wjoin(f))
945 os.unlink(repo.wjoin(f))
948 except OSError, e:
946 except OSError, e:
949 if e.errno != errno.ENOENT:
947 if e.errno != errno.ENOENT:
950 raise
948 raise
951 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
949 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
952 except: pass
950 except: pass
953 repo.dirstate.forget(f)
951 repo.dirstate.forget(f)
954 repo.dirstate.setparents(qp, revlog.nullid)
952 repo.dirstate.setparents(qp, revlog.nullid)
955 del self.applied[start:end]
953 del self.applied[start:end]
956 self.strip(repo, rev, update=False, backup='strip')
954 self.strip(repo, rev, update=False, backup='strip')
957 if len(self.applied):
955 if len(self.applied):
958 self.ui.write("Now at: %s\n" % self.applied[-1].name)
956 self.ui.write("Now at: %s\n" % self.applied[-1].name)
959 else:
957 else:
960 self.ui.write("Patch queue now empty\n")
958 self.ui.write("Patch queue now empty\n")
961 finally:
959 finally:
962 del wlock
960 del wlock
963
961
964 def diff(self, repo, pats, opts):
962 def diff(self, repo, pats, opts):
965 top = self.check_toppatch(repo)
963 top = self.check_toppatch(repo)
966 if not top:
964 if not top:
967 self.ui.write("No patches applied\n")
965 self.ui.write("No patches applied\n")
968 return
966 return
969 qp = self.qparents(repo, top)
967 qp = self.qparents(repo, top)
970 self._diffopts = patch.diffopts(self.ui, opts)
968 self._diffopts = patch.diffopts(self.ui, opts)
971 self.printdiff(repo, qp, files=pats, opts=opts)
969 self.printdiff(repo, qp, files=pats, opts=opts)
972
970
973 def refresh(self, repo, pats=None, **opts):
971 def refresh(self, repo, pats=None, **opts):
974 if len(self.applied) == 0:
972 if len(self.applied) == 0:
975 self.ui.write("No patches applied\n")
973 self.ui.write("No patches applied\n")
976 return 1
974 return 1
977 newdate = opts.get('date')
975 newdate = opts.get('date')
978 if newdate:
976 if newdate:
979 newdate = '%d %d' % util.parsedate(newdate)
977 newdate = '%d %d' % util.parsedate(newdate)
980 wlock = repo.wlock()
978 wlock = repo.wlock()
981 try:
979 try:
982 self.check_toppatch(repo)
980 self.check_toppatch(repo)
983 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
981 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
984 top = revlog.bin(top)
982 top = revlog.bin(top)
985 if repo.changelog.heads(top) != [top]:
983 if repo.changelog.heads(top) != [top]:
986 raise util.Abort("cannot refresh a revision with children")
984 raise util.Abort("cannot refresh a revision with children")
987 cparents = repo.changelog.parents(top)
985 cparents = repo.changelog.parents(top)
988 patchparent = self.qparents(repo, top)
986 patchparent = self.qparents(repo, top)
989 message, comments, user, date, patchfound = self.readheaders(patchfn)
987 message, comments, user, date, patchfound = self.readheaders(patchfn)
990
988
991 patchf = self.opener(patchfn, 'r+')
989 patchf = self.opener(patchfn, 'r+')
992
990
993 # if the patch was a git patch, refresh it as a git patch
991 # if the patch was a git patch, refresh it as a git patch
994 for line in patchf:
992 for line in patchf:
995 if line.startswith('diff --git'):
993 if line.startswith('diff --git'):
996 self.diffopts().git = True
994 self.diffopts().git = True
997 break
995 break
998
996
999 msg = opts.get('msg', '').rstrip()
997 msg = opts.get('msg', '').rstrip()
1000 if msg and comments:
998 if msg and comments:
1001 # Remove the existing message, keeping the rest of the comment
999 # Remove the existing message, keeping the rest of the comment
1002 # fields.
1000 # fields.
1003 # If comments contains a 'subject: ' line, message begins with the
1001 # If comments contains a 'subject: ' line, message begins with the
1004 # subject text followed by a blank line.
1002 # subject text followed by a blank line.
1005 if message:
1003 if message:
1006 subj = 'subject: ' + message[0].lower()
1004 subj = 'subject: ' + message[0].lower()
1007 for i in xrange(len(comments)):
1005 for i in xrange(len(comments)):
1008 if subj == comments[i].lower():
1006 if subj == comments[i].lower():
1009 del comments[i]
1007 del comments[i]
1010 message = message[2:]
1008 message = message[2:]
1011 break
1009 break
1012 ci = 0
1010 ci = 0
1013 for mi in xrange(len(message)):
1011 for mi in xrange(len(message)):
1014 while message[mi] != comments[ci]:
1012 while message[mi] != comments[ci]:
1015 ci += 1
1013 ci += 1
1016 del comments[ci]
1014 del comments[ci]
1017
1015
1018 def setheaderfield(comments, prefixes, new):
1016 def setheaderfield(comments, prefixes, new):
1019 # Update all references to a field in the patch header.
1017 # Update all references to a field in the patch header.
1020 # If none found, add it email style.
1018 # If none found, add it email style.
1021 res = False
1019 res = False
1022 for prefix in prefixes:
1020 for prefix in prefixes:
1023 for i in xrange(len(comments)):
1021 for i in xrange(len(comments)):
1024 if comments[i].startswith(prefix):
1022 if comments[i].startswith(prefix):
1025 comments[i] = prefix + new
1023 comments[i] = prefix + new
1026 res = True
1024 res = True
1027 break
1025 break
1028 return res
1026 return res
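For clarity, a self-contained restatement of the helper above together with a small usage example (the copy keeps the same replace-per-prefix behaviour):

def setheaderfield(comments, prefixes, new):
    # same behaviour as above: for each prefix, rewrite the first matching line
    res = False
    for prefix in prefixes:
        for i in range(len(comments)):
            if comments[i].startswith(prefix):
                comments[i] = prefix + new
                res = True
                break
    return res

comments = ['# HG changeset patch', '# User alice', '', 'fix the frobnicator']
assert setheaderfield(comments, ['From: ', '# User '], 'bob@example.com')
assert comments[1] == '# User bob@example.com'
# no matching prefix: returns False and the caller adds the field itself
assert not setheaderfield(['no header here'], ['# Date '], '0 0')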
1029
1027
1030 newuser = opts.get('user')
1028 newuser = opts.get('user')
1031 if newuser:
1029 if newuser:
1032 if not setheaderfield(comments, ['From: ', '# User '], newuser):
1030 if not setheaderfield(comments, ['From: ', '# User '], newuser):
1033 try:
1031 try:
1034 patchheaderat = comments.index('# HG changeset patch')
1032 patchheaderat = comments.index('# HG changeset patch')
1035 comments.insert(patchheaderat + 1,'# User ' + newuser)
1033 comments.insert(patchheaderat + 1,'# User ' + newuser)
1036 except ValueError:
1034 except ValueError:
1037 comments = ['From: ' + newuser, ''] + comments
1035 comments = ['From: ' + newuser, ''] + comments
1038 user = newuser
1036 user = newuser
1039
1037
1040 if newdate:
1038 if newdate:
1041 if setheaderfield(comments, ['# Date '], newdate):
1039 if setheaderfield(comments, ['# Date '], newdate):
1042 date = newdate
1040 date = newdate
1043
1041
1044 if msg:
1042 if msg:
1045 comments.append(msg)
1043 comments.append(msg)
1046
1044
1047 patchf.seek(0)
1045 patchf.seek(0)
1048 patchf.truncate()
1046 patchf.truncate()
1049
1047
1050 if comments:
1048 if comments:
1051 comments = "\n".join(comments) + '\n\n'
1049 comments = "\n".join(comments) + '\n\n'
1052 patchf.write(comments)
1050 patchf.write(comments)
1053
1051
1054 if opts.get('git'):
1052 if opts.get('git'):
1055 self.diffopts().git = True
1053 self.diffopts().git = True
1056 matchfn = cmdutil.match(repo, pats, opts)
1054 matchfn = cmdutil.match(repo, pats, opts)
1057 tip = repo.changelog.tip()
1055 tip = repo.changelog.tip()
1058 if top == tip:
1056 if top == tip:
1059 # if the top of our patch queue is also the tip, there is an
1057 # if the top of our patch queue is also the tip, there is an
1060 # optimization here. We update the dirstate in place and strip
1058 # optimization here. We update the dirstate in place and strip
1061 # off the tip commit. Then just commit the current directory
1059 # off the tip commit. Then just commit the current directory
1062 # tree. We can also send repo.commit the list of files
1060 # tree. We can also send repo.commit the list of files
1063 # changed to speed up the diff
1061 # changed to speed up the diff
1064 #
1062 #
1065 # in short mode, we only diff the files included in the
1063 # in short mode, we only diff the files included in the
1066 # patch already
1064 # patch already
1067 #
1065 #
1068 # this should really read:
1066 # this should really read:
1069 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1067 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1070 # but we do it backwards to take advantage of manifest/chlog
1068 # but we do it backwards to take advantage of manifest/chlog
1071 # caching against the next repo.status call
1069 # caching against the next repo.status call
1072 #
1070 #
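A hedged toy illustration of why the unpacking order flips when the comparison direction is reversed, using dict stand-ins for manifests:

def diffmanifests(old, new):
    # toy manifest comparison: (modified, added, removed) going old -> new
    modified = [f for f in old if f in new and old[f] != new[f]]
    added = [f for f in new if f not in old]
    removed = [f for f in old if f not in new]
    return modified, added, removed

parent = {'a': 1, 'b': 1}                 # stand-in for patchparent's manifest
tip = {'a': 2, 'c': 1}                    # stand-in for tip's manifest

m1, a1, r1 = diffmanifests(parent, tip)   # the "backwards" direction used here
m2, a2, r2 = diffmanifests(tip, parent)   # the direction the comment calls ideal
assert (m1, a1, r1) == (['a'], ['c'], ['b'])
assert (a1, r1) == (r2, a2)               # added and removed simply swap roles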
1073 mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
1071 mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
1074 changes = repo.changelog.read(tip)
1072 changes = repo.changelog.read(tip)
1075 man = repo.manifest.read(changes[0])
1073 man = repo.manifest.read(changes[0])
1076 aaa = aa[:]
1074 aaa = aa[:]
1077 if opts.get('short'):
1075 if opts.get('short'):
1078 match = cmdutil.matchfiles(repo, mm + aa + dd)
1076 match = cmdutil.matchfiles(repo, mm + aa + dd)
1079 else:
1077 else:
1080 match = cmdutil.matchall(repo)
1078 match = cmdutil.matchall(repo)
1081 m, a, r, d = repo.status(match=match)[:4]
1079 m, a, r, d = repo.status(match=match)[:4]
1082
1080
1083 # we might end up with files that were added between
1081 # we might end up with files that were added between
1084 # tip and the dirstate parent, but then changed in the
1082 # tip and the dirstate parent, but then changed in the
1085 # local dirstate. in this case, we want them to only
1083 # local dirstate. in this case, we want them to only
1086 # show up in the added section
1084 # show up in the added section
1087 for x in m:
1085 for x in m:
1088 if x not in aa:
1086 if x not in aa:
1089 mm.append(x)
1087 mm.append(x)
1090 # we might end up with files added by the local dirstate that
1088 # we might end up with files added by the local dirstate that
1091 # were deleted by the patch. In this case, they should only
1089 # were deleted by the patch. In this case, they should only
1092 # show up in the changed section.
1090 # show up in the changed section.
1093 for x in a:
1091 for x in a:
1094 if x in dd:
1092 if x in dd:
1095 del dd[dd.index(x)]
1093 del dd[dd.index(x)]
1096 mm.append(x)
1094 mm.append(x)
1097 else:
1095 else:
1098 aa.append(x)
1096 aa.append(x)
1099 # make sure any files deleted in the local dirstate
1097 # make sure any files deleted in the local dirstate
1100 # are not in the add or change column of the patch
1098 # are not in the add or change column of the patch
1101 forget = []
1099 forget = []
1102 for x in d + r:
1100 for x in d + r:
1103 if x in aa:
1101 if x in aa:
1104 del aa[aa.index(x)]
1102 del aa[aa.index(x)]
1105 forget.append(x)
1103 forget.append(x)
1106 continue
1104 continue
1107 elif x in mm:
1105 elif x in mm:
1108 del mm[mm.index(x)]
1106 del mm[mm.index(x)]
1109 dd.append(x)
1107 dd.append(x)
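The three loops above are easier to follow on plain lists; this standalone sketch replays them on made-up file names and checks the resulting classification:

# Patch (patchparent -> tip): modified lib.py, added new.py and tmp.py, removed old.py
mm, aa, dd = ['lib.py'], ['new.py', 'tmp.py'], ['old.py']
# Working directory on top of tip:
m = ['new.py', 'other.py']       # locally modified
a = ['old.py', 'brand.py']       # locally added
r = ['lib.py']                   # locally removed
d = ['tmp.py']                   # locally deleted

for x in m:                      # modified locally and not added by the patch -> changed
    if x not in aa:
        mm.append(x)
for x in a:                      # added locally...
    if x in dd:                  # ...but deleted by the patch -> changed, not removed
        del dd[dd.index(x)]
        mm.append(x)
    else:                        # ...and unknown to the patch -> added
        aa.append(x)
forget = []
for x in d + r:                  # deleted or removed locally...
    if x in aa:                  # ...and added by the patch -> drop it and forget it
        del aa[aa.index(x)]
        forget.append(x)
    elif x in mm:                # ...and changed by the patch -> record it as removed
        del mm[mm.index(x)]
        dd.append(x)

assert sorted(mm) == ['old.py', 'other.py']
assert sorted(aa) == ['brand.py', 'new.py']
assert dd == ['lib.py'] and forget == ['tmp.py']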
1110
1108
1111 m = util.unique(mm)
1109 m = util.unique(mm)
1112 r = util.unique(dd)
1110 r = util.unique(dd)
1113 a = util.unique(aa)
1111 a = util.unique(aa)
1114 c = [filter(matchfn, l) for l in (m, a, r)]
1112 c = [filter(matchfn, l) for l in (m, a, r)]
1115 match = cmdutil.matchfiles(repo, util.unique(c[0] + c[1] + c[2]))
1113 match = cmdutil.matchfiles(repo, util.unique(c[0] + c[1] + c[2]))
1116 patch.diff(repo, patchparent, match=match,
1114 patch.diff(repo, patchparent, match=match,
1117 fp=patchf, changes=c, opts=self.diffopts())
1115 fp=patchf, changes=c, opts=self.diffopts())
1118 patchf.close()
1116 patchf.close()
1119
1117
1120 repo.dirstate.setparents(*cparents)
1118 repo.dirstate.setparents(*cparents)
1121 copies = {}
1119 copies = {}
1122 for dst in a:
1120 for dst in a:
1123 src = repo.dirstate.copied(dst)
1121 src = repo.dirstate.copied(dst)
1124 if src is not None:
1122 if src is not None:
1125 copies.setdefault(src, []).append(dst)
1123 copies.setdefault(src, []).append(dst)
1126 repo.dirstate.add(dst)
1124 repo.dirstate.add(dst)
1127 # remember the copies between patchparent and tip
1125 # remember the copies between patchparent and tip
1128 # this may be slow, so don't do it if we're not tracking copies
1126 # this may be slow, so don't do it if we're not tracking copies
1129 if self.diffopts().git:
1127 if self.diffopts().git:
1130 for dst in aaa:
1128 for dst in aaa:
1131 f = repo.file(dst)
1129 f = repo.file(dst)
1132 src = f.renamed(man[dst])
1130 src = f.renamed(man[dst])
1133 if src:
1131 if src:
1134 copies[src[0]] = copies.get(dst, [])
1132 copies[src[0]] = copies.get(dst, [])
1135 if dst in a:
1133 if dst in a:
1136 copies[src[0]].append(dst)
1134 copies[src[0]].append(dst)
1137 # we can't copy a file created by the patch itself
1135 # we can't copy a file created by the patch itself
1138 if dst in copies:
1136 if dst in copies:
1139 del copies[dst]
1137 del copies[dst]
1140 for src, dsts in copies.iteritems():
1138 for src, dsts in copies.iteritems():
1141 for dst in dsts:
1139 for dst in dsts:
1142 repo.dirstate.copy(src, dst)
1140 repo.dirstate.copy(src, dst)
1143 for f in r:
1141 for f in r:
1144 repo.dirstate.remove(f)
1142 repo.dirstate.remove(f)
1145 # if the patch excludes a modified file, mark that
1143 # if the patch excludes a modified file, mark that
1146 # file with mtime=0 so status can see it.
1144 # file with mtime=0 so status can see it.
1147 mm = []
1145 mm = []
1148 for i in xrange(len(m)-1, -1, -1):
1146 for i in xrange(len(m)-1, -1, -1):
1149 if not matchfn(m[i]):
1147 if not matchfn(m[i]):
1150 mm.append(m[i])
1148 mm.append(m[i])
1151 del m[i]
1149 del m[i]
1152 for f in m:
1150 for f in m:
1153 repo.dirstate.normal(f)
1151 repo.dirstate.normal(f)
1154 for f in mm:
1152 for f in mm:
1155 repo.dirstate.normallookup(f)
1153 repo.dirstate.normallookup(f)
1156 for f in forget:
1154 for f in forget:
1157 repo.dirstate.forget(f)
1155 repo.dirstate.forget(f)
1158
1156
1159 if not msg:
1157 if not msg:
1160 if not message:
1158 if not message:
1161 message = "[mq]: %s\n" % patchfn
1159 message = "[mq]: %s\n" % patchfn
1162 else:
1160 else:
1163 message = "\n".join(message)
1161 message = "\n".join(message)
1164 else:
1162 else:
1165 message = msg
1163 message = msg
1166
1164
1167 if not user:
1165 if not user:
1168 user = changes[1]
1166 user = changes[1]
1169
1167
1170 self.applied.pop()
1168 self.applied.pop()
1171 self.applied_dirty = 1
1169 self.applied_dirty = 1
1172 self.strip(repo, top, update=False,
1170 self.strip(repo, top, update=False,
1173 backup='strip')
1171 backup='strip')
1174 n = repo.commit(match.files(), message, user, date, match=match,
1172 n = repo.commit(match.files(), message, user, date, match=match,
1175 force=1)
1173 force=1)
1176 self.applied.append(statusentry(revlog.hex(n), patchfn))
1174 self.applied.append(statusentry(revlog.hex(n), patchfn))
1177 self.removeundo(repo)
1175 self.removeundo(repo)
1178 else:
1176 else:
1179 self.printdiff(repo, patchparent, fp=patchf)
1177 self.printdiff(repo, patchparent, fp=patchf)
1180 patchf.close()
1178 patchf.close()
1181 added = repo.status()[1]
1179 added = repo.status()[1]
1182 for a in added:
1180 for a in added:
1183 f = repo.wjoin(a)
1181 f = repo.wjoin(a)
1184 try:
1182 try:
1185 os.unlink(f)
1183 os.unlink(f)
1186 except OSError, e:
1184 except OSError, e:
1187 if e.errno != errno.ENOENT:
1185 if e.errno != errno.ENOENT:
1188 raise
1186 raise
1189 try: os.removedirs(os.path.dirname(f))
1187 try: os.removedirs(os.path.dirname(f))
1190 except: pass
1188 except: pass
1191 # forget the file copies in the dirstate
1189 # forget the file copies in the dirstate
1192 # push should readd the files later on
1190 # push should readd the files later on
1193 repo.dirstate.forget(a)
1191 repo.dirstate.forget(a)
1194 self.pop(repo, force=True)
1192 self.pop(repo, force=True)
1195 self.push(repo, force=True)
1193 self.push(repo, force=True)
1196 finally:
1194 finally:
1197 del wlock
1195 del wlock
1198
1196
1199 def init(self, repo, create=False):
1197 def init(self, repo, create=False):
1200 if not create and os.path.isdir(self.path):
1198 if not create and os.path.isdir(self.path):
1201 raise util.Abort(_("patch queue directory already exists"))
1199 raise util.Abort(_("patch queue directory already exists"))
1202 try:
1200 try:
1203 os.mkdir(self.path)
1201 os.mkdir(self.path)
1204 except OSError, inst:
1202 except OSError, inst:
1205 if inst.errno != errno.EEXIST or not create:
1203 if inst.errno != errno.EEXIST or not create:
1206 raise
1204 raise
1207 if create:
1205 if create:
1208 return self.qrepo(create=True)
1206 return self.qrepo(create=True)
1209
1207
1210 def unapplied(self, repo, patch=None):
1208 def unapplied(self, repo, patch=None):
1211 if patch and patch not in self.series:
1209 if patch and patch not in self.series:
1212 raise util.Abort(_("patch %s is not in series file") % patch)
1210 raise util.Abort(_("patch %s is not in series file") % patch)
1213 if not patch:
1211 if not patch:
1214 start = self.series_end()
1212 start = self.series_end()
1215 else:
1213 else:
1216 start = self.series.index(patch) + 1
1214 start = self.series.index(patch) + 1
1217 unapplied = []
1215 unapplied = []
1218 for i in xrange(start, len(self.series)):
1216 for i in xrange(start, len(self.series)):
1219 pushable, reason = self.pushable(i)
1217 pushable, reason = self.pushable(i)
1220 if pushable:
1218 if pushable:
1221 unapplied.append((i, self.series[i]))
1219 unapplied.append((i, self.series[i]))
1222 self.explain_pushable(i)
1220 self.explain_pushable(i)
1223 return unapplied
1221 return unapplied
1224
1222
1225 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1223 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1226 summary=False):
1224 summary=False):
1227 def displayname(patchname):
1225 def displayname(patchname):
1228 if summary:
1226 if summary:
1229 msg = self.readheaders(patchname)[0]
1227 msg = self.readheaders(patchname)[0]
1230 msg = msg and ': ' + msg[0] or ': '
1228 msg = msg and ': ' + msg[0] or ': '
1231 else:
1229 else:
1232 msg = ''
1230 msg = ''
1233 return '%s%s' % (patchname, msg)
1231 return '%s%s' % (patchname, msg)
1234
1232
1235 applied = dict.fromkeys([p.name for p in self.applied])
1233 applied = dict.fromkeys([p.name for p in self.applied])
1236 if length is None:
1234 if length is None:
1237 length = len(self.series) - start
1235 length = len(self.series) - start
1238 if not missing:
1236 if not missing:
1239 for i in xrange(start, start+length):
1237 for i in xrange(start, start+length):
1240 patch = self.series[i]
1238 patch = self.series[i]
1241 if patch in applied:
1239 if patch in applied:
1242 stat = 'A'
1240 stat = 'A'
1243 elif self.pushable(i)[0]:
1241 elif self.pushable(i)[0]:
1244 stat = 'U'
1242 stat = 'U'
1245 else:
1243 else:
1246 stat = 'G'
1244 stat = 'G'
1247 pfx = ''
1245 pfx = ''
1248 if self.ui.verbose:
1246 if self.ui.verbose:
1249 pfx = '%d %s ' % (i, stat)
1247 pfx = '%d %s ' % (i, stat)
1250 elif status and status != stat:
1248 elif status and status != stat:
1251 continue
1249 continue
1252 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1250 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1253 else:
1251 else:
1254 msng_list = []
1252 msng_list = []
1255 for root, dirs, files in os.walk(self.path):
1253 for root, dirs, files in os.walk(self.path):
1256 d = root[len(self.path) + 1:]
1254 d = root[len(self.path) + 1:]
1257 for f in files:
1255 for f in files:
1258 fl = os.path.join(d, f)
1256 fl = os.path.join(d, f)
1259 if (fl not in self.series and
1257 if (fl not in self.series and
1260 fl not in (self.status_path, self.series_path,
1258 fl not in (self.status_path, self.series_path,
1261 self.guards_path)
1259 self.guards_path)
1262 and not fl.startswith('.')):
1260 and not fl.startswith('.')):
1263 msng_list.append(fl)
1261 msng_list.append(fl)
1264 msng_list.sort()
1262 for x in util.sort(msng_list):
1265 for x in msng_list:
1266 pfx = self.ui.verbose and ('D ') or ''
1263 pfx = self.ui.verbose and ('D ') or ''
1267 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1264 self.ui.write("%s%s\n" % (pfx, displayname(x)))
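This hunk is where the change lands in mq: the in-place msng_list.sort() gives way to iterating util.sort(msng_list). A minimal sketch of what such a helper plausibly looks like (the real one lives in mercurial/util.py and may differ):

def sort(l):
    # accept any iterable; copy non-lists so .sort() is available, then return it
    if not isinstance(l, list):
        l = list(l)
    l.sort()
    return l

assert sort(('b', 'a', 'c')) == ['a', 'b', 'c']
assert sort(['2', '10', '1']) == ['1', '10', '2']    # plain string ordering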
1268
1265
1269 def issaveline(self, l):
1266 def issaveline(self, l):
1270 if l.name == '.hg.patches.save.line':
1267 if l.name == '.hg.patches.save.line':
1271 return True
1268 return True
1272
1269
1273 def qrepo(self, create=False):
1270 def qrepo(self, create=False):
1274 if create or os.path.isdir(self.join(".hg")):
1271 if create or os.path.isdir(self.join(".hg")):
1275 return hg.repository(self.ui, path=self.path, create=create)
1272 return hg.repository(self.ui, path=self.path, create=create)
1276
1273
1277 def restore(self, repo, rev, delete=None, qupdate=None):
1274 def restore(self, repo, rev, delete=None, qupdate=None):
1278 c = repo.changelog.read(rev)
1275 c = repo.changelog.read(rev)
1279 desc = c[4].strip()
1276 desc = c[4].strip()
1280 lines = desc.splitlines()
1277 lines = desc.splitlines()
1281 i = 0
1278 i = 0
1282 datastart = None
1279 datastart = None
1283 series = []
1280 series = []
1284 applied = []
1281 applied = []
1285 qpp = None
1282 qpp = None
1286 for i in xrange(0, len(lines)):
1283 for i in xrange(0, len(lines)):
1287 if lines[i] == 'Patch Data:':
1284 if lines[i] == 'Patch Data:':
1288 datastart = i + 1
1285 datastart = i + 1
1289 elif lines[i].startswith('Dirstate:'):
1286 elif lines[i].startswith('Dirstate:'):
1290 l = lines[i].rstrip()
1287 l = lines[i].rstrip()
1291 l = l[10:].split(' ')
1288 l = l[10:].split(' ')
1292 qpp = [ bin(x) for x in l ]
1289 qpp = [ bin(x) for x in l ]
1293 elif datastart != None:
1290 elif datastart != None:
1294 l = lines[i].rstrip()
1291 l = lines[i].rstrip()
1295 se = statusentry(l)
1292 se = statusentry(l)
1296 file_ = se.name
1293 file_ = se.name
1297 if se.rev:
1294 if se.rev:
1298 applied.append(se)
1295 applied.append(se)
1299 else:
1296 else:
1300 series.append(file_)
1297 series.append(file_)
1301 if datastart == None:
1298 if datastart == None:
1302 self.ui.warn("No saved patch data found\n")
1299 self.ui.warn("No saved patch data found\n")
1303 return 1
1300 return 1
1304 self.ui.warn("restoring status: %s\n" % lines[0])
1301 self.ui.warn("restoring status: %s\n" % lines[0])
1305 self.full_series = series
1302 self.full_series = series
1306 self.applied = applied
1303 self.applied = applied
1307 self.parse_series()
1304 self.parse_series()
1308 self.series_dirty = 1
1305 self.series_dirty = 1
1309 self.applied_dirty = 1
1306 self.applied_dirty = 1
1310 heads = repo.changelog.heads()
1307 heads = repo.changelog.heads()
1311 if delete:
1308 if delete:
1312 if rev not in heads:
1309 if rev not in heads:
1313 self.ui.warn("save entry has children, leaving it alone\n")
1310 self.ui.warn("save entry has children, leaving it alone\n")
1314 else:
1311 else:
1315 self.ui.warn("removing save entry %s\n" % short(rev))
1312 self.ui.warn("removing save entry %s\n" % short(rev))
1316 pp = repo.dirstate.parents()
1313 pp = repo.dirstate.parents()
1317 if rev in pp:
1314 if rev in pp:
1318 update = True
1315 update = True
1319 else:
1316 else:
1320 update = False
1317 update = False
1321 self.strip(repo, rev, update=update, backup='strip')
1318 self.strip(repo, rev, update=update, backup='strip')
1322 if qpp:
1319 if qpp:
1323 self.ui.warn("saved queue repository parents: %s %s\n" %
1320 self.ui.warn("saved queue repository parents: %s %s\n" %
1324 (short(qpp[0]), short(qpp[1])))
1321 (short(qpp[0]), short(qpp[1])))
1325 if qupdate:
1322 if qupdate:
1326 self.ui.status(_("queue directory updating\n"))
1323 self.ui.status(_("queue directory updating\n"))
1327 r = self.qrepo()
1324 r = self.qrepo()
1328 if not r:
1325 if not r:
1329 self.ui.warn("Unable to load queue repository\n")
1326 self.ui.warn("Unable to load queue repository\n")
1330 return 1
1327 return 1
1331 hg.clean(r, qpp[0])
1328 hg.clean(r, qpp[0])
1332
1329
1333 def save(self, repo, msg=None):
1330 def save(self, repo, msg=None):
1334 if len(self.applied) == 0:
1331 if len(self.applied) == 0:
1335 self.ui.warn("save: no patches applied, exiting\n")
1332 self.ui.warn("save: no patches applied, exiting\n")
1336 return 1
1333 return 1
1337 if self.issaveline(self.applied[-1]):
1334 if self.issaveline(self.applied[-1]):
1338 self.ui.warn("status is already saved\n")
1335 self.ui.warn("status is already saved\n")
1339 return 1
1336 return 1
1340
1337
1341 ar = [ ':' + x for x in self.full_series ]
1338 ar = [ ':' + x for x in self.full_series ]
1342 if not msg:
1339 if not msg:
1343 msg = "hg patches saved state"
1340 msg = "hg patches saved state"
1344 else:
1341 else:
1345 msg = "hg patches: " + msg.rstrip('\r\n')
1342 msg = "hg patches: " + msg.rstrip('\r\n')
1346 r = self.qrepo()
1343 r = self.qrepo()
1347 if r:
1344 if r:
1348 pp = r.dirstate.parents()
1345 pp = r.dirstate.parents()
1349 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1346 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1350 msg += "\n\nPatch Data:\n"
1347 msg += "\n\nPatch Data:\n"
1351 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1348 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1352 "\n".join(ar) + '\n' or "")
1349 "\n".join(ar) + '\n' or "")
1353 n = repo.commit(None, text, user=None, force=1)
1350 n = repo.commit(None, text, user=None, force=1)
1354 if not n:
1351 if not n:
1355 self.ui.warn("repo commit failed\n")
1352 self.ui.warn("repo commit failed\n")
1356 return 1
1353 return 1
1357 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1354 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1358 self.applied_dirty = 1
1355 self.applied_dirty = 1
1359 self.removeundo(repo)
1356 self.removeundo(repo)
1360
1357
1361 def full_series_end(self):
1358 def full_series_end(self):
1362 if len(self.applied) > 0:
1359 if len(self.applied) > 0:
1363 p = self.applied[-1].name
1360 p = self.applied[-1].name
1364 end = self.find_series(p)
1361 end = self.find_series(p)
1365 if end == None:
1362 if end == None:
1366 return len(self.full_series)
1363 return len(self.full_series)
1367 return end + 1
1364 return end + 1
1368 return 0
1365 return 0
1369
1366
1370 def series_end(self, all_patches=False):
1367 def series_end(self, all_patches=False):
1371 """If all_patches is False, return the index of the next pushable patch
1368 """If all_patches is False, return the index of the next pushable patch
1372 in the series, or the series length. If all_patches is True, return the
1369 in the series, or the series length. If all_patches is True, return the
1373 index of the first patch past the last applied one.
1370 index of the first patch past the last applied one.
1374 """
1371 """
1375 end = 0
1372 end = 0
1376 def next(start):
1373 def next(start):
1377 if all_patches:
1374 if all_patches:
1378 return start
1375 return start
1379 i = start
1376 i = start
1380 while i < len(self.series):
1377 while i < len(self.series):
1381 p, reason = self.pushable(i)
1378 p, reason = self.pushable(i)
1382 if p:
1379 if p:
1383 break
1380 break
1384 self.explain_pushable(i)
1381 self.explain_pushable(i)
1385 i += 1
1382 i += 1
1386 return i
1383 return i
1387 if len(self.applied) > 0:
1384 if len(self.applied) > 0:
1388 p = self.applied[-1].name
1385 p = self.applied[-1].name
1389 try:
1386 try:
1390 end = self.series.index(p)
1387 end = self.series.index(p)
1391 except ValueError:
1388 except ValueError:
1392 return 0
1389 return 0
1393 return next(end + 1)
1390 return next(end + 1)
1394 return next(end)
1391 return next(end)
1395
1392
1396 def appliedname(self, index):
1393 def appliedname(self, index):
1397 pname = self.applied[index].name
1394 pname = self.applied[index].name
1398 if not self.ui.verbose:
1395 if not self.ui.verbose:
1399 p = pname
1396 p = pname
1400 else:
1397 else:
1401 p = str(self.series.index(pname)) + " " + pname
1398 p = str(self.series.index(pname)) + " " + pname
1402 return p
1399 return p
1403
1400
1404 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1401 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1405 force=None, git=False):
1402 force=None, git=False):
1406 def checkseries(patchname):
1403 def checkseries(patchname):
1407 if patchname in self.series:
1404 if patchname in self.series:
1408 raise util.Abort(_('patch %s is already in the series file')
1405 raise util.Abort(_('patch %s is already in the series file')
1409 % patchname)
1406 % patchname)
1410 def checkfile(patchname):
1407 def checkfile(patchname):
1411 if not force and os.path.exists(self.join(patchname)):
1408 if not force and os.path.exists(self.join(patchname)):
1412 raise util.Abort(_('patch "%s" already exists')
1409 raise util.Abort(_('patch "%s" already exists')
1413 % patchname)
1410 % patchname)
1414
1411
1415 if rev:
1412 if rev:
1416 if files:
1413 if files:
1417 raise util.Abort(_('option "-r" not valid when importing '
1414 raise util.Abort(_('option "-r" not valid when importing '
1418 'files'))
1415 'files'))
1419 rev = cmdutil.revrange(repo, rev)
1416 rev = cmdutil.revrange(repo, rev)
1420 rev.sort(lambda x, y: cmp(y, x))
1417 rev.sort(lambda x, y: cmp(y, x))
1421 if (len(files) > 1 or len(rev) > 1) and patchname:
1418 if (len(files) > 1 or len(rev) > 1) and patchname:
1422 raise util.Abort(_('option "-n" not valid when importing multiple '
1419 raise util.Abort(_('option "-n" not valid when importing multiple '
1423 'patches'))
1420 'patches'))
1424 i = 0
1421 i = 0
1425 added = []
1422 added = []
1426 if rev:
1423 if rev:
1427 # If mq patches are applied, we can only import revisions
1424 # If mq patches are applied, we can only import revisions
1428 # that form a linear path to qbase.
1425 # that form a linear path to qbase.
1429 # Otherwise, they should form a linear path to a head.
1426 # Otherwise, they should form a linear path to a head.
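A standalone sketch of the linearity requirement that the lastparent bookkeeping below enforces, using a made-up parent map:

def is_linear(revs, parent_of):
    # revs listed newest to oldest; parent_of maps a revision to its first parent
    lastparent = None
    for r in revs:
        if lastparent is not None and lastparent != r:
            return False
        lastparent = parent_of[r]
    return True

parent_of = {3: 2, 2: 1, 1: 0}
assert is_linear([3, 2, 1], parent_of)        # 1 -> 2 -> 3 is a straight line
assert not is_linear([3, 1], parent_of)       # skipping 2 breaks the chain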
1430 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1427 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1431 if len(heads) > 1:
1428 if len(heads) > 1:
1432 raise util.Abort(_('revision %d is the root of more than one '
1429 raise util.Abort(_('revision %d is the root of more than one '
1433 'branch') % rev[-1])
1430 'branch') % rev[-1])
1434 if self.applied:
1431 if self.applied:
1435 base = revlog.hex(repo.changelog.node(rev[0]))
1432 base = revlog.hex(repo.changelog.node(rev[0]))
1436 if base in [n.rev for n in self.applied]:
1433 if base in [n.rev for n in self.applied]:
1437 raise util.Abort(_('revision %d is already managed')
1434 raise util.Abort(_('revision %d is already managed')
1438 % rev[0])
1435 % rev[0])
1439 if heads != [revlog.bin(self.applied[-1].rev)]:
1436 if heads != [revlog.bin(self.applied[-1].rev)]:
1440 raise util.Abort(_('revision %d is not the parent of '
1437 raise util.Abort(_('revision %d is not the parent of '
1441 'the queue') % rev[0])
1438 'the queue') % rev[0])
1442 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1439 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1443 lastparent = repo.changelog.parentrevs(base)[0]
1440 lastparent = repo.changelog.parentrevs(base)[0]
1444 else:
1441 else:
1445 if heads != [repo.changelog.node(rev[0])]:
1442 if heads != [repo.changelog.node(rev[0])]:
1446 raise util.Abort(_('revision %d has unmanaged children')
1443 raise util.Abort(_('revision %d has unmanaged children')
1447 % rev[0])
1444 % rev[0])
1448 lastparent = None
1445 lastparent = None
1449
1446
1450 if git:
1447 if git:
1451 self.diffopts().git = True
1448 self.diffopts().git = True
1452
1449
1453 for r in rev:
1450 for r in rev:
1454 p1, p2 = repo.changelog.parentrevs(r)
1451 p1, p2 = repo.changelog.parentrevs(r)
1455 n = repo.changelog.node(r)
1452 n = repo.changelog.node(r)
1456 if p2 != revlog.nullrev:
1453 if p2 != revlog.nullrev:
1457 raise util.Abort(_('cannot import merge revision %d') % r)
1454 raise util.Abort(_('cannot import merge revision %d') % r)
1458 if lastparent and lastparent != r:
1455 if lastparent and lastparent != r:
1459 raise util.Abort(_('revision %d is not the parent of %d')
1456 raise util.Abort(_('revision %d is not the parent of %d')
1460 % (r, lastparent))
1457 % (r, lastparent))
1461 lastparent = p1
1458 lastparent = p1
1462
1459
1463 if not patchname:
1460 if not patchname:
1464 patchname = normname('%d.diff' % r)
1461 patchname = normname('%d.diff' % r)
1465 self.check_reserved_name(patchname)
1462 self.check_reserved_name(patchname)
1466 checkseries(patchname)
1463 checkseries(patchname)
1467 checkfile(patchname)
1464 checkfile(patchname)
1468 self.full_series.insert(0, patchname)
1465 self.full_series.insert(0, patchname)
1469
1466
1470 patchf = self.opener(patchname, "w")
1467 patchf = self.opener(patchname, "w")
1471 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1468 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1472 patchf.close()
1469 patchf.close()
1473
1470
1474 se = statusentry(revlog.hex(n), patchname)
1471 se = statusentry(revlog.hex(n), patchname)
1475 self.applied.insert(0, se)
1472 self.applied.insert(0, se)
1476
1473
1477 added.append(patchname)
1474 added.append(patchname)
1478 patchname = None
1475 patchname = None
1479 self.parse_series()
1476 self.parse_series()
1480 self.applied_dirty = 1
1477 self.applied_dirty = 1
1481
1478
1482 for filename in files:
1479 for filename in files:
1483 if existing:
1480 if existing:
1484 if filename == '-':
1481 if filename == '-':
1485 raise util.Abort(_('-e is incompatible with import from -'))
1482 raise util.Abort(_('-e is incompatible with import from -'))
1486 if not patchname:
1483 if not patchname:
1487 patchname = normname(filename)
1484 patchname = normname(filename)
1488 self.check_reserved_name(patchname)
1485 self.check_reserved_name(patchname)
1489 if not os.path.isfile(self.join(patchname)):
1486 if not os.path.isfile(self.join(patchname)):
1490 raise util.Abort(_("patch %s does not exist") % patchname)
1487 raise util.Abort(_("patch %s does not exist") % patchname)
1491 else:
1488 else:
1492 try:
1489 try:
1493 if filename == '-':
1490 if filename == '-':
1494 if not patchname:
1491 if not patchname:
1495 raise util.Abort(_('need --name to import a patch from -'))
1492 raise util.Abort(_('need --name to import a patch from -'))
1496 text = sys.stdin.read()
1493 text = sys.stdin.read()
1497 else:
1494 else:
1498 text = file(filename, 'rb').read()
1495 text = file(filename, 'rb').read()
1499 except IOError:
1496 except IOError:
1500 raise util.Abort(_("unable to read %s") % patchname)
1497 raise util.Abort(_("unable to read %s") % patchname)
1501 if not patchname:
1498 if not patchname:
1502 patchname = normname(os.path.basename(filename))
1499 patchname = normname(os.path.basename(filename))
1503 self.check_reserved_name(patchname)
1500 self.check_reserved_name(patchname)
1504 checkfile(patchname)
1501 checkfile(patchname)
1505 patchf = self.opener(patchname, "w")
1502 patchf = self.opener(patchname, "w")
1506 patchf.write(text)
1503 patchf.write(text)
1507 checkseries(patchname)
1504 checkseries(patchname)
1508 index = self.full_series_end() + i
1505 index = self.full_series_end() + i
1509 self.full_series[index:index] = [patchname]
1506 self.full_series[index:index] = [patchname]
1510 self.parse_series()
1507 self.parse_series()
1511 self.ui.warn("adding %s to series file\n" % patchname)
1508 self.ui.warn("adding %s to series file\n" % patchname)
1512 i += 1
1509 i += 1
1513 added.append(patchname)
1510 added.append(patchname)
1514 patchname = None
1511 patchname = None
1515 self.series_dirty = 1
1512 self.series_dirty = 1
1516 qrepo = self.qrepo()
1513 qrepo = self.qrepo()
1517 if qrepo:
1514 if qrepo:
1518 qrepo.add(added)
1515 qrepo.add(added)
1519
1516
1520 def delete(ui, repo, *patches, **opts):
1517 def delete(ui, repo, *patches, **opts):
1521 """remove patches from queue
1518 """remove patches from queue
1522
1519
1523 The patches must not be applied, unless they are arguments to
1520 The patches must not be applied, unless they are arguments to
1524 the --rev parameter. At least one patch or revision is required.
1521 the --rev parameter. At least one patch or revision is required.
1525
1522
1526 With --rev, mq will stop managing the named revisions (converting
1523 With --rev, mq will stop managing the named revisions (converting
1527 them to regular mercurial changesets). The qfinish command should be
1524 them to regular mercurial changesets). The qfinish command should be
1528 used as an alternative for qdel -r, as the latter option is deprecated.
1525 used as an alternative for qdel -r, as the latter option is deprecated.
1529
1526
1530 With --keep, the patch files are preserved in the patch directory."""
1527 With --keep, the patch files are preserved in the patch directory."""
1531 q = repo.mq
1528 q = repo.mq
1532 q.delete(repo, patches, opts)
1529 q.delete(repo, patches, opts)
1533 q.save_dirty()
1530 q.save_dirty()
1534 return 0
1531 return 0
1535
1532
1536 def applied(ui, repo, patch=None, **opts):
1533 def applied(ui, repo, patch=None, **opts):
1537 """print the patches already applied"""
1534 """print the patches already applied"""
1538 q = repo.mq
1535 q = repo.mq
1539 if patch:
1536 if patch:
1540 if patch not in q.series:
1537 if patch not in q.series:
1541 raise util.Abort(_("patch %s is not in series file") % patch)
1538 raise util.Abort(_("patch %s is not in series file") % patch)
1542 end = q.series.index(patch) + 1
1539 end = q.series.index(patch) + 1
1543 else:
1540 else:
1544 end = q.series_end(True)
1541 end = q.series_end(True)
1545 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1542 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1546
1543
1547 def unapplied(ui, repo, patch=None, **opts):
1544 def unapplied(ui, repo, patch=None, **opts):
1548 """print the patches not yet applied"""
1545 """print the patches not yet applied"""
1549 q = repo.mq
1546 q = repo.mq
1550 if patch:
1547 if patch:
1551 if patch not in q.series:
1548 if patch not in q.series:
1552 raise util.Abort(_("patch %s is not in series file") % patch)
1549 raise util.Abort(_("patch %s is not in series file") % patch)
1553 start = q.series.index(patch) + 1
1550 start = q.series.index(patch) + 1
1554 else:
1551 else:
1555 start = q.series_end(True)
1552 start = q.series_end(True)
1556 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1553 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1557
1554
1558 def qimport(ui, repo, *filename, **opts):
1555 def qimport(ui, repo, *filename, **opts):
1559 """import a patch
1556 """import a patch
1560
1557
1561 The patch is inserted into the series after the last applied patch.
1558 The patch is inserted into the series after the last applied patch.
1562 If no patches have been applied, qimport prepends the patch
1559 If no patches have been applied, qimport prepends the patch
1563 to the series.
1560 to the series.
1564
1561
1565 The patch will have the same name as its source file unless you
1562 The patch will have the same name as its source file unless you
1566 give it a new one with --name.
1563 give it a new one with --name.
1567
1564
1568 You can register an existing patch inside the patch directory
1565 You can register an existing patch inside the patch directory
1569 with the --existing flag.
1566 with the --existing flag.
1570
1567
1571 With --force, an existing patch of the same name will be overwritten.
1568 With --force, an existing patch of the same name will be overwritten.
1572
1569
1573 An existing changeset may be placed under mq control with --rev
1570 An existing changeset may be placed under mq control with --rev
1574 (e.g. qimport --rev tip -n patch will place tip under mq control).
1571 (e.g. qimport --rev tip -n patch will place tip under mq control).
1575 With --git, patches imported with --rev will use the git diff
1572 With --git, patches imported with --rev will use the git diff
1576 format.
1573 format.
1577 """
1574 """
1578 q = repo.mq
1575 q = repo.mq
1579 q.qimport(repo, filename, patchname=opts['name'],
1576 q.qimport(repo, filename, patchname=opts['name'],
1580 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1577 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1581 git=opts['git'])
1578 git=opts['git'])
1582 q.save_dirty()
1579 q.save_dirty()
1583 return 0
1580 return 0
1584
1581
1585 def init(ui, repo, **opts):
1582 def init(ui, repo, **opts):
1586 """init a new queue repository
1583 """init a new queue repository
1587
1584
1588 The queue repository is unversioned by default. If -c is
1585 The queue repository is unversioned by default. If -c is
1589 specified, qinit will create a separate nested repository
1586 specified, qinit will create a separate nested repository
1590 for patches (qinit -c may also be run later to convert
1587 for patches (qinit -c may also be run later to convert
1591 an unversioned patch repository into a versioned one).
1588 an unversioned patch repository into a versioned one).
1592 You can use qcommit to commit changes to this queue repository."""
1589 You can use qcommit to commit changes to this queue repository."""
1593 q = repo.mq
1590 q = repo.mq
1594 r = q.init(repo, create=opts['create_repo'])
1591 r = q.init(repo, create=opts['create_repo'])
1595 q.save_dirty()
1592 q.save_dirty()
1596 if r:
1593 if r:
1597 if not os.path.exists(r.wjoin('.hgignore')):
1594 if not os.path.exists(r.wjoin('.hgignore')):
1598 fp = r.wopener('.hgignore', 'w')
1595 fp = r.wopener('.hgignore', 'w')
1599 fp.write('^\\.hg\n')
1596 fp.write('^\\.hg\n')
1600 fp.write('^\\.mq\n')
1597 fp.write('^\\.mq\n')
1601 fp.write('syntax: glob\n')
1598 fp.write('syntax: glob\n')
1602 fp.write('status\n')
1599 fp.write('status\n')
1603 fp.write('guards\n')
1600 fp.write('guards\n')
1604 fp.close()
1601 fp.close()
1605 if not os.path.exists(r.wjoin('series')):
1602 if not os.path.exists(r.wjoin('series')):
1606 r.wopener('series', 'w').close()
1603 r.wopener('series', 'w').close()
1607 r.add(['.hgignore', 'series'])
1604 r.add(['.hgignore', 'series'])
1608 commands.add(ui, r)
1605 commands.add(ui, r)
1609 return 0
1606 return 0
1610
1607
1611 def clone(ui, source, dest=None, **opts):
1608 def clone(ui, source, dest=None, **opts):
1612 '''clone main and patch repository at same time
1609 '''clone main and patch repository at same time
1613
1610
1614 If source is local, destination will have no patches applied. If
1611 If source is local, destination will have no patches applied. If
1615 source is remote, this command cannot check whether patches are
1612 source is remote, this command cannot check whether patches are
1616 applied in source, so it cannot guarantee that patches are not
1613 applied in source, so it cannot guarantee that patches are not
1617 applied in destination. If you clone a remote repository, make sure
1614 applied in destination. If you clone a remote repository, make sure
1618 it has no patches applied before you clone it.
1615 it has no patches applied before you clone it.
1619
1616
1620 Source patch repository is looked for in <src>/.hg/patches by
1617 Source patch repository is looked for in <src>/.hg/patches by
1621 default. Use -p <url> to change.
1618 default. Use -p <url> to change.
1622
1619
1623 The patch directory must be a nested mercurial repository, as
1620 The patch directory must be a nested mercurial repository, as
1624 would be created by qinit -c.
1621 would be created by qinit -c.
1625 '''
1622 '''
1626 def patchdir(repo):
1623 def patchdir(repo):
1627 url = repo.url()
1624 url = repo.url()
1628 if url.endswith('/'):
1625 if url.endswith('/'):
1629 url = url[:-1]
1626 url = url[:-1]
1630 return url + '/.hg/patches'
1627 return url + '/.hg/patches'
1631 cmdutil.setremoteconfig(ui, opts)
1628 cmdutil.setremoteconfig(ui, opts)
1632 if dest is None:
1629 if dest is None:
1633 dest = hg.defaultdest(source)
1630 dest = hg.defaultdest(source)
1634 sr = hg.repository(ui, ui.expandpath(source))
1631 sr = hg.repository(ui, ui.expandpath(source))
1635 patchespath = opts['patches'] or patchdir(sr)
1632 patchespath = opts['patches'] or patchdir(sr)
1636 try:
1633 try:
1637 pr = hg.repository(ui, patchespath)
1634 pr = hg.repository(ui, patchespath)
1638 except RepoError:
1635 except RepoError:
1639 raise util.Abort(_('versioned patch repository not found'
1636 raise util.Abort(_('versioned patch repository not found'
1640 ' (see qinit -c)'))
1637 ' (see qinit -c)'))
1641 qbase, destrev = None, None
1638 qbase, destrev = None, None
1642 if sr.local():
1639 if sr.local():
1643 if sr.mq.applied:
1640 if sr.mq.applied:
1644 qbase = revlog.bin(sr.mq.applied[0].rev)
1641 qbase = revlog.bin(sr.mq.applied[0].rev)
1645 if not hg.islocal(dest):
1642 if not hg.islocal(dest):
1646 heads = dict.fromkeys(sr.heads())
1643 heads = dict.fromkeys(sr.heads())
1647 for h in sr.heads(qbase):
1644 for h in sr.heads(qbase):
1648 del heads[h]
1645 del heads[h]
1649 destrev = heads.keys()
1646 destrev = heads.keys()
1650 destrev.append(sr.changelog.parents(qbase)[0])
1647 destrev.append(sr.changelog.parents(qbase)[0])
1651 elif sr.capable('lookup'):
1648 elif sr.capable('lookup'):
1652 try:
1649 try:
1653 qbase = sr.lookup('qbase')
1650 qbase = sr.lookup('qbase')
1654 except RepoError:
1651 except RepoError:
1655 pass
1652 pass
1656 ui.note(_('cloning main repo\n'))
1653 ui.note(_('cloning main repo\n'))
1657 sr, dr = hg.clone(ui, sr.url(), dest,
1654 sr, dr = hg.clone(ui, sr.url(), dest,
1658 pull=opts['pull'],
1655 pull=opts['pull'],
1659 rev=destrev,
1656 rev=destrev,
1660 update=False,
1657 update=False,
1661 stream=opts['uncompressed'])
1658 stream=opts['uncompressed'])
1662 ui.note(_('cloning patch repo\n'))
1659 ui.note(_('cloning patch repo\n'))
1663 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1660 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1664 pull=opts['pull'], update=not opts['noupdate'],
1661 pull=opts['pull'], update=not opts['noupdate'],
1665 stream=opts['uncompressed'])
1662 stream=opts['uncompressed'])
1666 if dr.local():
1663 if dr.local():
1667 if qbase:
1664 if qbase:
1668 ui.note(_('stripping applied patches from destination repo\n'))
1665 ui.note(_('stripping applied patches from destination repo\n'))
1669 dr.mq.strip(dr, qbase, update=False, backup=None)
1666 dr.mq.strip(dr, qbase, update=False, backup=None)
1670 if not opts['noupdate']:
1667 if not opts['noupdate']:
1671 ui.note(_('updating destination repo\n'))
1668 ui.note(_('updating destination repo\n'))
1672 hg.update(dr, dr.changelog.tip())
1669 hg.update(dr, dr.changelog.tip())
1673
1670
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    q = repo.mq
    r = q.qrepo()
    if not r: raise util.Abort('no queue repository')
    commands.commit(r.ui, r, *pats, **opts)

def series(ui, repo, **opts):
    """print the entire series file"""
    repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0

def top(ui, repo, **opts):
    """print the name of the current patch"""
    q = repo.mq
    t = q.applied and q.series_end(True) or 0
    if t:
        return q.qseries(repo, start=t-1, length=1, status='A',
                         summary=opts.get('summary'))
    else:
        ui.write("No patches applied\n")
        return 1

def next(ui, repo, **opts):
    """print the name of the next patch"""
    q = repo.mq
    end = q.series_end()
    if end == len(q.series):
        ui.write("All patches applied\n")
        return 1
    return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))

def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    q = repo.mq
    l = len(q.applied)
    if l == 1:
        ui.write("Only one patch applied\n")
        return 1
    if not l:
        ui.write("No patches applied\n")
        return 1
    return q.qseries(repo, start=l-2, length=1, status='A',
                     summary=opts.get('summary'))

def setupheaderopts(ui, opts):
    def do(opt,val):
        if not opts[opt] and opts['current' + opt]:
            opts[opt] = val
    do('user', ui.username())
    do('date', "%d %d" % util.makedate())

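# Editor's example for setupheaderopts above (sketch): with "hg qnew -U fix.patch"
# the 'currentuser' flag is set, so do('user', ui.username()) fills in opts['user'];
# "hg qnew -D fix.patch" fills opts['date'] with the current date in the same way.
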
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them. You may also use -I, -X, and/or a list of
    files after the patch name to add only changes to matching files
    to the new patch, leaving the rest as uncommitted modifications.

    -e, -m or -l set the patch header as well as the commit message.
    If none is specified, the patch header is empty and the
    commit message is '[mq]: PATCH'"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts['edit']:
        message = ui.edit(message, ui.username())
    opts['msg'] = message
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.save_dirty()
    return 0

def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will contain only
    the modifications that match those patterns; the remaining modifications
    will remain in the working directory.

    hg add/remove/copy/rename work as usual, though you might want to use
    git-style patches (--git or [diff] git=1) to track copies and renames.
    """
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if not q.applied:
            ui.write(_("No patches applied\n"))
            return 1
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        patch = q.applied[-1].name
        (message, comment, user, date, hasdiff) = q.readheaders(patch)
        message = ui.edit('\n'.join(message), user or ui.username())
    setupheaderopts(ui, opts)
    ret = q.refresh(repo, pats, msg=message, **opts)
    q.save_dirty()
    return ret

def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any changes which
    have been made in the working directory since the last refresh (thus
    showing what the current patch would become after a qrefresh).

    Use 'hg diff' if you only want to see the changes made since the last
    qrefresh, or 'hg export qtip' if you want to see changes made by the
    current patch without including changes made since the qrefresh.
    """
    repo.mq.diff(repo, pats, opts)
    return 0

def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -k/--keep, the folded patch files will not
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('Skipping already folded patch %s') % p)
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            messages.append(q.readheaders(p)[0])
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()

def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    q = repo.mq
    patch = q.lookup(patch)
    if q.isapplied(patch):
        ret = q.pop(repo, patch, force=opts['force'])
    else:
        ret = q.push(repo, patch, force=opts['force'])
    q.save_dirty()
    return ret

def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    To set a negative guard "-foo" on topmost patch ("--" is needed so
    hg will not interpret "-foo" as an option):
        hg qguard -- -foo

    To set guards on another patch:
        hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        status(q.series.index(q.lookup(patch)))

def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write('No patches applied\n')
            return 1
        patch = q.lookup('qtip')
    message = repo.mq.readheaders(patch)[0]

    ui.write('\n'.join(message) + '\n')

def lastsavename(path):
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    namere = re.compile("%s.([0-9]+)" % base)
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex == None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)

def savename(path):
    (last, index) = lastsavename(path)
    if last is None:
        index = 0
    newpath = path + ".%d" % (index + 1)
    return newpath

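# Editor's example for savename/lastsavename above (sketch): for a queue at
# .hg/patches with earlier saves .hg/patches.1 and .hg/patches.2, lastsavename()
# returns ('.hg/patches.2', 2) and savename() produces '.hg/patches.3'.
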
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    When --force is applied, all local changes in patched files will be lost.
    """
    q = repo.mq
    mergeq = None

    if opts['all']:
        if not q.series:
            ui.warn(_('no patches in series\n'))
            return 0
        patch = q.series[-1]
    if opts['merge']:
        if opts['name']:
            newpath = repo.join(opts['name'])
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn("no saved queues found, please use -n\n")
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)
    ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
                 mergeq=mergeq)
    return ret

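# Editor's example for the push command above (sketch): "hg qpush" applies the next
# patch in the series, "hg qpush -a" applies all remaining patches, and
# "hg qpush -m [-n NAME]" merges against a queue previously saved with "hg qsave"
# (located via lastsavename when no name is given).
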
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    By default, pops off the top of the patch stack. If given a patch name,
    keeps popping off patches until the named patch is at the top of the stack.
    """
    localupdate = True
    if opts['name']:
        q = queue(ui, repo.join(""), repo.join(opts['name']))
        ui.warn('using patch queue: %s\n' % q.path)
        localupdate = False
    else:
        q = repo.mq
    ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
                all=opts['all'])
    q.save_dirty()
    return ret

def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                r.dirstate.forget(patch)
                r.dirstate.add(name)
            else:
                if r.dirstate[name] == 'r':
                    r.undelete([name])
                r.copy(patch, name)
                r.remove([patch], False)
        finally:
            del wlock

    q.save_dirty()

def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    rev = repo.lookup(rev)
    q = repo.mq
    q.restore(repo, rev, delete=opts['delete'],
              qupdate=opts['update'])
    q.save_dirty()
    return 0

def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        except:
            pass
    return 0

def strip(ui, repo, rev, **opts):
    """strip a revision and all its descendants from the repository

    If one of the working dir's parent revisions is stripped, the working
    directory will be updated to the parent of the stripped revision.
    """
    backup = 'all'
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'

    rev = repo.lookup(rev)
    p = repo.dirstate.parents()
    cl = repo.changelog
    update = True
    if p[0] == revlog.nullid:
        update = False
    elif p[1] == revlog.nullid and rev != cl.ancestor(p[0], rev):
        update = False
    elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
        update = False

    repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
    return 0

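# Editor's example for the strip command above (sketch): "hg strip REV" removes REV
# and its descendants; unless -n/--nobackup is given, the stripped changesets are
# first saved as a bundle (by repo.mq.strip) so they can be restored later with
# "hg unbundle".
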
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if it
    has no guards or any positive guards match the currently selected guard,
    but will not be pushed if any negative guards match the current guard.
    For example:

        qguard foo.patch -stable    (negative guard)
        qguard bar.patch +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it
    has a positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are skipped
    and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last applied
    patch that is not guarded. Use --reapply (which implies --pop) to push
    back to the current patch afterwards, but skip guarded patches.

    Use -s/--series to print a list of all guards in the series file (no
    other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            q.save_dirty()

def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied patches) by
    moving them out of mq control into regular repository history.

    Accepts a revision range or the --all option. If --all is specified, all
    applied mq revisions are removed from mq control. Otherwise, the given
    revisions must be at the base of the stack of applied patches.

    This can be especially useful if your changes have been applied to an
    upstream repository, or if you are about to push your changes to upstream.
    """
    if not opts['applied'] and not revrange:
        raise util.Abort(_('no revisions specified'))
    elif opts['applied']:
        revrange = ('qbase:qtip',) + revrange

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = cmdutil.revrange(repo, revrange)
    q.finish(repo, revs)
    q.save_dirty()
    return 0

def reposetup(ui, repo):
    class mqrepo(repo.__class__):
        def abort_if_wdir_patched(self, errmsg, force=False):
            if self.mq.applied and not force:
                parent = revlog.hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, *args, **opts):
            if len(args) >= 6:
                force = args[5]
            else:
                force = opts.get('force')
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(*args, **opts)

        def push(self, remote, force=False, revs=None):
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]

            if mqtags[-1][0] not in self.changelog.nodemap:
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(mqtags[-1][0]))
                return tagscache

            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

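        # Editor's note on tags() above (sketch): with patches a.patch and b.patch
        # applied, tags() exposes each patch name as a tag plus three synthetic tags:
        # 'qtip' (the node of b.patch), 'qbase' (the node of a.patch) and 'qparent'
        # (the parent of qbase), in addition to the repository's regular tags.
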
        def _branchtags(self, partial, lrev):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags(partial, lrev)

            cl = self.changelog
            qbasenode = revlog.bin(q.applied[0].rev)
            if qbasenode not in cl.nodemap:
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(qbasenode))
                return super(mqrepo, self)._branchtags(partial, lrev)

            qbase = cl.rev(qbasenode)
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, len(cl))

            return partial

    if repo.local():
        repo.__class__ = mqrepo
        repo.mq = queue(ui, repo.join(""))

seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

headeropts = [
    ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
    ('u', 'user', '', _('add "From: <given user>" to patch')),
    ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
    ('d', 'date', '', _('add "Date: <given date>" to patch'))]

cmdtable = {
    "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
    "qclone":
        (clone,
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None, _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '', _('location of source patch repo')),
         ] + commands.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]')),
    "qcommit|qci":
        (commit,
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...')),
    "^qdiff":
        (diff,
         commands.diffopts + commands.diffopts2 + commands.walkopts,
         _('hg qdiff [OPTION]... [FILE]...')),
    "qdelete|qremove|qrm":
        (delete,
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [], _('stop managing a revision'))],
         _('hg qdelete [-k] [-r REV]... [PATCH]...')),
    'qfold':
        (fold,
         [('e', 'edit', None, _('edit patch header')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
    'qgoto':
        (goto,
         [('f', 'force', None, _('overwrite any local changes'))],
         _('hg qgoto [OPTION]... PATCH')),
    'qguard':
        (guard,
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
    'qheader': (header, [], _('hg qheader [PATCH]')),
    "^qimport":
        (qimport,
         [('e', 'existing', None, 'import file in patch dir'),
          ('n', 'name', '', 'patch file name'),
          ('f', 'force', None, 'overwrite existing files'),
          ('r', 'rev', [], 'place existing revisions under mq control'),
          ('g', 'git', None, _('use git extended diff format'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
    "^qinit":
        (init,
         [('c', 'create-repo', None, 'create queue repository')],
         _('hg qinit [-c]')),
    "qnew":
        (new,
         [('e', 'edit', None, _('edit commit message')),
          ('f', 'force', None, _('import uncommitted changes into patch')),
          ('g', 'git', None, _('use git extended diff format')),
         ] + commands.walkopts + commands.commitopts + headeropts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
    "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
    "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
    "^qpop":
        (pop,
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '', _('queue name to pop')),
          ('f', 'force', None, _('forget any local changes'))],
         _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
    "^qpush":
        (push,
         [('f', 'force', None, _('apply if the patch has rejects')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue')),
          ('n', 'name', '', _('merge queue name'))],
         _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
    "^qrefresh":
        (refresh,
         [('e', 'edit', None, _('edit commit message')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None, _('refresh only files already in the patch')),
         ] + commands.walkopts + commands.commitopts + headeropts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
    'qrename|qmv':
        (rename, [], _('hg qrename PATCH1 [PATCH2]')),
    "qrestore":
        (restore,
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working dir'))],
         _('hg qrestore [-d] [-u] REV')),
    "qsave":
        (save,
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '', _('copy directory name')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
    "qselect":
        (select,
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...')),
    "qseries":
        (series,
         [('m', 'missing', None, _('print patches not in series')),
         ] + seriesopts,
         _('hg qseries [-ms]')),
    "^strip":
        (strip,
         [('f', 'force', None, _('force removal with local changes')),
          ('b', 'backup', None, _('bundle unrelated changesets')),
          ('n', 'nobackup', None, _('no backups'))],
         _('hg strip [-f] [-b] [-n] REV')),
    "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
    "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
    "qfinish":
        (finish,
         [('a', 'applied', None, _('finish all applied changesets'))],
         _('hg qfinish [-a] [REV...]')),
}
@@ -1,283 +1,281 @@
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to email notifications to people when changesets are
8 # hook extension to email notifications to people when changesets are
9 # committed to a repo they subscribe to.
9 # committed to a repo they subscribe to.
10 #
10 #
11 # default mode is to print messages to stdout, for testing and
11 # default mode is to print messages to stdout, for testing and
12 # configuring.
12 # configuring.
13 #
13 #
14 # to use, configure notify extension and enable in hgrc like this:
14 # to use, configure notify extension and enable in hgrc like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.notify =
17 # hgext.notify =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # one email for each incoming changeset
20 # # one email for each incoming changeset
21 # incoming.notify = python:hgext.notify.hook
21 # incoming.notify = python:hgext.notify.hook
22 # # batch emails when many changesets incoming at one time
22 # # batch emails when many changesets incoming at one time
23 # changegroup.notify = python:hgext.notify.hook
23 # changegroup.notify = python:hgext.notify.hook
24 #
24 #
25 # [notify]
25 # [notify]
26 # # config items go in here
26 # # config items go in here
27 #
27 #
28 # config items:
28 # config items:
29 #
29 #
30 # REQUIRED:
30 # REQUIRED:
31 # config = /path/to/file # file containing subscriptions
31 # config = /path/to/file # file containing subscriptions
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # test = True # print messages to stdout for testing
34 # test = True # print messages to stdout for testing
35 # strip = 3 # number of slashes to strip for url paths
35 # strip = 3 # number of slashes to strip for url paths
36 # domain = example.com # domain to use if committer missing domain
36 # domain = example.com # domain to use if committer missing domain
37 # style = ... # style file to use when formatting email
37 # style = ... # style file to use when formatting email
38 # template = ... # template to use when formatting email
38 # template = ... # template to use when formatting email
39 # incoming = ... # template to use when run as incoming hook
39 # incoming = ... # template to use when run as incoming hook
40 # changegroup = ... # template when run as changegroup hook
40 # changegroup = ... # template when run as changegroup hook
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 # maxsubject = 67 # truncate subject line longer than this
42 # maxsubject = 67 # truncate subject line longer than this
43 # diffstat = True # add a diffstat before the diff content
43 # diffstat = True # add a diffstat before the diff content
44 # sources = serve # notify if source of incoming changes in this list
44 # sources = serve # notify if source of incoming changes in this list
45 # # (serve == ssh or http, push, pull, bundle)
45 # # (serve == ssh or http, push, pull, bundle)
46 # [email]
46 # [email]
47 # from = user@host.com # email address to send as if none given
47 # from = user@host.com # email address to send as if none given
48 # [web]
48 # [web]
49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
50 #
50 #
51 # notify config file has same format as regular hgrc. it has two
51 # notify config file has same format as regular hgrc. it has two
52 # sections so you can express subscriptions in whatever way is handier
52 # sections so you can express subscriptions in whatever way is handier
53 # for you.
53 # for you.
54 #
54 #
55 # [usersubs]
55 # [usersubs]
56 # # key is subscriber email, value is ","-separated list of glob patterns
56 # # key is subscriber email, value is ","-separated list of glob patterns
57 # user@host = pattern
57 # user@host = pattern
58 #
58 #
59 # [reposubs]
59 # [reposubs]
60 # # key is glob pattern, value is ","-separated list of subscriber emails
60 # # key is glob pattern, value is ","-separated list of subscriber emails
61 # pattern = user@host
61 # pattern = user@host
62 #
62 #
63 # glob patterns are matched against path to repo root.
63 # glob patterns are matched against path to repo root.
64 #
64 #
65 # if you like, you can put notify config file in repo that users can
65 # if you like, you can put notify config file in repo that users can
66 # push changes to, they can manage their own subscriptions.
66 # push changes to, they can manage their own subscriptions.
67
67
68 from mercurial.i18n import _
68 from mercurial.i18n import _
69 from mercurial.node import bin, short
69 from mercurial.node import bin, short
70 from mercurial import patch, cmdutil, templater, util, mail
70 from mercurial import patch, cmdutil, templater, util, mail
71 import email.Parser, fnmatch, socket, time
71 import email.Parser, fnmatch, socket, time
72
72
73 # the template for a single changeset can include email headers.
73 # the template for a single changeset can include email headers.
74 single_template = '''
74 single_template = '''
75 Subject: changeset in {webroot}: {desc|firstline|strip}
75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 From: {author}
76 From: {author}
77
77
78 changeset {node|short} in {root}
78 changeset {node|short} in {root}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 description:
80 description:
81 \t{desc|tabindent|strip}
81 \t{desc|tabindent|strip}
82 '''.lstrip()
82 '''.lstrip()
83
83
84 # the template for multiple changesets should not contain email headers,
84 # the template for multiple changesets should not contain email headers,
85 # because only the first set of headers will be used and the result will
85 # because only the first set of headers will be used and the result will
86 # look strange.
86 # look strange.
87 multiple_template = '''
87 multiple_template = '''
88 changeset {node|short} in {root}
88 changeset {node|short} in {root}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 summary: {desc|firstline}
90 summary: {desc|firstline}
91 '''
91 '''
92
92
93 deftemplates = {
93 deftemplates = {
94 'changegroup': multiple_template,
94 'changegroup': multiple_template,
95 }
95 }
96
96
97 class notifier(object):
97 class notifier(object):
98 '''email notification class.'''
98 '''email notification class.'''
99
99
100 def __init__(self, ui, repo, hooktype):
100 def __init__(self, ui, repo, hooktype):
101 self.ui = ui
101 self.ui = ui
102 cfg = self.ui.config('notify', 'config')
102 cfg = self.ui.config('notify', 'config')
103 if cfg:
103 if cfg:
104 self.ui.readsections(cfg, 'usersubs', 'reposubs')
104 self.ui.readsections(cfg, 'usersubs', 'reposubs')
105 self.repo = repo
105 self.repo = repo
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 self.root = self.strip(self.repo.root)
107 self.root = self.strip(self.repo.root)
108 self.domain = self.ui.config('notify', 'domain')
108 self.domain = self.ui.config('notify', 'domain')
109 self.subs = self.subscribers()
109 self.subs = self.subscribers()
110
110
111 mapfile = self.ui.config('notify', 'style')
111 mapfile = self.ui.config('notify', 'style')
112 template = (self.ui.config('notify', hooktype) or
112 template = (self.ui.config('notify', hooktype) or
113 self.ui.config('notify', 'template'))
113 self.ui.config('notify', 'template'))
114 self.t = cmdutil.changeset_templater(self.ui, self.repo,
114 self.t = cmdutil.changeset_templater(self.ui, self.repo,
115 False, mapfile, False)
115 False, mapfile, False)
116 if not mapfile and not template:
116 if not mapfile and not template:
117 template = deftemplates.get(hooktype) or single_template
117 template = deftemplates.get(hooktype) or single_template
118 if template:
118 if template:
119 template = templater.parsestring(template, quoted=False)
119 template = templater.parsestring(template, quoted=False)
120 self.t.use_template(template)
120 self.t.use_template(template)
121
121
122 def strip(self, path):
122 def strip(self, path):
123 '''strip leading slashes from local path, turn into web-safe path.'''
123 '''strip leading slashes from local path, turn into web-safe path.'''
124
124
125 path = util.pconvert(path)
125 path = util.pconvert(path)
126 count = self.stripcount
126 count = self.stripcount
127 while count > 0:
127 while count > 0:
128 c = path.find('/')
128 c = path.find('/')
129 if c == -1:
129 if c == -1:
130 break
130 break
131 path = path[c+1:]
131 path = path[c+1:]
132 count -= 1
132 count -= 1
133 return path
133 return path
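A short worked example of strip() above, using a hypothetical repository path: with strip = 3, each pass drops one leading path component, leaving the web-visible tail that later appears as {webroot}.

path = '/home/hg/repos/widgets'          # hypothetical repo root
for _ in range(3):                       # stripcount = 3
    path = path[path.find('/') + 1:]     # drop everything up to the next '/'
# path is now 'repos/widgets'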
134
134
135 def fixmail(self, addr):
135 def fixmail(self, addr):
136 '''try to clean up email addresses.'''
136 '''try to clean up email addresses.'''
137
137
138 addr = util.email(addr.strip())
138 addr = util.email(addr.strip())
139 if self.domain:
139 if self.domain:
140 a = addr.find('@localhost')
140 a = addr.find('@localhost')
141 if a != -1:
141 if a != -1:
142 addr = addr[:a]
142 addr = addr[:a]
143 if '@' not in addr:
143 if '@' not in addr:
144 return addr + '@' + self.domain
144 return addr + '@' + self.domain
145 return addr
145 return addr
146
146
147 def subscribers(self):
147 def subscribers(self):
148 '''return list of email addresses of subscribers to this repo.'''
148 '''return list of email addresses of subscribers to this repo.'''
149
149
150 subs = {}
150 subs = {}
151 for user, pats in self.ui.configitems('usersubs'):
151 for user, pats in self.ui.configitems('usersubs'):
152 for pat in pats.split(','):
152 for pat in pats.split(','):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 subs[self.fixmail(user)] = 1
154 subs[self.fixmail(user)] = 1
155 for pat, users in self.ui.configitems('reposubs'):
155 for pat, users in self.ui.configitems('reposubs'):
156 if fnmatch.fnmatch(self.repo.root, pat):
156 if fnmatch.fnmatch(self.repo.root, pat):
157 for user in users.split(','):
157 for user in users.split(','):
158 subs[self.fixmail(user)] = 1
158 subs[self.fixmail(user)] = 1
159 subs = subs.keys()
159 return util.sort(subs)
160 subs.sort()
161 return subs
162
160
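This hunk replaces the keys()/sort()/return idiom with the util.sort helper that this changeset introduces. A minimal sketch of such a helper, assuming it accepts any iterable and returns a sorted list (a dict contributing its keys), is:

def sort(l):
    # assumed behaviour of util.sort: copy/convert the iterable, sort, return it
    if not isinstance(l, list):
        l = list(l)
    l.sort()
    return l

subs = {'b@example.com': 1, 'a@example.com': 1}
sort(subs)    # -> ['a@example.com', 'b@example.com']

The same substitution recurs in the purge and transplant hunks further down (util.sort(status[4] + status[5]), util.sort(directories), util.sort(revmap)), so one helper replaces a copy-then-sort dance in every caller.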
163 def url(self, path=None):
161 def url(self, path=None):
164 return self.ui.config('web', 'baseurl') + (path or self.root)
162 return self.ui.config('web', 'baseurl') + (path or self.root)
165
163
166 def node(self, node):
164 def node(self, node):
167 '''format one changeset.'''
165 '''format one changeset.'''
168
166
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
167 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 baseurl=self.ui.config('web', 'baseurl'),
168 baseurl=self.ui.config('web', 'baseurl'),
171 root=self.repo.root,
169 root=self.repo.root,
172 webroot=self.root)
170 webroot=self.root)
173
171
174 def skipsource(self, source):
172 def skipsource(self, source):
175 '''true if incoming changes from this source should be skipped.'''
173 '''true if incoming changes from this source should be skipped.'''
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
174 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 return source not in ok_sources
175 return source not in ok_sources
178
176
179 def send(self, node, count, data):
177 def send(self, node, count, data):
180 '''send message.'''
178 '''send message.'''
181
179
182 p = email.Parser.Parser()
180 p = email.Parser.Parser()
183 msg = p.parsestr(data)
181 msg = p.parsestr(data)
184
182
185 def fix_subject():
183 def fix_subject():
186 '''try to make subject line exist and be useful.'''
184 '''try to make subject line exist and be useful.'''
187
185
188 subject = msg['Subject']
186 subject = msg['Subject']
189 if not subject:
187 if not subject:
190 if count > 1:
188 if count > 1:
191 subject = _('%s: %d new changesets') % (self.root, count)
189 subject = _('%s: %d new changesets') % (self.root, count)
192 else:
190 else:
193 changes = self.repo.changelog.read(node)
191 changes = self.repo.changelog.read(node)
194 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
192 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 subject = '%s: %s' % (self.root, s)
193 subject = '%s: %s' % (self.root, s)
196 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
194 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 if maxsubject and len(subject) > maxsubject:
195 if maxsubject and len(subject) > maxsubject:
198 subject = subject[:maxsubject-3] + '...'
196 subject = subject[:maxsubject-3] + '...'
199 del msg['Subject']
197 del msg['Subject']
200 msg['Subject'] = subject
198 msg['Subject'] = subject
201
199
202 def fix_sender():
200 def fix_sender():
203 '''try to make message have proper sender.'''
201 '''try to make message have proper sender.'''
204
202
205 sender = msg['From']
203 sender = msg['From']
206 if not sender:
204 if not sender:
207 sender = self.ui.config('email', 'from') or self.ui.username()
205 sender = self.ui.config('email', 'from') or self.ui.username()
208 if '@' not in sender or '@localhost' in sender:
206 if '@' not in sender or '@localhost' in sender:
209 sender = self.fixmail(sender)
207 sender = self.fixmail(sender)
210 del msg['From']
208 del msg['From']
211 msg['From'] = sender
209 msg['From'] = sender
212
210
213 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
211 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
214 fix_subject()
212 fix_subject()
215 fix_sender()
213 fix_sender()
216
214
217 msg['X-Hg-Notification'] = 'changeset ' + short(node)
215 msg['X-Hg-Notification'] = 'changeset ' + short(node)
218 if not msg['Message-Id']:
216 if not msg['Message-Id']:
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
217 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
220 (short(node), int(time.time()),
218 (short(node), int(time.time()),
221 hash(self.repo.root), socket.getfqdn()))
219 hash(self.repo.root), socket.getfqdn()))
222 msg['To'] = ', '.join(self.subs)
220 msg['To'] = ', '.join(self.subs)
223
221
224 msgtext = msg.as_string(0)
222 msgtext = msg.as_string(0)
225 if self.ui.configbool('notify', 'test', True):
223 if self.ui.configbool('notify', 'test', True):
226 self.ui.write(msgtext)
224 self.ui.write(msgtext)
227 if not msgtext.endswith('\n'):
225 if not msgtext.endswith('\n'):
228 self.ui.write('\n')
226 self.ui.write('\n')
229 else:
227 else:
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
228 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
231 (len(self.subs), count))
229 (len(self.subs), count))
232 mail.sendmail(self.ui, util.email(msg['From']),
230 mail.sendmail(self.ui, util.email(msg['From']),
233 self.subs, msgtext)
231 self.subs, msgtext)
234
232
235 def diff(self, node, ref):
233 def diff(self, node, ref):
236 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
234 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
237 prev = self.repo.changelog.parents(node)[0]
235 prev = self.repo.changelog.parents(node)[0]
238 self.ui.pushbuffer()
236 self.ui.pushbuffer()
239 patch.diff(self.repo, prev, ref)
237 patch.diff(self.repo, prev, ref)
240 difflines = self.ui.popbuffer().splitlines(1)
238 difflines = self.ui.popbuffer().splitlines(1)
241 if self.ui.configbool('notify', 'diffstat', True):
239 if self.ui.configbool('notify', 'diffstat', True):
242 s = patch.diffstat(difflines)
240 s = patch.diffstat(difflines)
243 # s may be nil, don't include the header if it is
241 # s may be nil, don't include the header if it is
244 if s:
242 if s:
245 self.ui.write('\ndiffstat:\n\n%s' % s)
243 self.ui.write('\ndiffstat:\n\n%s' % s)
246 if maxdiff == 0:
244 if maxdiff == 0:
247 return
245 return
248 if maxdiff > 0 and len(difflines) > maxdiff:
246 if maxdiff > 0 and len(difflines) > maxdiff:
249 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
247 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
250 (len(difflines), maxdiff))
248 (len(difflines), maxdiff))
251 difflines = difflines[:maxdiff]
249 difflines = difflines[:maxdiff]
252 elif difflines:
250 elif difflines:
253 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
251 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
254 self.ui.write(*difflines)
252 self.ui.write(*difflines)
255
253
256 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
254 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
257 '''send email notifications to interested subscribers.
255 '''send email notifications to interested subscribers.
258
256
259 if used as a changegroup hook, send one email for all changesets in
257 if used as a changegroup hook, send one email for all changesets in
260 the changegroup; otherwise send one email per changeset.'''
258 the changegroup; otherwise send one email per changeset.'''
261 n = notifier(ui, repo, hooktype)
259 n = notifier(ui, repo, hooktype)
262 if not n.subs:
260 if not n.subs:
263 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
261 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
264 return
262 return
265 if n.skipsource(source):
263 if n.skipsource(source):
266 ui.debug(_('notify: changes have source "%s" - skipping\n') %
264 ui.debug(_('notify: changes have source "%s" - skipping\n') %
267 source)
265 source)
268 return
266 return
269 node = bin(node)
267 node = bin(node)
270 ui.pushbuffer()
268 ui.pushbuffer()
271 if hooktype == 'changegroup':
269 if hooktype == 'changegroup':
272 start = repo[node].rev()
270 start = repo[node].rev()
273 end = len(repo)
271 end = len(repo)
274 count = end - start
272 count = end - start
275 for rev in xrange(start, end):
273 for rev in xrange(start, end):
276 n.node(repo[rev].node())
274 n.node(repo[rev].node())
277 n.diff(node, repo.changelog.tip())
275 n.diff(node, repo.changelog.tip())
278 else:
276 else:
279 count = 1
277 count = 1
280 n.node(node)
278 n.node(node)
281 n.diff(node, node)
279 n.diff(node, node)
282 data = ui.popbuffer()
280 data = ui.popbuffer()
283 n.send(node, count, data)
281 n.send(node, count, data)
@@ -1,103 +1,100 b''
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 #
2 #
3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
4 # that removes files not known to mercurial
4 # that removes files not known to mercurial
5 #
5 #
6 # This program was inspired by the "cvspurge" script contained in CVS utilities
6 # This program was inspired by the "cvspurge" script contained in CVS utilities
7 # (http://www.red-bean.com/cvsutils/).
7 # (http://www.red-bean.com/cvsutils/).
8 #
8 #
9 # To enable the "purge" extension put these lines in your ~/.hgrc:
9 # To enable the "purge" extension put these lines in your ~/.hgrc:
10 # [extensions]
10 # [extensions]
11 # hgext.purge =
11 # hgext.purge =
12 #
12 #
13 # For help on the usage of "hg purge" use:
13 # For help on the usage of "hg purge" use:
14 # hg help purge
14 # hg help purge
15 #
15 #
16 # This program is free software; you can redistribute it and/or modify
16 # This program is free software; you can redistribute it and/or modify
17 # it under the terms of the GNU General Public License as published by
17 # it under the terms of the GNU General Public License as published by
18 # the Free Software Foundation; either version 2 of the License, or
18 # the Free Software Foundation; either version 2 of the License, or
19 # (at your option) any later version.
19 # (at your option) any later version.
20 #
20 #
21 # This program is distributed in the hope that it will be useful,
21 # This program is distributed in the hope that it will be useful,
22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 # GNU General Public License for more details.
24 # GNU General Public License for more details.
25 #
25 #
26 # You should have received a copy of the GNU General Public License
26 # You should have received a copy of the GNU General Public License
27 # along with this program; if not, write to the Free Software
27 # along with this program; if not, write to the Free Software
28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
29
29
30 from mercurial import util, commands, cmdutil
30 from mercurial import util, commands, cmdutil
31 from mercurial.i18n import _
31 from mercurial.i18n import _
32 import os
32 import os
33
33
34 def purge(ui, repo, *dirs, **opts):
34 def purge(ui, repo, *dirs, **opts):
35 '''removes files not tracked by mercurial
35 '''removes files not tracked by mercurial
36
36
37 Delete files not known to mercurial. This is useful to test local and
37 Delete files not known to mercurial. This is useful to test local and
38 uncommitted changes in an otherwise clean source tree.
38 uncommitted changes in an otherwise clean source tree.
39
39
40 This means that purge will delete:
40 This means that purge will delete:
41 - Unknown files: files marked with "?" by "hg status"
41 - Unknown files: files marked with "?" by "hg status"
42 - Ignored files: files usually ignored by Mercurial because they match
42 - Ignored files: files usually ignored by Mercurial because they match
43 a pattern in a ".hgignore" file
43 a pattern in a ".hgignore" file
44 - Empty directories: in fact Mercurial ignores directories unless they
44 - Empty directories: in fact Mercurial ignores directories unless they
45 contain files under source control management
45 contain files under source control management
46 But it will leave untouched:
46 But it will leave untouched:
47 - Unmodified tracked files
47 - Unmodified tracked files
48 - Modified tracked files
48 - Modified tracked files
49 - New files added to the repository (with "hg add")
49 - New files added to the repository (with "hg add")
50
50
51 If directories are given on the command line, only files in these
51 If directories are given on the command line, only files in these
52 directories are considered.
52 directories are considered.
53
53
54 Be careful with purge: you could irreversibly delete some files you
54 Be careful with purge: you could irreversibly delete some files you
55 forgot to add to the repository. If you only want to print the list of
55 forgot to add to the repository. If you only want to print the list of
56 files that this program would delete use the --print option.
56 files that this program would delete use the --print option.
57 '''
57 '''
58 act = not opts['print']
58 act = not opts['print']
59 eol = '\n'
59 eol = '\n'
60 if opts['print0']:
60 if opts['print0']:
61 eol = '\0'
61 eol = '\0'
62 act = False # --print0 implies --print
62 act = False # --print0 implies --print
63
63
64 def remove(remove_func, name):
64 def remove(remove_func, name):
65 if act:
65 if act:
66 try:
66 try:
67 remove_func(os.path.join(repo.root, name))
67 remove_func(os.path.join(repo.root, name))
68 except OSError, e:
68 except OSError, e:
69 m = _('%s cannot be removed') % name
69 m = _('%s cannot be removed') % name
70 if opts['abort_on_err']:
70 if opts['abort_on_err']:
71 raise util.Abort(m)
71 raise util.Abort(m)
72 ui.warn(_('warning: %s\n') % m)
72 ui.warn(_('warning: %s\n') % m)
73 else:
73 else:
74 ui.write('%s%s' % (name, eol))
74 ui.write('%s%s' % (name, eol))
75
75
76 directories = []
76 directories = []
77 match = cmdutil.match(repo, dirs, opts)
77 match = cmdutil.match(repo, dirs, opts)
78 match.dir = directories.append
78 match.dir = directories.append
79 status = repo.status(match=match, ignored=opts['all'], unknown=True)
79 status = repo.status(match=match, ignored=opts['all'], unknown=True)
80 files = status[4] + status[5]
81 files.sort()
82 directories.sort()
83
80
84 for f in files:
81 for f in util.sort(status[4] + status[5]):
85 ui.note(_('Removing file %s\n') % f)
82 ui.note(_('Removing file %s\n') % f)
86 remove(os.remove, f)
83 remove(os.remove, f)
87
84
88 for f in directories[::-1]:
85 for f in util.sort(directories)[::-1]:
89 if match(f) and not os.listdir(repo.wjoin(f)):
86 if match(f) and not os.listdir(repo.wjoin(f)):
90 ui.note(_('Removing directory %s\n') % f)
87 ui.note(_('Removing directory %s\n') % f)
91 remove(os.rmdir, f)
88 remove(os.rmdir, f)
92
89
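For context on the hunk above: repo.status() conventionally returns the tuple (modified, added, removed, deleted, unknown, ignored, clean), so status[4] + status[5] collects untracked plus ignored files before sorting and removal. A rough sketch under that assumed layout, with hypothetical file names:

# (modified, added, removed, deleted, unknown, ignored, clean)
status = ([], [], [], [], ['stray.o', 'a.tmp'], ['build/junk'], [])
candidates = status[4] + status[5]     # unknown plus ignored, as with --all
sorted(candidates)                     # -> ['a.tmp', 'build/junk', 'stray.o']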
93 cmdtable = {
90 cmdtable = {
94 'purge|clean':
91 'purge|clean':
95 (purge,
92 (purge,
96 [('a', 'abort-on-err', None, _('abort if an error occurs')),
93 [('a', 'abort-on-err', None, _('abort if an error occurs')),
97 ('', 'all', None, _('purge ignored files too')),
94 ('', 'all', None, _('purge ignored files too')),
98 ('p', 'print', None, _('print the file names instead of deleting them')),
95 ('p', 'print', None, _('print the file names instead of deleting them')),
99 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
96 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
100 ' (implies -p)')),
97 ' (implies -p)')),
101 ] + commands.walkopts,
98 ] + commands.walkopts,
102 _('hg purge [OPTION]... [DIR]...'))
99 _('hg purge [OPTION]... [DIR]...'))
103 }
100 }
@@ -1,597 +1,589 b''
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 import os, tempfile
9 import os, tempfile
10 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge
10 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge
11 from mercurial import patch, revlog, util
11 from mercurial import patch, revlog, util
12
12
13 '''patch transplanting tool
13 '''patch transplanting tool
14
14
15 This extension allows you to transplant patches from another branch.
15 This extension allows you to transplant patches from another branch.
16
16
17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
18 from a changeset hash to its hash in the source repository.
18 from a changeset hash to its hash in the source repository.
19 '''
19 '''
20
20
21 class transplantentry:
21 class transplantentry:
22 def __init__(self, lnode, rnode):
22 def __init__(self, lnode, rnode):
23 self.lnode = lnode
23 self.lnode = lnode
24 self.rnode = rnode
24 self.rnode = rnode
25
25
26 class transplants:
26 class transplants:
27 def __init__(self, path=None, transplantfile=None, opener=None):
27 def __init__(self, path=None, transplantfile=None, opener=None):
28 self.path = path
28 self.path = path
29 self.transplantfile = transplantfile
29 self.transplantfile = transplantfile
30 self.opener = opener
30 self.opener = opener
31
31
32 if not opener:
32 if not opener:
33 self.opener = util.opener(self.path)
33 self.opener = util.opener(self.path)
34 self.transplants = []
34 self.transplants = []
35 self.dirty = False
35 self.dirty = False
36 self.read()
36 self.read()
37
37
38 def read(self):
38 def read(self):
39 abspath = os.path.join(self.path, self.transplantfile)
39 abspath = os.path.join(self.path, self.transplantfile)
40 if self.transplantfile and os.path.exists(abspath):
40 if self.transplantfile and os.path.exists(abspath):
41 for line in self.opener(self.transplantfile).read().splitlines():
41 for line in self.opener(self.transplantfile).read().splitlines():
42 lnode, rnode = map(revlog.bin, line.split(':'))
42 lnode, rnode = map(revlog.bin, line.split(':'))
43 self.transplants.append(transplantentry(lnode, rnode))
43 self.transplants.append(transplantentry(lnode, rnode))
44
44
45 def write(self):
45 def write(self):
46 if self.dirty and self.transplantfile:
46 if self.dirty and self.transplantfile:
47 if not os.path.isdir(self.path):
47 if not os.path.isdir(self.path):
48 os.mkdir(self.path)
48 os.mkdir(self.path)
49 fp = self.opener(self.transplantfile, 'w')
49 fp = self.opener(self.transplantfile, 'w')
50 for c in self.transplants:
50 for c in self.transplants:
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 fp.write(l + ':' + r + '\n')
52 fp.write(l + ':' + r + '\n')
53 fp.close()
53 fp.close()
54 self.dirty = False
54 self.dirty = False
55
55
56 def get(self, rnode):
56 def get(self, rnode):
57 return [t for t in self.transplants if t.rnode == rnode]
57 return [t for t in self.transplants if t.rnode == rnode]
58
58
59 def set(self, lnode, rnode):
59 def set(self, lnode, rnode):
60 self.transplants.append(transplantentry(lnode, rnode))
60 self.transplants.append(transplantentry(lnode, rnode))
61 self.dirty = True
61 self.dirty = True
62
62
63 def remove(self, transplant):
63 def remove(self, transplant):
64 del self.transplants[self.transplants.index(transplant)]
64 del self.transplants[self.transplants.index(transplant)]
65 self.dirty = True
65 self.dirty = True
66
66
67 class transplanter:
67 class transplanter:
68 def __init__(self, ui, repo):
68 def __init__(self, ui, repo):
69 self.ui = ui
69 self.ui = ui
70 self.path = repo.join('transplant')
70 self.path = repo.join('transplant')
71 self.opener = util.opener(self.path)
71 self.opener = util.opener(self.path)
72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
73
73
74 def applied(self, repo, node, parent):
74 def applied(self, repo, node, parent):
75 '''returns True if a node is already an ancestor of parent
75 '''returns True if a node is already an ancestor of parent
76 or has already been transplanted'''
76 or has already been transplanted'''
77 if hasnode(repo, node):
77 if hasnode(repo, node):
78 if node in repo.changelog.reachable(parent, stop=node):
78 if node in repo.changelog.reachable(parent, stop=node):
79 return True
79 return True
80 for t in self.transplants.get(node):
80 for t in self.transplants.get(node):
81 # it might have been stripped
81 # it might have been stripped
82 if not hasnode(repo, t.lnode):
82 if not hasnode(repo, t.lnode):
83 self.transplants.remove(t)
83 self.transplants.remove(t)
84 return False
84 return False
85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 return True
86 return True
87 return False
87 return False
88
88
89 def apply(self, repo, source, revmap, merges, opts={}):
89 def apply(self, repo, source, revmap, merges, opts={}):
90 '''apply the revisions in revmap one by one in revision order'''
90 '''apply the revisions in revmap one by one in revision order'''
91 revs = revmap.keys()
91 revs = util.sort(revmap)
92 revs.sort()
93
94 p1, p2 = repo.dirstate.parents()
92 p1, p2 = repo.dirstate.parents()
95 pulls = []
93 pulls = []
96 diffopts = patch.diffopts(self.ui, opts)
94 diffopts = patch.diffopts(self.ui, opts)
97 diffopts.git = True
95 diffopts.git = True
98
96
99 lock = wlock = None
97 lock = wlock = None
100 try:
98 try:
101 wlock = repo.wlock()
99 wlock = repo.wlock()
102 lock = repo.lock()
100 lock = repo.lock()
103 for rev in revs:
101 for rev in revs:
104 node = revmap[rev]
102 node = revmap[rev]
105 revstr = '%s:%s' % (rev, revlog.short(node))
103 revstr = '%s:%s' % (rev, revlog.short(node))
106
104
107 if self.applied(repo, node, p1):
105 if self.applied(repo, node, p1):
108 self.ui.warn(_('skipping already applied revision %s\n') %
106 self.ui.warn(_('skipping already applied revision %s\n') %
109 revstr)
107 revstr)
110 continue
108 continue
111
109
112 parents = source.changelog.parents(node)
110 parents = source.changelog.parents(node)
113 if not opts.get('filter'):
111 if not opts.get('filter'):
114 # If the changeset parent is the same as the wdir's parent,
112 # If the changeset parent is the same as the wdir's parent,
115 # just pull it.
113 # just pull it.
116 if parents[0] == p1:
114 if parents[0] == p1:
117 pulls.append(node)
115 pulls.append(node)
118 p1 = node
116 p1 = node
119 continue
117 continue
120 if pulls:
118 if pulls:
121 if source != repo:
119 if source != repo:
122 repo.pull(source, heads=pulls)
120 repo.pull(source, heads=pulls)
123 merge.update(repo, pulls[-1], False, False, None)
121 merge.update(repo, pulls[-1], False, False, None)
124 p1, p2 = repo.dirstate.parents()
122 p1, p2 = repo.dirstate.parents()
125 pulls = []
123 pulls = []
126
124
127 domerge = False
125 domerge = False
128 if node in merges:
126 if node in merges:
129 # pulling all the merge revs at once would mean we couldn't
127 # pulling all the merge revs at once would mean we couldn't
130 # transplant after the latest even if transplants before them
128 # transplant after the latest even if transplants before them
131 # fail.
129 # fail.
132 domerge = True
130 domerge = True
133 if not hasnode(repo, node):
131 if not hasnode(repo, node):
134 repo.pull(source, heads=[node])
132 repo.pull(source, heads=[node])
135
133
136 if parents[1] != revlog.nullid:
134 if parents[1] != revlog.nullid:
137 self.ui.note(_('skipping merge changeset %s:%s\n')
135 self.ui.note(_('skipping merge changeset %s:%s\n')
138 % (rev, revlog.short(node)))
136 % (rev, revlog.short(node)))
139 patchfile = None
137 patchfile = None
140 else:
138 else:
141 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
139 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
142 fp = os.fdopen(fd, 'w')
140 fp = os.fdopen(fd, 'w')
143 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
141 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
144 fp.close()
142 fp.close()
145
143
146 del revmap[rev]
144 del revmap[rev]
147 if patchfile or domerge:
145 if patchfile or domerge:
148 try:
146 try:
149 n = self.applyone(repo, node,
147 n = self.applyone(repo, node,
150 source.changelog.read(node),
148 source.changelog.read(node),
151 patchfile, merge=domerge,
149 patchfile, merge=domerge,
152 log=opts.get('log'),
150 log=opts.get('log'),
153 filter=opts.get('filter'))
151 filter=opts.get('filter'))
154 if n and domerge:
152 if n and domerge:
155 self.ui.status(_('%s merged at %s\n') % (revstr,
153 self.ui.status(_('%s merged at %s\n') % (revstr,
156 revlog.short(n)))
154 revlog.short(n)))
157 elif n:
155 elif n:
158 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
156 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
159 revlog.short(n)))
157 revlog.short(n)))
160 finally:
158 finally:
161 if patchfile:
159 if patchfile:
162 os.unlink(patchfile)
160 os.unlink(patchfile)
163 if pulls:
161 if pulls:
164 repo.pull(source, heads=pulls)
162 repo.pull(source, heads=pulls)
165 merge.update(repo, pulls[-1], False, False, None)
163 merge.update(repo, pulls[-1], False, False, None)
166 finally:
164 finally:
167 self.saveseries(revmap, merges)
165 self.saveseries(revmap, merges)
168 self.transplants.write()
166 self.transplants.write()
169 del lock, wlock
167 del lock, wlock
170
168
171 def filter(self, filter, changelog, patchfile):
169 def filter(self, filter, changelog, patchfile):
172 '''arbitrarily rewrite changeset before applying it'''
170 '''arbitrarily rewrite changeset before applying it'''
173
171
174 self.ui.status('filtering %s\n' % patchfile)
172 self.ui.status('filtering %s\n' % patchfile)
175 user, date, msg = (changelog[1], changelog[2], changelog[4])
173 user, date, msg = (changelog[1], changelog[2], changelog[4])
176
174
177 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
175 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
178 fp = os.fdopen(fd, 'w')
176 fp = os.fdopen(fd, 'w')
179 fp.write("# HG changeset patch\n")
177 fp.write("# HG changeset patch\n")
180 fp.write("# User %s\n" % user)
178 fp.write("# User %s\n" % user)
181 fp.write("# Date %d %d\n" % date)
179 fp.write("# Date %d %d\n" % date)
182 fp.write(changelog[4])
180 fp.write(changelog[4])
183 fp.close()
181 fp.close()
184
182
185 try:
183 try:
186 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
184 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
187 util.shellquote(patchfile)),
185 util.shellquote(patchfile)),
188 environ={'HGUSER': changelog[1]},
186 environ={'HGUSER': changelog[1]},
189 onerr=util.Abort, errprefix=_('filter failed'))
187 onerr=util.Abort, errprefix=_('filter failed'))
190 user, date, msg = self.parselog(file(headerfile))[1:4]
188 user, date, msg = self.parselog(file(headerfile))[1:4]
191 finally:
189 finally:
192 os.unlink(headerfile)
190 os.unlink(headerfile)
193
191
194 return (user, date, msg)
192 return (user, date, msg)
195
193
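The filter method above runs the configured command with the changelog header file as its first argument and the patch file as its second, then re-reads the header file, so a filter can rewrite the commit metadata or message in place. A hypothetical --filter script (illustrative only, not shipped with the extension) might look like:

import sys

headerfile = sys.argv[1]        # first argument: changelog header + message
# sys.argv[2] is the patch file; this example leaves it untouched
out = []
rewritten = False
for line in open(headerfile).readlines():
    if not rewritten and not line.startswith('#'):
        out.append('[transplanted] ' + line)    # prefix the first message line
        rewritten = True
    else:
        out.append(line)
open(headerfile, 'w').writelines(out)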
196 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
194 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
197 filter=None):
195 filter=None):
198 '''apply the patch in patchfile to the repository as a transplant'''
196 '''apply the patch in patchfile to the repository as a transplant'''
199 (manifest, user, (time, timezone), files, message) = cl[:5]
197 (manifest, user, (time, timezone), files, message) = cl[:5]
200 date = "%d %d" % (time, timezone)
198 date = "%d %d" % (time, timezone)
201 extra = {'transplant_source': node}
199 extra = {'transplant_source': node}
202 if filter:
200 if filter:
203 (user, date, message) = self.filter(filter, cl, patchfile)
201 (user, date, message) = self.filter(filter, cl, patchfile)
204
202
205 if log:
203 if log:
206 message += '\n(transplanted from %s)' % revlog.hex(node)
204 message += '\n(transplanted from %s)' % revlog.hex(node)
207
205
208 self.ui.status(_('applying %s\n') % revlog.short(node))
206 self.ui.status(_('applying %s\n') % revlog.short(node))
209 self.ui.note('%s %s\n%s\n' % (user, date, message))
207 self.ui.note('%s %s\n%s\n' % (user, date, message))
210
208
211 if not patchfile and not merge:
209 if not patchfile and not merge:
212 raise util.Abort(_('can only omit patchfile if merging'))
210 raise util.Abort(_('can only omit patchfile if merging'))
213 if patchfile:
211 if patchfile:
214 try:
212 try:
215 files = {}
213 files = {}
216 try:
214 try:
217 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
215 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
218 files=files)
216 files=files)
219 if not files:
217 if not files:
220 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
218 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
221 return None
219 return None
222 finally:
220 finally:
223 files = patch.updatedir(self.ui, repo, files)
221 files = patch.updatedir(self.ui, repo, files)
224 except Exception, inst:
222 except Exception, inst:
225 if filter:
223 if filter:
226 os.unlink(patchfile)
224 os.unlink(patchfile)
227 seriespath = os.path.join(self.path, 'series')
225 seriespath = os.path.join(self.path, 'series')
228 if os.path.exists(seriespath):
226 if os.path.exists(seriespath):
229 os.unlink(seriespath)
227 os.unlink(seriespath)
230 p1 = repo.dirstate.parents()[0]
228 p1 = repo.dirstate.parents()[0]
231 p2 = node
229 p2 = node
232 self.log(user, date, message, p1, p2, merge=merge)
230 self.log(user, date, message, p1, p2, merge=merge)
233 self.ui.write(str(inst) + '\n')
231 self.ui.write(str(inst) + '\n')
234 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
232 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
235 else:
233 else:
236 files = None
234 files = None
237 if merge:
235 if merge:
238 p1, p2 = repo.dirstate.parents()
236 p1, p2 = repo.dirstate.parents()
239 repo.dirstate.setparents(p1, node)
237 repo.dirstate.setparents(p1, node)
240
238
241 n = repo.commit(files, message, user, date, extra=extra)
239 n = repo.commit(files, message, user, date, extra=extra)
242 if not merge:
240 if not merge:
243 self.transplants.set(n, node)
241 self.transplants.set(n, node)
244
242
245 return n
243 return n
246
244
247 def resume(self, repo, source, opts=None):
245 def resume(self, repo, source, opts=None):
248 '''recover last transaction and apply remaining changesets'''
246 '''recover last transaction and apply remaining changesets'''
249 if os.path.exists(os.path.join(self.path, 'journal')):
247 if os.path.exists(os.path.join(self.path, 'journal')):
250 n, node = self.recover(repo)
248 n, node = self.recover(repo)
251 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
249 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
252 revlog.short(n)))
250 revlog.short(n)))
253 seriespath = os.path.join(self.path, 'series')
251 seriespath = os.path.join(self.path, 'series')
254 if not os.path.exists(seriespath):
252 if not os.path.exists(seriespath):
255 self.transplants.write()
253 self.transplants.write()
256 return
254 return
257 nodes, merges = self.readseries()
255 nodes, merges = self.readseries()
258 revmap = {}
256 revmap = {}
259 for n in nodes:
257 for n in nodes:
260 revmap[source.changelog.rev(n)] = n
258 revmap[source.changelog.rev(n)] = n
261 os.unlink(seriespath)
259 os.unlink(seriespath)
262
260
263 self.apply(repo, source, revmap, merges, opts)
261 self.apply(repo, source, revmap, merges, opts)
264
262
265 def recover(self, repo):
263 def recover(self, repo):
266 '''commit working directory using journal metadata'''
264 '''commit working directory using journal metadata'''
267 node, user, date, message, parents = self.readlog()
265 node, user, date, message, parents = self.readlog()
268 merge = len(parents) == 2
266 merge = len(parents) == 2
269
267
270 if not user or not date or not message or not parents[0]:
268 if not user or not date or not message or not parents[0]:
271 raise util.Abort(_('transplant log file is corrupt'))
269 raise util.Abort(_('transplant log file is corrupt'))
272
270
273 extra = {'transplant_source': node}
271 extra = {'transplant_source': node}
274 wlock = repo.wlock()
272 wlock = repo.wlock()
275 try:
273 try:
276 p1, p2 = repo.dirstate.parents()
274 p1, p2 = repo.dirstate.parents()
277 if p1 != parents[0]:
275 if p1 != parents[0]:
278 raise util.Abort(
276 raise util.Abort(
279 _('working dir not at transplant parent %s') %
277 _('working dir not at transplant parent %s') %
280 revlog.hex(parents[0]))
278 revlog.hex(parents[0]))
281 if merge:
279 if merge:
282 repo.dirstate.setparents(p1, parents[1])
280 repo.dirstate.setparents(p1, parents[1])
283 n = repo.commit(None, message, user, date, extra=extra)
281 n = repo.commit(None, message, user, date, extra=extra)
284 if not n:
282 if not n:
285 raise util.Abort(_('commit failed'))
283 raise util.Abort(_('commit failed'))
286 if not merge:
284 if not merge:
287 self.transplants.set(n, node)
285 self.transplants.set(n, node)
288 self.unlog()
286 self.unlog()
289
287
290 return n, node
288 return n, node
291 finally:
289 finally:
292 del wlock
290 del wlock
293
291
294 def readseries(self):
292 def readseries(self):
295 nodes = []
293 nodes = []
296 merges = []
294 merges = []
297 cur = nodes
295 cur = nodes
298 for line in self.opener('series').read().splitlines():
296 for line in self.opener('series').read().splitlines():
299 if line.startswith('# Merges'):
297 if line.startswith('# Merges'):
300 cur = merges
298 cur = merges
301 continue
299 continue
302 cur.append(revlog.bin(line))
300 cur.append(revlog.bin(line))
303
301
304 return (nodes, merges)
302 return (nodes, merges)
305
303
306 def saveseries(self, revmap, merges):
304 def saveseries(self, revmap, merges):
307 if not revmap:
305 if not revmap:
308 return
306 return
309
307
310 if not os.path.isdir(self.path):
308 if not os.path.isdir(self.path):
311 os.mkdir(self.path)
309 os.mkdir(self.path)
312 series = self.opener('series', 'w')
310 series = self.opener('series', 'w')
313 revs = revmap.keys()
311 for rev in util.sort(revmap):
314 revs.sort()
315 for rev in revs:
316 series.write(revlog.hex(revmap[rev]) + '\n')
312 series.write(revlog.hex(revmap[rev]) + '\n')
317 if merges:
313 if merges:
318 series.write('# Merges\n')
314 series.write('# Merges\n')
319 for m in merges:
315 for m in merges:
320 series.write(revlog.hex(m) + '\n')
316 series.write(revlog.hex(m) + '\n')
321 series.close()
317 series.close()
322
318
323 def parselog(self, fp):
319 def parselog(self, fp):
324 parents = []
320 parents = []
325 message = []
321 message = []
326 node = revlog.nullid
322 node = revlog.nullid
327 inmsg = False
323 inmsg = False
328 for line in fp.read().splitlines():
324 for line in fp.read().splitlines():
329 if inmsg:
325 if inmsg:
330 message.append(line)
326 message.append(line)
331 elif line.startswith('# User '):
327 elif line.startswith('# User '):
332 user = line[7:]
328 user = line[7:]
333 elif line.startswith('# Date '):
329 elif line.startswith('# Date '):
334 date = line[7:]
330 date = line[7:]
335 elif line.startswith('# Node ID '):
331 elif line.startswith('# Node ID '):
336 node = revlog.bin(line[10:])
332 node = revlog.bin(line[10:])
337 elif line.startswith('# Parent '):
333 elif line.startswith('# Parent '):
338 parents.append(revlog.bin(line[9:]))
334 parents.append(revlog.bin(line[9:]))
339 elif not line.startswith('#'):
335 elif not line.startswith('#'):
340 inmsg = True
336 inmsg = True
341 message.append(line)
337 message.append(line)
342 return (node, user, date, '\n'.join(message), parents)
338 return (node, user, date, '\n'.join(message), parents)
343
339
344 def log(self, user, date, message, p1, p2, merge=False):
340 def log(self, user, date, message, p1, p2, merge=False):
345 '''journal changelog metadata for later recover'''
341 '''journal changelog metadata for later recover'''
346
342
347 if not os.path.isdir(self.path):
343 if not os.path.isdir(self.path):
348 os.mkdir(self.path)
344 os.mkdir(self.path)
349 fp = self.opener('journal', 'w')
345 fp = self.opener('journal', 'w')
350 fp.write('# User %s\n' % user)
346 fp.write('# User %s\n' % user)
351 fp.write('# Date %s\n' % date)
347 fp.write('# Date %s\n' % date)
352 fp.write('# Node ID %s\n' % revlog.hex(p2))
348 fp.write('# Node ID %s\n' % revlog.hex(p2))
353 fp.write('# Parent ' + revlog.hex(p1) + '\n')
349 fp.write('# Parent ' + revlog.hex(p1) + '\n')
354 if merge:
350 if merge:
355 fp.write('# Parent ' + revlog.hex(p2) + '\n')
351 fp.write('# Parent ' + revlog.hex(p2) + '\n')
356 fp.write(message.rstrip() + '\n')
352 fp.write(message.rstrip() + '\n')
357 fp.close()
353 fp.close()
358
354
359 def readlog(self):
355 def readlog(self):
360 return self.parselog(self.opener('journal'))
356 return self.parselog(self.opener('journal'))
361
357
362 def unlog(self):
358 def unlog(self):
363 '''remove changelog journal'''
359 '''remove changelog journal'''
364 absdst = os.path.join(self.path, 'journal')
360 absdst = os.path.join(self.path, 'journal')
365 if os.path.exists(absdst):
361 if os.path.exists(absdst):
366 os.unlink(absdst)
362 os.unlink(absdst)
367
363
368 def transplantfilter(self, repo, source, root):
364 def transplantfilter(self, repo, source, root):
369 def matchfn(node):
365 def matchfn(node):
370 if self.applied(repo, node, root):
366 if self.applied(repo, node, root):
371 return False
367 return False
372 if source.changelog.parents(node)[1] != revlog.nullid:
368 if source.changelog.parents(node)[1] != revlog.nullid:
373 return False
369 return False
374 extra = source.changelog.read(node)[5]
370 extra = source.changelog.read(node)[5]
375 cnode = extra.get('transplant_source')
371 cnode = extra.get('transplant_source')
376 if cnode and self.applied(repo, cnode, root):
372 if cnode and self.applied(repo, cnode, root):
377 return False
373 return False
378 return True
374 return True
379
375
380 return matchfn
376 return matchfn
381
377
382 def hasnode(repo, node):
378 def hasnode(repo, node):
383 try:
379 try:
384 return repo.changelog.rev(node) != None
380 return repo.changelog.rev(node) != None
385 except revlog.RevlogError:
381 except revlog.RevlogError:
386 return False
382 return False
387
383
388 def browserevs(ui, repo, nodes, opts):
384 def browserevs(ui, repo, nodes, opts):
389 '''interactively transplant changesets'''
385 '''interactively transplant changesets'''
390 def browsehelp(ui):
386 def browsehelp(ui):
391 ui.write('y: transplant this changeset\n'
387 ui.write('y: transplant this changeset\n'
392 'n: skip this changeset\n'
388 'n: skip this changeset\n'
393 'm: merge at this changeset\n'
389 'm: merge at this changeset\n'
394 'p: show patch\n'
390 'p: show patch\n'
395 'c: commit selected changesets\n'
391 'c: commit selected changesets\n'
396 'q: cancel transplant\n'
392 'q: cancel transplant\n'
397 '?: show this help\n')
393 '?: show this help\n')
398
394
399 displayer = cmdutil.show_changeset(ui, repo, opts)
395 displayer = cmdutil.show_changeset(ui, repo, opts)
400 transplants = []
396 transplants = []
401 merges = []
397 merges = []
402 for node in nodes:
398 for node in nodes:
403 displayer.show(changenode=node)
399 displayer.show(changenode=node)
404 action = None
400 action = None
405 while not action:
401 while not action:
406 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
402 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
407 if action == '?':
403 if action == '?':
408 browsehelp(ui)
404 browsehelp(ui)
409 action = None
405 action = None
410 elif action == 'p':
406 elif action == 'p':
411 parent = repo.changelog.parents(node)[0]
407 parent = repo.changelog.parents(node)[0]
412 patch.diff(repo, parent, node)
408 patch.diff(repo, parent, node)
413 action = None
409 action = None
414 elif action not in ('y', 'n', 'm', 'c', 'q'):
410 elif action not in ('y', 'n', 'm', 'c', 'q'):
415 ui.write('no such option\n')
411 ui.write('no such option\n')
416 action = None
412 action = None
417 if action == 'y':
413 if action == 'y':
418 transplants.append(node)
414 transplants.append(node)
419 elif action == 'm':
415 elif action == 'm':
420 merges.append(node)
416 merges.append(node)
421 elif action == 'c':
417 elif action == 'c':
422 break
418 break
423 elif action == 'q':
419 elif action == 'q':
424 transplants = ()
420 transplants = ()
425 merges = ()
421 merges = ()
426 break
422 break
427 return (transplants, merges)
423 return (transplants, merges)
428
424
429 def transplant(ui, repo, *revs, **opts):
425 def transplant(ui, repo, *revs, **opts):
430 '''transplant changesets from another branch
426 '''transplant changesets from another branch
431
427
432 Selected changesets will be applied on top of the current working
428 Selected changesets will be applied on top of the current working
433 directory with the log of the original changeset. If --log is
429 directory with the log of the original changeset. If --log is
434 specified, log messages will have a comment appended of the form:
430 specified, log messages will have a comment appended of the form:
435
431
436 (transplanted from CHANGESETHASH)
432 (transplanted from CHANGESETHASH)
437
433
438 You can rewrite the changelog message with the --filter option.
434 You can rewrite the changelog message with the --filter option.
439 Its argument will be invoked with the current changelog message
435 Its argument will be invoked with the current changelog message
440 as $1 and the patch as $2.
436 as $1 and the patch as $2.
441
437
442 If --source is specified, selects changesets from the named
438 If --source is specified, selects changesets from the named
443 repository. If --branch is specified, selects changesets from the
439 repository. If --branch is specified, selects changesets from the
444 branch holding the named revision, up to that revision. If --all
440 branch holding the named revision, up to that revision. If --all
445 is specified, all changesets on the branch will be transplanted,
441 is specified, all changesets on the branch will be transplanted,
446 otherwise you will be prompted to select the changesets you want.
442 otherwise you will be prompted to select the changesets you want.
447
443
448 hg transplant --branch REVISION --all will rebase the selected branch
444 hg transplant --branch REVISION --all will rebase the selected branch
449 (up to the named revision) onto your current working directory.
445 (up to the named revision) onto your current working directory.
450
446
451 You can optionally mark selected transplanted changesets as
447 You can optionally mark selected transplanted changesets as
452 merge changesets. You will not be prompted to transplant any
448 merge changesets. You will not be prompted to transplant any
453 ancestors of a merged transplant, and you can merge descendants
449 ancestors of a merged transplant, and you can merge descendants
454 of them normally instead of transplanting them.
450 of them normally instead of transplanting them.
455
451
456 If no merges or revisions are provided, hg transplant will start
452 If no merges or revisions are provided, hg transplant will start
457 an interactive changeset browser.
453 an interactive changeset browser.
458
454
459 If a changeset application fails, you can fix the merge by hand and
455 If a changeset application fails, you can fix the merge by hand and
460 then resume where you left off by calling hg transplant --continue.
456 then resume where you left off by calling hg transplant --continue.
461 '''
457 '''
462 def getoneitem(opts, item, errmsg):
458 def getoneitem(opts, item, errmsg):
463 val = opts.get(item)
459 val = opts.get(item)
464 if val:
460 if val:
465 if len(val) > 1:
461 if len(val) > 1:
466 raise util.Abort(errmsg)
462 raise util.Abort(errmsg)
467 else:
463 else:
468 return val[0]
464 return val[0]
469
465
470 def getremotechanges(repo, url):
466 def getremotechanges(repo, url):
471 sourcerepo = ui.expandpath(url)
467 sourcerepo = ui.expandpath(url)
472 source = hg.repository(ui, sourcerepo)
468 source = hg.repository(ui, sourcerepo)
473 incoming = repo.findincoming(source, force=True)
469 incoming = repo.findincoming(source, force=True)
474 if not incoming:
470 if not incoming:
475 return (source, None, None)
471 return (source, None, None)
476
472
477 bundle = None
473 bundle = None
478 if not source.local():
474 if not source.local():
479 cg = source.changegroup(incoming, 'incoming')
475 cg = source.changegroup(incoming, 'incoming')
480 bundle = changegroup.writebundle(cg, None, 'HG10UN')
476 bundle = changegroup.writebundle(cg, None, 'HG10UN')
481 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
477 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
482
478
483 return (source, incoming, bundle)
479 return (source, incoming, bundle)
484
480
485 def incwalk(repo, incoming, branches, match=util.always):
481 def incwalk(repo, incoming, branches, match=util.always):
486 if not branches:
482 if not branches:
487 branches=None
483 branches=None
488 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
484 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
489 if match(node):
485 if match(node):
490 yield node
486 yield node
491
487
492 def transplantwalk(repo, root, branches, match=util.always):
488 def transplantwalk(repo, root, branches, match=util.always):
493 if not branches:
489 if not branches:
494 branches = repo.heads()
490 branches = repo.heads()
495 ancestors = []
491 ancestors = []
496 for branch in branches:
492 for branch in branches:
497 ancestors.append(repo.changelog.ancestor(root, branch))
493 ancestors.append(repo.changelog.ancestor(root, branch))
498 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
494 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
499 if match(node):
495 if match(node):
500 yield node
496 yield node
501
497
502 def checkopts(opts, revs):
498 def checkopts(opts, revs):
503 if opts.get('continue'):
499 if opts.get('continue'):
504 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
500 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
505 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
501 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
506 return
502 return
507 if not (opts.get('source') or revs or
503 if not (opts.get('source') or revs or
508 opts.get('merge') or opts.get('branch')):
504 opts.get('merge') or opts.get('branch')):
509 raise util.Abort(_('no source URL, branch tag or revision list provided'))
505 raise util.Abort(_('no source URL, branch tag or revision list provided'))
510 if opts.get('all'):
506 if opts.get('all'):
511 if not opts.get('branch'):
507 if not opts.get('branch'):
512 raise util.Abort(_('--all requires a branch revision'))
508 raise util.Abort(_('--all requires a branch revision'))
513 if revs:
509 if revs:
514 raise util.Abort(_('--all is incompatible with a revision list'))
510 raise util.Abort(_('--all is incompatible with a revision list'))
515
511
516 checkopts(opts, revs)
512 checkopts(opts, revs)
517
513
518 if not opts.get('log'):
514 if not opts.get('log'):
519 opts['log'] = ui.config('transplant', 'log')
515 opts['log'] = ui.config('transplant', 'log')
520 if not opts.get('filter'):
516 if not opts.get('filter'):
521 opts['filter'] = ui.config('transplant', 'filter')
517 opts['filter'] = ui.config('transplant', 'filter')
522
518
523 tp = transplanter(ui, repo)
519 tp = transplanter(ui, repo)
524
520
525 p1, p2 = repo.dirstate.parents()
521 p1, p2 = repo.dirstate.parents()
526 if p1 == revlog.nullid:
522 if p1 == revlog.nullid:
527 raise util.Abort(_('no revision checked out'))
523 raise util.Abort(_('no revision checked out'))
528 if not opts.get('continue'):
524 if not opts.get('continue'):
529 if p2 != revlog.nullid:
525 if p2 != revlog.nullid:
530 raise util.Abort(_('outstanding uncommitted merges'))
526 raise util.Abort(_('outstanding uncommitted merges'))
531 m, a, r, d = repo.status()[:4]
527 m, a, r, d = repo.status()[:4]
532 if m or a or r or d:
528 if m or a or r or d:
533 raise util.Abort(_('outstanding local changes'))
529 raise util.Abort(_('outstanding local changes'))
534
530
535 bundle = None
531 bundle = None
536 source = opts.get('source')
532 source = opts.get('source')
537 if source:
533 if source:
538 (source, incoming, bundle) = getremotechanges(repo, source)
534 (source, incoming, bundle) = getremotechanges(repo, source)
539 else:
535 else:
540 source = repo
536 source = repo
541
537
542 try:
538 try:
543 if opts.get('continue'):
539 if opts.get('continue'):
544 tp.resume(repo, source, opts)
540 tp.resume(repo, source, opts)
545 return
541 return
546
542
547 tf=tp.transplantfilter(repo, source, p1)
543 tf=tp.transplantfilter(repo, source, p1)
548 if opts.get('prune'):
544 if opts.get('prune'):
549 prune = [source.lookup(r)
545 prune = [source.lookup(r)
550 for r in cmdutil.revrange(source, opts.get('prune'))]
546 for r in cmdutil.revrange(source, opts.get('prune'))]
551 matchfn = lambda x: tf(x) and x not in prune
547 matchfn = lambda x: tf(x) and x not in prune
552 else:
548 else:
553 matchfn = tf
549 matchfn = tf
554 branches = map(source.lookup, opts.get('branch', ()))
550 branches = map(source.lookup, opts.get('branch', ()))
555 merges = map(source.lookup, opts.get('merge', ()))
551 merges = map(source.lookup, opts.get('merge', ()))
556 revmap = {}
552 revmap = {}
557 if revs:
553 if revs:
558 for r in cmdutil.revrange(source, revs):
554 for r in cmdutil.revrange(source, revs):
559 revmap[int(r)] = source.lookup(r)
555 revmap[int(r)] = source.lookup(r)
560 elif opts.get('all') or not merges:
556 elif opts.get('all') or not merges:
561 if source != repo:
557 if source != repo:
562 alltransplants = incwalk(source, incoming, branches, match=matchfn)
558 alltransplants = incwalk(source, incoming, branches, match=matchfn)
563 else:
559 else:
564 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
560 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
565 if opts.get('all'):
561 if opts.get('all'):
566 revs = alltransplants
562 revs = alltransplants
567 else:
563 else:
568 revs, newmerges = browserevs(ui, source, alltransplants, opts)
564 revs, newmerges = browserevs(ui, source, alltransplants, opts)
569 merges.extend(newmerges)
565 merges.extend(newmerges)
570 for r in revs:
566 for r in revs:
571 revmap[source.changelog.rev(r)] = r
567 revmap[source.changelog.rev(r)] = r
572 for r in merges:
568 for r in merges:
573 revmap[source.changelog.rev(r)] = r
569 revmap[source.changelog.rev(r)] = r
574
570
575 revs = revmap.keys()
576 revs.sort()
577 pulls = []
578
579 tp.apply(repo, source, revmap, merges, opts)
571 tp.apply(repo, source, revmap, merges, opts)
580 finally:
572 finally:
581 if bundle:
573 if bundle:
582 source.close()
574 source.close()
583 os.unlink(bundle)
575 os.unlink(bundle)
584
576
585 cmdtable = {
577 cmdtable = {
586 "transplant":
578 "transplant":
587 (transplant,
579 (transplant,
588 [('s', 'source', '', _('pull patches from REPOSITORY')),
580 [('s', 'source', '', _('pull patches from REPOSITORY')),
589 ('b', 'branch', [], _('pull patches from branch BRANCH')),
581 ('b', 'branch', [], _('pull patches from branch BRANCH')),
590 ('a', 'all', None, _('pull all changesets up to BRANCH')),
582 ('a', 'all', None, _('pull all changesets up to BRANCH')),
591 ('p', 'prune', [], _('skip over REV')),
583 ('p', 'prune', [], _('skip over REV')),
592 ('m', 'merge', [], _('merge at REV')),
584 ('m', 'merge', [], _('merge at REV')),
593 ('', 'log', None, _('append transplant info to log message')),
585 ('', 'log', None, _('append transplant info to log message')),
594 ('c', 'continue', None, _('continue last transplant session after repair')),
586 ('c', 'continue', None, _('continue last transplant session after repair')),
595 ('', 'filter', '', _('filter changesets through FILTER'))],
587 ('', 'filter', '', _('filter changesets through FILTER'))],
596 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
588 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
597 }
589 }
@@ -1,195 +1,192 @@
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid
8 from node import bin, hex, nullid
9 from revlog import revlog
9 from revlog import revlog
10 import util
10 import util
11
11
12 def _string_escape(text):
12 def _string_escape(text):
13 """
13 """
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 >>> s
16 >>> s
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 >>> res = _string_escape(s)
18 >>> res = _string_escape(s)
19 >>> s == res.decode('string_escape')
19 >>> s == res.decode('string_escape')
20 True
20 True
21 """
21 """
22 # subset of the string_escape codec
22 # subset of the string_escape codec
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 return text.replace('\0', '\\0')
24 return text.replace('\0', '\\0')
25
25
26 class appender:
26 class appender:
27 '''the changelog index must be updated last on disk, so we use this class
27 '''the changelog index must be updated last on disk, so we use this class
28 to delay writes to it'''
28 to delay writes to it'''
29 def __init__(self, fp, buf):
29 def __init__(self, fp, buf):
30 self.data = buf
30 self.data = buf
31 self.fp = fp
31 self.fp = fp
32 self.offset = fp.tell()
32 self.offset = fp.tell()
33 self.size = util.fstat(fp).st_size
33 self.size = util.fstat(fp).st_size
34
34
35 def end(self):
35 def end(self):
36 return self.size + len("".join(self.data))
36 return self.size + len("".join(self.data))
37 def tell(self):
37 def tell(self):
38 return self.offset
38 return self.offset
39 def flush(self):
39 def flush(self):
40 pass
40 pass
41 def close(self):
41 def close(self):
42 self.fp.close()
42 self.fp.close()
43
43
44 def seek(self, offset, whence=0):
44 def seek(self, offset, whence=0):
45 '''virtual file offset spans real file and data'''
45 '''virtual file offset spans real file and data'''
46 if whence == 0:
46 if whence == 0:
47 self.offset = offset
47 self.offset = offset
48 elif whence == 1:
48 elif whence == 1:
49 self.offset += offset
49 self.offset += offset
50 elif whence == 2:
50 elif whence == 2:
51 self.offset = self.end() + offset
51 self.offset = self.end() + offset
52 if self.offset < self.size:
52 if self.offset < self.size:
53 self.fp.seek(self.offset)
53 self.fp.seek(self.offset)
54
54
55 def read(self, count=-1):
55 def read(self, count=-1):
56 '''only trick here is reads that span real file and data'''
56 '''only trick here is reads that span real file and data'''
57 ret = ""
57 ret = ""
58 if self.offset < self.size:
58 if self.offset < self.size:
59 s = self.fp.read(count)
59 s = self.fp.read(count)
60 ret = s
60 ret = s
61 self.offset += len(s)
61 self.offset += len(s)
62 if count > 0:
62 if count > 0:
63 count -= len(s)
63 count -= len(s)
64 if count != 0:
64 if count != 0:
65 doff = self.offset - self.size
65 doff = self.offset - self.size
66 self.data.insert(0, "".join(self.data))
66 self.data.insert(0, "".join(self.data))
67 del self.data[1:]
67 del self.data[1:]
68 s = self.data[0][doff:doff+count]
68 s = self.data[0][doff:doff+count]
69 self.offset += len(s)
69 self.offset += len(s)
70 ret += s
70 ret += s
71 return ret
71 return ret
72
72
73 def write(self, s):
73 def write(self, s):
74 self.data.append(str(s))
74 self.data.append(str(s))
75 self.offset += len(s)
75 self.offset += len(s)
76
76
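
The read() method above stitches together whatever already sits on disk with whatever is still queued in self.data. A minimal, self-contained rehearsal of that idea follows; os.fstat stands in for util.fstat and plain module-level names replace the appender class, so this is an illustration rather than the class itself.

import os, tempfile

fd, path = tempfile.mkstemp()
os.write(fd, b"on-disk index data")
os.close(fd)

fp = open(path, "rb")
buf = [b"pending ", b"appends"]           # appended data not yet flushed to disk
size = os.fstat(fp.fileno()).st_size      # boundary between file and buffer

def read_at(offset, count):
    # reads below 'size' come from the real file, the remainder from the buffer
    ret = b""
    if offset < size:
        fp.seek(offset)
        ret = fp.read(min(count, size - offset))
        offset += len(ret)
        count -= len(ret)
    if count:
        data = b"".join(buf)
        ret += data[offset - size:offset - size + count]
    return ret

print(read_at(8, 16))                     # b'index datapendin' spans both halves
fp.close()
os.remove(path)
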
77 class changelog(revlog):
77 class changelog(revlog):
78 def __init__(self, opener):
78 def __init__(self, opener):
79 revlog.__init__(self, opener, "00changelog.i")
79 revlog.__init__(self, opener, "00changelog.i")
80
80
81 def delayupdate(self):
81 def delayupdate(self):
82 "delay visibility of index updates to other readers"
82 "delay visibility of index updates to other readers"
83 self._realopener = self.opener
83 self._realopener = self.opener
84 self.opener = self._delayopener
84 self.opener = self._delayopener
85 self._delaycount = len(self)
85 self._delaycount = len(self)
86 self._delaybuf = []
86 self._delaybuf = []
87 self._delayname = None
87 self._delayname = None
88
88
89 def finalize(self, tr):
89 def finalize(self, tr):
90 "finalize index updates"
90 "finalize index updates"
91 self.opener = self._realopener
91 self.opener = self._realopener
92 # move redirected index data back into place
92 # move redirected index data back into place
93 if self._delayname:
93 if self._delayname:
94 util.rename(self._delayname + ".a", self._delayname)
94 util.rename(self._delayname + ".a", self._delayname)
95 elif self._delaybuf:
95 elif self._delaybuf:
96 fp = self.opener(self.indexfile, 'a')
96 fp = self.opener(self.indexfile, 'a')
97 fp.write("".join(self._delaybuf))
97 fp.write("".join(self._delaybuf))
98 fp.close()
98 fp.close()
99 del self._delaybuf
99 del self._delaybuf
100 # split when we're done
100 # split when we're done
101 self.checkinlinesize(tr)
101 self.checkinlinesize(tr)
102
102
103 def _delayopener(self, name, mode='r'):
103 def _delayopener(self, name, mode='r'):
104 fp = self._realopener(name, mode)
104 fp = self._realopener(name, mode)
105 # only divert the index
105 # only divert the index
106 if not name == self.indexfile:
106 if not name == self.indexfile:
107 return fp
107 return fp
108 # if we're doing an initial clone, divert to another file
108 # if we're doing an initial clone, divert to another file
109 if self._delaycount == 0:
109 if self._delaycount == 0:
110 self._delayname = fp.name
110 self._delayname = fp.name
111 if not len(self):
111 if not len(self):
112 # make sure to truncate the file
112 # make sure to truncate the file
113 mode = mode.replace('a', 'w')
113 mode = mode.replace('a', 'w')
114 return self._realopener(name + ".a", mode)
114 return self._realopener(name + ".a", mode)
115 # otherwise, divert to memory
115 # otherwise, divert to memory
116 return appender(fp, self._delaybuf)
116 return appender(fp, self._delaybuf)
117
117
118 def checkinlinesize(self, tr, fp=None):
118 def checkinlinesize(self, tr, fp=None):
119 if self.opener == self._delayopener:
119 if self.opener == self._delayopener:
120 return
120 return
121 return revlog.checkinlinesize(self, tr, fp)
121 return revlog.checkinlinesize(self, tr, fp)
122
122
123 def decode_extra(self, text):
123 def decode_extra(self, text):
124 extra = {}
124 extra = {}
125 for l in text.split('\0'):
125 for l in text.split('\0'):
126 if l:
126 if l:
127 k, v = l.decode('string_escape').split(':', 1)
127 k, v = l.decode('string_escape').split(':', 1)
128 extra[k] = v
128 extra[k] = v
129 return extra
129 return extra
130
130
131 def encode_extra(self, d):
131 def encode_extra(self, d):
132 # keys must be sorted to produce a deterministic changelog entry
132 # keys must be sorted to produce a deterministic changelog entry
133 keys = d.keys()
133 items = [_string_escape('%s:%s' % (k, d[k])) for k in util.sort(d)]
134 keys.sort()
135 items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
136 return "\0".join(items)
134 return "\0".join(items)
137
135
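
The hunk above only changes how the keys of the extra dictionary get sorted; the encoding itself stays the same: NUL-separated, escape-encoded key:value pairs. A stand-alone rehearsal of that encoding, with the built-in sorted() standing in for the util.sort helper (whose definition is outside this excerpt):

def _string_escape(text):
    text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
    return text.replace('\0', '\\0')

def encode_extra(d):
    # sorted() stands in for the util.sort call used in the new hunk above
    return "\0".join(_string_escape('%s:%s' % (k, d[k])) for k in sorted(d))

print(repr(encode_extra({'close': '1', 'branch': 'stable'})))
# 'branch:stable\x00close:1'  (keys are always emitted in sorted order)
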
138 def read(self, node):
136 def read(self, node):
139 """
137 """
140 format used:
138 format used:
141 nodeid\n : manifest node in ascii
139 nodeid\n : manifest node in ascii
142 user\n : user, no \n or \r allowed
140 user\n : user, no \n or \r allowed
143 time tz extra\n : date (time is int or float, timezone is int)
141 time tz extra\n : date (time is int or float, timezone is int)
144 : extra is metadata, encoded and separated by '\0'
142 : extra is metadata, encoded and separated by '\0'
145 : older versions ignore it
143 : older versions ignore it
146 files\n\n : files modified by the cset, no \n or \r allowed
144 files\n\n : files modified by the cset, no \n or \r allowed
147 (.*) : comment (free text, ideally utf-8)
145 (.*) : comment (free text, ideally utf-8)
148
146
149 changelog v0 doesn't use extra
147 changelog v0 doesn't use extra
150 """
148 """
151 text = self.revision(node)
149 text = self.revision(node)
152 if not text:
150 if not text:
153 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
151 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
154 last = text.index("\n\n")
152 last = text.index("\n\n")
155 desc = util.tolocal(text[last + 2:])
153 desc = util.tolocal(text[last + 2:])
156 l = text[:last].split('\n')
154 l = text[:last].split('\n')
157 manifest = bin(l[0])
155 manifest = bin(l[0])
158 user = util.tolocal(l[1])
156 user = util.tolocal(l[1])
159
157
160 extra_data = l[2].split(' ', 2)
158 extra_data = l[2].split(' ', 2)
161 if len(extra_data) != 3:
159 if len(extra_data) != 3:
162 time = float(extra_data.pop(0))
160 time = float(extra_data.pop(0))
163 try:
161 try:
164 # various tools did silly things with the time zone field.
162 # various tools did silly things with the time zone field.
165 timezone = int(extra_data[0])
163 timezone = int(extra_data[0])
166 except:
164 except:
167 timezone = 0
165 timezone = 0
168 extra = {}
166 extra = {}
169 else:
167 else:
170 time, timezone, extra = extra_data
168 time, timezone, extra = extra_data
171 time, timezone = float(time), int(timezone)
169 time, timezone = float(time), int(timezone)
172 extra = self.decode_extra(extra)
170 extra = self.decode_extra(extra)
173 if not extra.get('branch'):
171 if not extra.get('branch'):
174 extra['branch'] = 'default'
172 extra['branch'] = 'default'
175 files = l[3:]
173 files = l[3:]
176 return (manifest, user, (time, timezone), files, desc, extra)
174 return (manifest, user, (time, timezone), files, desc, extra)
177
175
178 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
176 def add(self, manifest, files, desc, transaction, p1=None, p2=None,
179 user=None, date=None, extra={}):
177 user=None, date=None, extra={}):
180
178
181 user, desc = util.fromlocal(user), util.fromlocal(desc)
179 user, desc = util.fromlocal(user), util.fromlocal(desc)
182
180
183 if date:
181 if date:
184 parseddate = "%d %d" % util.parsedate(date)
182 parseddate = "%d %d" % util.parsedate(date)
185 else:
183 else:
186 parseddate = "%d %d" % util.makedate()
184 parseddate = "%d %d" % util.makedate()
187 if extra and extra.get("branch") in ("default", ""):
185 if extra and extra.get("branch") in ("default", ""):
188 del extra["branch"]
186 del extra["branch"]
189 if extra:
187 if extra:
190 extra = self.encode_extra(extra)
188 extra = self.encode_extra(extra)
191 parseddate = "%s %s" % (parseddate, extra)
189 parseddate = "%s %s" % (parseddate, extra)
192 list.sort()
190 l = [hex(manifest), user, parseddate] + util.sort(files) + ["", desc]
193 l = [hex(manifest), user, parseddate] + list + ["", desc]
194 text = "\n".join(l)
191 text = "\n".join(l)
195 return self.addrevision(text, transaction, len(self), p1, p2)
192 return self.addrevision(text, transaction, len(self), p1, p2)
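
Both changelog hunks above, like the cmdutil ones further down, collapse the keys()/sort() two-step into a single util.sort call. The helper's own definition is not part of this excerpt; judging from the call sites (it is handed dicts, lists and items() sequences alike), a plausible stand-in looks like this:

def sort(l):
    """Hypothetical stand-in for util.sort: copy any iterable into a list,
    sort it, and return it (iterating a dict this way yields sorted keys)."""
    l = list(l)
    l.sort()
    return l
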
@@ -1,1192 +1,1183 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat
10 import os, sys, bisect, stat
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18 class AmbiguousCommand(Exception):
18 class AmbiguousCommand(Exception):
19 """Exception raised if command shortcut matches more than one command."""
19 """Exception raised if command shortcut matches more than one command."""
20
20
21 def findpossible(ui, cmd, table):
21 def findpossible(ui, cmd, table):
22 """
22 """
23 Return cmd -> (aliases, command table entry)
23 Return cmd -> (aliases, command table entry)
24 for each matching command.
24 for each matching command.
25 Return debug commands (or their aliases) only if no normal command matches.
25 Return debug commands (or their aliases) only if no normal command matches.
26 """
26 """
27 choice = {}
27 choice = {}
28 debugchoice = {}
28 debugchoice = {}
29 for e in table.keys():
29 for e in table.keys():
30 aliases = e.lstrip("^").split("|")
30 aliases = e.lstrip("^").split("|")
31 found = None
31 found = None
32 if cmd in aliases:
32 if cmd in aliases:
33 found = cmd
33 found = cmd
34 elif not ui.config("ui", "strict"):
34 elif not ui.config("ui", "strict"):
35 for a in aliases:
35 for a in aliases:
36 if a.startswith(cmd):
36 if a.startswith(cmd):
37 found = a
37 found = a
38 break
38 break
39 if found is not None:
39 if found is not None:
40 if aliases[0].startswith("debug") or found.startswith("debug"):
40 if aliases[0].startswith("debug") or found.startswith("debug"):
41 debugchoice[found] = (aliases, table[e])
41 debugchoice[found] = (aliases, table[e])
42 else:
42 else:
43 choice[found] = (aliases, table[e])
43 choice[found] = (aliases, table[e])
44
44
45 if not choice and debugchoice:
45 if not choice and debugchoice:
46 choice = debugchoice
46 choice = debugchoice
47
47
48 return choice
48 return choice
49
49
50 def findcmd(ui, cmd, table):
50 def findcmd(ui, cmd, table):
51 """Return (aliases, command table entry) for command string."""
51 """Return (aliases, command table entry) for command string."""
52 choice = findpossible(ui, cmd, table)
52 choice = findpossible(ui, cmd, table)
53
53
54 if cmd in choice:
54 if cmd in choice:
55 return choice[cmd]
55 return choice[cmd]
56
56
57 if len(choice) > 1:
57 if len(choice) > 1:
58 clist = choice.keys()
58 clist = choice.keys()
59 clist.sort()
59 clist.sort()
60 raise AmbiguousCommand(cmd, clist)
60 raise AmbiguousCommand(cmd, clist)
61
61
62 if choice:
62 if choice:
63 return choice.values()[0]
63 return choice.values()[0]
64
64
65 raise UnknownCommand(cmd)
65 raise UnknownCommand(cmd)
66
66
67 def bail_if_changed(repo):
67 def bail_if_changed(repo):
68 if repo.dirstate.parents()[1] != nullid:
68 if repo.dirstate.parents()[1] != nullid:
69 raise util.Abort(_('outstanding uncommitted merge'))
69 raise util.Abort(_('outstanding uncommitted merge'))
70 modified, added, removed, deleted = repo.status()[:4]
70 modified, added, removed, deleted = repo.status()[:4]
71 if modified or added or removed or deleted:
71 if modified or added or removed or deleted:
72 raise util.Abort(_("outstanding uncommitted changes"))
72 raise util.Abort(_("outstanding uncommitted changes"))
73
73
74 def logmessage(opts):
74 def logmessage(opts):
75 """ get the log message according to -m and -l option """
75 """ get the log message according to -m and -l option """
76 message = opts['message']
76 message = opts['message']
77 logfile = opts['logfile']
77 logfile = opts['logfile']
78
78
79 if message and logfile:
79 if message and logfile:
80 raise util.Abort(_('options --message and --logfile are mutually '
80 raise util.Abort(_('options --message and --logfile are mutually '
81 'exclusive'))
81 'exclusive'))
82 if not message and logfile:
82 if not message and logfile:
83 try:
83 try:
84 if logfile == '-':
84 if logfile == '-':
85 message = sys.stdin.read()
85 message = sys.stdin.read()
86 else:
86 else:
87 message = open(logfile).read()
87 message = open(logfile).read()
88 except IOError, inst:
88 except IOError, inst:
89 raise util.Abort(_("can't read commit message '%s': %s") %
89 raise util.Abort(_("can't read commit message '%s': %s") %
90 (logfile, inst.strerror))
90 (logfile, inst.strerror))
91 return message
91 return message
92
92
93 def loglimit(opts):
93 def loglimit(opts):
94 """get the log limit according to option -l/--limit"""
94 """get the log limit according to option -l/--limit"""
95 limit = opts.get('limit')
95 limit = opts.get('limit')
96 if limit:
96 if limit:
97 try:
97 try:
98 limit = int(limit)
98 limit = int(limit)
99 except ValueError:
99 except ValueError:
100 raise util.Abort(_('limit must be a positive integer'))
100 raise util.Abort(_('limit must be a positive integer'))
101 if limit <= 0: raise util.Abort(_('limit must be positive'))
101 if limit <= 0: raise util.Abort(_('limit must be positive'))
102 else:
102 else:
103 limit = sys.maxint
103 limit = sys.maxint
104 return limit
104 return limit
105
105
106 def setremoteconfig(ui, opts):
106 def setremoteconfig(ui, opts):
107 "copy remote options to ui tree"
107 "copy remote options to ui tree"
108 if opts.get('ssh'):
108 if opts.get('ssh'):
109 ui.setconfig("ui", "ssh", opts['ssh'])
109 ui.setconfig("ui", "ssh", opts['ssh'])
110 if opts.get('remotecmd'):
110 if opts.get('remotecmd'):
111 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
111 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
112
112
113 def revpair(repo, revs):
113 def revpair(repo, revs):
114 '''return pair of nodes, given list of revisions. second item can
114 '''return pair of nodes, given list of revisions. second item can
115 be None, meaning use working dir.'''
115 be None, meaning use working dir.'''
116
116
117 def revfix(repo, val, defval):
117 def revfix(repo, val, defval):
118 if not val and val != 0 and defval is not None:
118 if not val and val != 0 and defval is not None:
119 val = defval
119 val = defval
120 return repo.lookup(val)
120 return repo.lookup(val)
121
121
122 if not revs:
122 if not revs:
123 return repo.dirstate.parents()[0], None
123 return repo.dirstate.parents()[0], None
124 end = None
124 end = None
125 if len(revs) == 1:
125 if len(revs) == 1:
126 if revrangesep in revs[0]:
126 if revrangesep in revs[0]:
127 start, end = revs[0].split(revrangesep, 1)
127 start, end = revs[0].split(revrangesep, 1)
128 start = revfix(repo, start, 0)
128 start = revfix(repo, start, 0)
129 end = revfix(repo, end, len(repo) - 1)
129 end = revfix(repo, end, len(repo) - 1)
130 else:
130 else:
131 start = revfix(repo, revs[0], None)
131 start = revfix(repo, revs[0], None)
132 elif len(revs) == 2:
132 elif len(revs) == 2:
133 if revrangesep in revs[0] or revrangesep in revs[1]:
133 if revrangesep in revs[0] or revrangesep in revs[1]:
134 raise util.Abort(_('too many revisions specified'))
134 raise util.Abort(_('too many revisions specified'))
135 start = revfix(repo, revs[0], None)
135 start = revfix(repo, revs[0], None)
136 end = revfix(repo, revs[1], None)
136 end = revfix(repo, revs[1], None)
137 else:
137 else:
138 raise util.Abort(_('too many revisions specified'))
138 raise util.Abort(_('too many revisions specified'))
139 return start, end
139 return start, end
140
140
141 def revrange(repo, revs):
141 def revrange(repo, revs):
142 """Yield revision as strings from a list of revision specifications."""
142 """Yield revision as strings from a list of revision specifications."""
143
143
144 def revfix(repo, val, defval):
144 def revfix(repo, val, defval):
145 if not val and val != 0 and defval is not None:
145 if not val and val != 0 and defval is not None:
146 return defval
146 return defval
147 return repo.changelog.rev(repo.lookup(val))
147 return repo.changelog.rev(repo.lookup(val))
148
148
149 seen, l = {}, []
149 seen, l = {}, []
150 for spec in revs:
150 for spec in revs:
151 if revrangesep in spec:
151 if revrangesep in spec:
152 start, end = spec.split(revrangesep, 1)
152 start, end = spec.split(revrangesep, 1)
153 start = revfix(repo, start, 0)
153 start = revfix(repo, start, 0)
154 end = revfix(repo, end, len(repo) - 1)
154 end = revfix(repo, end, len(repo) - 1)
155 step = start > end and -1 or 1
155 step = start > end and -1 or 1
156 for rev in xrange(start, end+step, step):
156 for rev in xrange(start, end+step, step):
157 if rev in seen:
157 if rev in seen:
158 continue
158 continue
159 seen[rev] = 1
159 seen[rev] = 1
160 l.append(rev)
160 l.append(rev)
161 else:
161 else:
162 rev = revfix(repo, spec, None)
162 rev = revfix(repo, spec, None)
163 if rev in seen:
163 if rev in seen:
164 continue
164 continue
165 seen[rev] = 1
165 seen[rev] = 1
166 l.append(rev)
166 l.append(rev)
167
167
168 return l
168 return l
169
169
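
The 'start:end' branch of revrange above walks the range inclusively in either direction, with the and/or idiom choosing the step sign. A tiny rehearsal of just that arithmetic:

def expand_range(start, end):
    step = start > end and -1 or 1        # same idiom as in revrange above
    return list(range(start, end + step, step))

print(expand_range(2, 5))   # [2, 3, 4, 5]
print(expand_range(5, 2))   # [5, 4, 3, 2]
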
170 def make_filename(repo, pat, node,
170 def make_filename(repo, pat, node,
171 total=None, seqno=None, revwidth=None, pathname=None):
171 total=None, seqno=None, revwidth=None, pathname=None):
172 node_expander = {
172 node_expander = {
173 'H': lambda: hex(node),
173 'H': lambda: hex(node),
174 'R': lambda: str(repo.changelog.rev(node)),
174 'R': lambda: str(repo.changelog.rev(node)),
175 'h': lambda: short(node),
175 'h': lambda: short(node),
176 }
176 }
177 expander = {
177 expander = {
178 '%': lambda: '%',
178 '%': lambda: '%',
179 'b': lambda: os.path.basename(repo.root),
179 'b': lambda: os.path.basename(repo.root),
180 }
180 }
181
181
182 try:
182 try:
183 if node:
183 if node:
184 expander.update(node_expander)
184 expander.update(node_expander)
185 if node:
185 if node:
186 expander['r'] = (lambda:
186 expander['r'] = (lambda:
187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
188 if total is not None:
188 if total is not None:
189 expander['N'] = lambda: str(total)
189 expander['N'] = lambda: str(total)
190 if seqno is not None:
190 if seqno is not None:
191 expander['n'] = lambda: str(seqno)
191 expander['n'] = lambda: str(seqno)
192 if total is not None and seqno is not None:
192 if total is not None and seqno is not None:
193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
194 if pathname is not None:
194 if pathname is not None:
195 expander['s'] = lambda: os.path.basename(pathname)
195 expander['s'] = lambda: os.path.basename(pathname)
196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
197 expander['p'] = lambda: pathname
197 expander['p'] = lambda: pathname
198
198
199 newname = []
199 newname = []
200 patlen = len(pat)
200 patlen = len(pat)
201 i = 0
201 i = 0
202 while i < patlen:
202 while i < patlen:
203 c = pat[i]
203 c = pat[i]
204 if c == '%':
204 if c == '%':
205 i += 1
205 i += 1
206 c = pat[i]
206 c = pat[i]
207 c = expander[c]()
207 c = expander[c]()
208 newname.append(c)
208 newname.append(c)
209 i += 1
209 i += 1
210 return ''.join(newname)
210 return ''.join(newname)
211 except KeyError, inst:
211 except KeyError, inst:
212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
213 inst.args[0])
213 inst.args[0])
214
214
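
make_filename expands output-name patterns through the expander table above (the way patterns such as '%b-%R.patch' are handled for export-style commands). A stand-alone rehearsal of the same percent-expansion loop, with made-up values in place of the repo and node lookups:

expander = {
    '%': lambda: '%',                          # '%%' yields a literal percent sign
    'b': lambda: 'myrepo',                     # basename of repo.root
    'h': lambda: '1f0d3a5b2c4e',               # short(node), made up here
    'R': lambda: '42',                         # changelog revision number
    'n': lambda: str(7).zfill(len(str(120))),  # seqno padded to the width of total
}

def expand(pat):
    out, i = [], 0
    while i < len(pat):
        c = pat[i]
        if c == '%':
            i += 1
            c = expander[pat[i]]()
        out.append(c)
        i += 1
    return ''.join(out)

print(expand('%b-%R-%n.patch'))   # myrepo-42-007.patch
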
215 def make_file(repo, pat, node=None,
215 def make_file(repo, pat, node=None,
216 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
216 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
217 if not pat or pat == '-':
217 if not pat or pat == '-':
218 return 'w' in mode and sys.stdout or sys.stdin
218 return 'w' in mode and sys.stdout or sys.stdin
219 if hasattr(pat, 'write') and 'w' in mode:
219 if hasattr(pat, 'write') and 'w' in mode:
220 return pat
220 return pat
221 if hasattr(pat, 'read') and 'r' in mode:
221 if hasattr(pat, 'read') and 'r' in mode:
222 return pat
222 return pat
223 return open(make_filename(repo, pat, node, total, seqno, revwidth,
223 return open(make_filename(repo, pat, node, total, seqno, revwidth,
224 pathname),
224 pathname),
225 mode)
225 mode)
226
226
227 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
227 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
228 if not globbed and default == 'relpath':
228 if not globbed and default == 'relpath':
229 pats = util.expand_glob(pats or [])
229 pats = util.expand_glob(pats or [])
230 m = _match.match(repo.root, repo.getcwd(), pats,
230 m = _match.match(repo.root, repo.getcwd(), pats,
231 opts.get('include'), opts.get('exclude'), default)
231 opts.get('include'), opts.get('exclude'), default)
232 def badfn(f, msg):
232 def badfn(f, msg):
233 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
233 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
234 return False
234 return False
235 m.bad = badfn
235 m.bad = badfn
236 return m
236 return m
237
237
238 def matchall(repo):
238 def matchall(repo):
239 return _match.always(repo.root, repo.getcwd())
239 return _match.always(repo.root, repo.getcwd())
240
240
241 def matchfiles(repo, files):
241 def matchfiles(repo, files):
242 return _match.exact(repo.root, repo.getcwd(), files)
242 return _match.exact(repo.root, repo.getcwd(), files)
243
243
244 def findrenames(repo, added=None, removed=None, threshold=0.5):
244 def findrenames(repo, added=None, removed=None, threshold=0.5):
245 '''find renamed files -- yields (before, after, score) tuples'''
245 '''find renamed files -- yields (before, after, score) tuples'''
246 if added is None or removed is None:
246 if added is None or removed is None:
247 added, removed = repo.status()[1:3]
247 added, removed = repo.status()[1:3]
248 ctx = repo['.']
248 ctx = repo['.']
249 for a in added:
249 for a in added:
250 aa = repo.wread(a)
250 aa = repo.wread(a)
251 bestname, bestscore = None, threshold
251 bestname, bestscore = None, threshold
252 for r in removed:
252 for r in removed:
253 rr = ctx.filectx(r).data()
253 rr = ctx.filectx(r).data()
254
254
255 # bdiff.blocks() returns blocks of matching lines
255 # bdiff.blocks() returns blocks of matching lines
256 # count the number of bytes in each
256 # count the number of bytes in each
257 equal = 0
257 equal = 0
258 alines = mdiff.splitnewlines(aa)
258 alines = mdiff.splitnewlines(aa)
259 matches = bdiff.blocks(aa, rr)
259 matches = bdiff.blocks(aa, rr)
260 for x1,x2,y1,y2 in matches:
260 for x1,x2,y1,y2 in matches:
261 for line in alines[x1:x2]:
261 for line in alines[x1:x2]:
262 equal += len(line)
262 equal += len(line)
263
263
264 lengths = len(aa) + len(rr)
264 lengths = len(aa) + len(rr)
265 if lengths:
265 if lengths:
266 myscore = equal*2.0 / lengths
266 myscore = equal*2.0 / lengths
267 if myscore >= bestscore:
267 if myscore >= bestscore:
268 bestname, bestscore = r, myscore
268 bestname, bestscore = r, myscore
269 if bestname:
269 if bestname:
270 yield bestname, a, bestscore
270 yield bestname, a, bestscore
271
271
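
findrenames above scores an added/removed pair as twice the number of matching bytes divided by the combined size, keeping the best candidate at or above the threshold. A rough worked example, with difflib standing in for bdiff.blocks, so the byte count only approximates hg's line-block matching:

import difflib

old = "alpha\nbeta\ngamma\n"
new = "alpha\nbeta\ndelta\n"

equal = sum(m.size for m in
            difflib.SequenceMatcher(None, old, new).get_matching_blocks())
score = equal * 2.0 / (len(old) + len(new))
print(round(score, 2))   # roughly 0.76; anything >= the 0.5 threshold is a rename candidate
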
272 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
272 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
273 if dry_run is None:
273 if dry_run is None:
274 dry_run = opts.get('dry_run')
274 dry_run = opts.get('dry_run')
275 if similarity is None:
275 if similarity is None:
276 similarity = float(opts.get('similarity') or 0)
276 similarity = float(opts.get('similarity') or 0)
277 add, remove = [], []
277 add, remove = [], []
278 mapping = {}
278 mapping = {}
279 audit_path = util.path_auditor(repo.root)
279 audit_path = util.path_auditor(repo.root)
280 m = match(repo, pats, opts)
280 m = match(repo, pats, opts)
281 for abs in repo.walk(m):
281 for abs in repo.walk(m):
282 target = repo.wjoin(abs)
282 target = repo.wjoin(abs)
283 good = True
283 good = True
284 try:
284 try:
285 audit_path(abs)
285 audit_path(abs)
286 except:
286 except:
287 good = False
287 good = False
288 rel = m.rel(abs)
288 rel = m.rel(abs)
289 exact = m.exact(abs)
289 exact = m.exact(abs)
290 if good and abs not in repo.dirstate:
290 if good and abs not in repo.dirstate:
291 add.append(abs)
291 add.append(abs)
292 mapping[abs] = rel, m.exact(abs)
292 mapping[abs] = rel, m.exact(abs)
293 if repo.ui.verbose or not exact:
293 if repo.ui.verbose or not exact:
294 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
294 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
295 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
295 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
296 or (os.path.isdir(target) and not os.path.islink(target))):
296 or (os.path.isdir(target) and not os.path.islink(target))):
297 remove.append(abs)
297 remove.append(abs)
298 mapping[abs] = rel, exact
298 mapping[abs] = rel, exact
299 if repo.ui.verbose or not exact:
299 if repo.ui.verbose or not exact:
300 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
300 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
301 if not dry_run:
301 if not dry_run:
302 repo.remove(remove)
302 repo.remove(remove)
303 repo.add(add)
303 repo.add(add)
304 if similarity > 0:
304 if similarity > 0:
305 for old, new, score in findrenames(repo, add, remove, similarity):
305 for old, new, score in findrenames(repo, add, remove, similarity):
306 oldrel, oldexact = mapping[old]
306 oldrel, oldexact = mapping[old]
307 newrel, newexact = mapping[new]
307 newrel, newexact = mapping[new]
308 if repo.ui.verbose or not oldexact or not newexact:
308 if repo.ui.verbose or not oldexact or not newexact:
309 repo.ui.status(_('recording removal of %s as rename to %s '
309 repo.ui.status(_('recording removal of %s as rename to %s '
310 '(%d%% similar)\n') %
310 '(%d%% similar)\n') %
311 (oldrel, newrel, score * 100))
311 (oldrel, newrel, score * 100))
312 if not dry_run:
312 if not dry_run:
313 repo.copy(old, new)
313 repo.copy(old, new)
314
314
315 def copy(ui, repo, pats, opts, rename=False):
315 def copy(ui, repo, pats, opts, rename=False):
316 # called with the repo lock held
316 # called with the repo lock held
317 #
317 #
318 # hgsep => pathname that uses "/" to separate directories
318 # hgsep => pathname that uses "/" to separate directories
319 # ossep => pathname that uses os.sep to separate directories
319 # ossep => pathname that uses os.sep to separate directories
320 cwd = repo.getcwd()
320 cwd = repo.getcwd()
321 targets = {}
321 targets = {}
322 after = opts.get("after")
322 after = opts.get("after")
323 dryrun = opts.get("dry_run")
323 dryrun = opts.get("dry_run")
324
324
325 def walkpat(pat):
325 def walkpat(pat):
326 srcs = []
326 srcs = []
327 m = match(repo, [pat], opts, globbed=True)
327 m = match(repo, [pat], opts, globbed=True)
328 for abs in repo.walk(m):
328 for abs in repo.walk(m):
329 state = repo.dirstate[abs]
329 state = repo.dirstate[abs]
330 rel = m.rel(abs)
330 rel = m.rel(abs)
331 exact = m.exact(abs)
331 exact = m.exact(abs)
332 if state in '?r':
332 if state in '?r':
333 if exact and state == '?':
333 if exact and state == '?':
334 ui.warn(_('%s: not copying - file is not managed\n') % rel)
334 ui.warn(_('%s: not copying - file is not managed\n') % rel)
335 if exact and state == 'r':
335 if exact and state == 'r':
336 ui.warn(_('%s: not copying - file has been marked for'
336 ui.warn(_('%s: not copying - file has been marked for'
337 ' remove\n') % rel)
337 ' remove\n') % rel)
338 continue
338 continue
339 # abs: hgsep
339 # abs: hgsep
340 # rel: ossep
340 # rel: ossep
341 srcs.append((abs, rel, exact))
341 srcs.append((abs, rel, exact))
342 return srcs
342 return srcs
343
343
344 # abssrc: hgsep
344 # abssrc: hgsep
345 # relsrc: ossep
345 # relsrc: ossep
346 # otarget: ossep
346 # otarget: ossep
347 def copyfile(abssrc, relsrc, otarget, exact):
347 def copyfile(abssrc, relsrc, otarget, exact):
348 abstarget = util.canonpath(repo.root, cwd, otarget)
348 abstarget = util.canonpath(repo.root, cwd, otarget)
349 reltarget = repo.pathto(abstarget, cwd)
349 reltarget = repo.pathto(abstarget, cwd)
350 target = repo.wjoin(abstarget)
350 target = repo.wjoin(abstarget)
351 src = repo.wjoin(abssrc)
351 src = repo.wjoin(abssrc)
352 state = repo.dirstate[abstarget]
352 state = repo.dirstate[abstarget]
353
353
354 # check for collisions
354 # check for collisions
355 prevsrc = targets.get(abstarget)
355 prevsrc = targets.get(abstarget)
356 if prevsrc is not None:
356 if prevsrc is not None:
357 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
357 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
358 (reltarget, repo.pathto(abssrc, cwd),
358 (reltarget, repo.pathto(abssrc, cwd),
359 repo.pathto(prevsrc, cwd)))
359 repo.pathto(prevsrc, cwd)))
360 return
360 return
361
361
362 # check for overwrites
362 # check for overwrites
363 exists = os.path.exists(target)
363 exists = os.path.exists(target)
364 if (not after and exists or after and state in 'mn'):
364 if (not after and exists or after and state in 'mn'):
365 if not opts['force']:
365 if not opts['force']:
366 ui.warn(_('%s: not overwriting - file exists\n') %
366 ui.warn(_('%s: not overwriting - file exists\n') %
367 reltarget)
367 reltarget)
368 return
368 return
369
369
370 if after:
370 if after:
371 if not exists:
371 if not exists:
372 return
372 return
373 elif not dryrun:
373 elif not dryrun:
374 try:
374 try:
375 if exists:
375 if exists:
376 os.unlink(target)
376 os.unlink(target)
377 targetdir = os.path.dirname(target) or '.'
377 targetdir = os.path.dirname(target) or '.'
378 if not os.path.isdir(targetdir):
378 if not os.path.isdir(targetdir):
379 os.makedirs(targetdir)
379 os.makedirs(targetdir)
380 util.copyfile(src, target)
380 util.copyfile(src, target)
381 except IOError, inst:
381 except IOError, inst:
382 if inst.errno == errno.ENOENT:
382 if inst.errno == errno.ENOENT:
383 ui.warn(_('%s: deleted in working copy\n') % relsrc)
383 ui.warn(_('%s: deleted in working copy\n') % relsrc)
384 else:
384 else:
385 ui.warn(_('%s: cannot copy - %s\n') %
385 ui.warn(_('%s: cannot copy - %s\n') %
386 (relsrc, inst.strerror))
386 (relsrc, inst.strerror))
387 return True # report a failure
387 return True # report a failure
388
388
389 if ui.verbose or not exact:
389 if ui.verbose or not exact:
390 action = rename and "moving" or "copying"
390 action = rename and "moving" or "copying"
391 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
391 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
392
392
393 targets[abstarget] = abssrc
393 targets[abstarget] = abssrc
394
394
395 # fix up dirstate
395 # fix up dirstate
396 origsrc = repo.dirstate.copied(abssrc) or abssrc
396 origsrc = repo.dirstate.copied(abssrc) or abssrc
397 if abstarget == origsrc: # copying back a copy?
397 if abstarget == origsrc: # copying back a copy?
398 if state not in 'mn' and not dryrun:
398 if state not in 'mn' and not dryrun:
399 repo.dirstate.normallookup(abstarget)
399 repo.dirstate.normallookup(abstarget)
400 else:
400 else:
401 if repo.dirstate[origsrc] == 'a':
401 if repo.dirstate[origsrc] == 'a':
402 if not ui.quiet:
402 if not ui.quiet:
403 ui.warn(_("%s has not been committed yet, so no copy "
403 ui.warn(_("%s has not been committed yet, so no copy "
404 "data will be stored for %s.\n")
404 "data will be stored for %s.\n")
405 % (repo.pathto(origsrc, cwd), reltarget))
405 % (repo.pathto(origsrc, cwd), reltarget))
406 if abstarget not in repo.dirstate and not dryrun:
406 if abstarget not in repo.dirstate and not dryrun:
407 repo.add([abstarget])
407 repo.add([abstarget])
408 elif not dryrun:
408 elif not dryrun:
409 repo.copy(origsrc, abstarget)
409 repo.copy(origsrc, abstarget)
410
410
411 if rename and not dryrun:
411 if rename and not dryrun:
412 repo.remove([abssrc], not after)
412 repo.remove([abssrc], not after)
413
413
414 # pat: ossep
414 # pat: ossep
415 # dest ossep
415 # dest ossep
416 # srcs: list of (hgsep, hgsep, ossep, bool)
416 # srcs: list of (hgsep, hgsep, ossep, bool)
417 # return: function that takes hgsep and returns ossep
417 # return: function that takes hgsep and returns ossep
418 def targetpathfn(pat, dest, srcs):
418 def targetpathfn(pat, dest, srcs):
419 if os.path.isdir(pat):
419 if os.path.isdir(pat):
420 abspfx = util.canonpath(repo.root, cwd, pat)
420 abspfx = util.canonpath(repo.root, cwd, pat)
421 abspfx = util.localpath(abspfx)
421 abspfx = util.localpath(abspfx)
422 if destdirexists:
422 if destdirexists:
423 striplen = len(os.path.split(abspfx)[0])
423 striplen = len(os.path.split(abspfx)[0])
424 else:
424 else:
425 striplen = len(abspfx)
425 striplen = len(abspfx)
426 if striplen:
426 if striplen:
427 striplen += len(os.sep)
427 striplen += len(os.sep)
428 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
428 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
429 elif destdirexists:
429 elif destdirexists:
430 res = lambda p: os.path.join(dest,
430 res = lambda p: os.path.join(dest,
431 os.path.basename(util.localpath(p)))
431 os.path.basename(util.localpath(p)))
432 else:
432 else:
433 res = lambda p: dest
433 res = lambda p: dest
434 return res
434 return res
435
435
436 # pat: ossep
436 # pat: ossep
437 # dest ossep
437 # dest ossep
438 # srcs: list of (hgsep, hgsep, ossep, bool)
438 # srcs: list of (hgsep, hgsep, ossep, bool)
439 # return: function that takes hgsep and returns ossep
439 # return: function that takes hgsep and returns ossep
440 def targetpathafterfn(pat, dest, srcs):
440 def targetpathafterfn(pat, dest, srcs):
441 if util.patkind(pat, None)[0]:
441 if util.patkind(pat, None)[0]:
442 # a mercurial pattern
442 # a mercurial pattern
443 res = lambda p: os.path.join(dest,
443 res = lambda p: os.path.join(dest,
444 os.path.basename(util.localpath(p)))
444 os.path.basename(util.localpath(p)))
445 else:
445 else:
446 abspfx = util.canonpath(repo.root, cwd, pat)
446 abspfx = util.canonpath(repo.root, cwd, pat)
447 if len(abspfx) < len(srcs[0][0]):
447 if len(abspfx) < len(srcs[0][0]):
448 # A directory. Either the target path contains the last
448 # A directory. Either the target path contains the last
449 # component of the source path or it does not.
449 # component of the source path or it does not.
450 def evalpath(striplen):
450 def evalpath(striplen):
451 score = 0
451 score = 0
452 for s in srcs:
452 for s in srcs:
453 t = os.path.join(dest, util.localpath(s[0])[striplen:])
453 t = os.path.join(dest, util.localpath(s[0])[striplen:])
454 if os.path.exists(t):
454 if os.path.exists(t):
455 score += 1
455 score += 1
456 return score
456 return score
457
457
458 abspfx = util.localpath(abspfx)
458 abspfx = util.localpath(abspfx)
459 striplen = len(abspfx)
459 striplen = len(abspfx)
460 if striplen:
460 if striplen:
461 striplen += len(os.sep)
461 striplen += len(os.sep)
462 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
462 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
463 score = evalpath(striplen)
463 score = evalpath(striplen)
464 striplen1 = len(os.path.split(abspfx)[0])
464 striplen1 = len(os.path.split(abspfx)[0])
465 if striplen1:
465 if striplen1:
466 striplen1 += len(os.sep)
466 striplen1 += len(os.sep)
467 if evalpath(striplen1) > score:
467 if evalpath(striplen1) > score:
468 striplen = striplen1
468 striplen = striplen1
469 res = lambda p: os.path.join(dest,
469 res = lambda p: os.path.join(dest,
470 util.localpath(p)[striplen:])
470 util.localpath(p)[striplen:])
471 else:
471 else:
472 # a file
472 # a file
473 if destdirexists:
473 if destdirexists:
474 res = lambda p: os.path.join(dest,
474 res = lambda p: os.path.join(dest,
475 os.path.basename(util.localpath(p)))
475 os.path.basename(util.localpath(p)))
476 else:
476 else:
477 res = lambda p: dest
477 res = lambda p: dest
478 return res
478 return res
479
479
480
480
481 pats = util.expand_glob(pats)
481 pats = util.expand_glob(pats)
482 if not pats:
482 if not pats:
483 raise util.Abort(_('no source or destination specified'))
483 raise util.Abort(_('no source or destination specified'))
484 if len(pats) == 1:
484 if len(pats) == 1:
485 raise util.Abort(_('no destination specified'))
485 raise util.Abort(_('no destination specified'))
486 dest = pats.pop()
486 dest = pats.pop()
487 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
487 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
488 if not destdirexists:
488 if not destdirexists:
489 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
489 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
490 raise util.Abort(_('with multiple sources, destination must be an '
490 raise util.Abort(_('with multiple sources, destination must be an '
491 'existing directory'))
491 'existing directory'))
492 if util.endswithsep(dest):
492 if util.endswithsep(dest):
493 raise util.Abort(_('destination %s is not a directory') % dest)
493 raise util.Abort(_('destination %s is not a directory') % dest)
494
494
495 tfn = targetpathfn
495 tfn = targetpathfn
496 if after:
496 if after:
497 tfn = targetpathafterfn
497 tfn = targetpathafterfn
498 copylist = []
498 copylist = []
499 for pat in pats:
499 for pat in pats:
500 srcs = walkpat(pat)
500 srcs = walkpat(pat)
501 if not srcs:
501 if not srcs:
502 continue
502 continue
503 copylist.append((tfn(pat, dest, srcs), srcs))
503 copylist.append((tfn(pat, dest, srcs), srcs))
504 if not copylist:
504 if not copylist:
505 raise util.Abort(_('no files to copy'))
505 raise util.Abort(_('no files to copy'))
506
506
507 errors = 0
507 errors = 0
508 for targetpath, srcs in copylist:
508 for targetpath, srcs in copylist:
509 for abssrc, relsrc, exact in srcs:
509 for abssrc, relsrc, exact in srcs:
510 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
510 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
511 errors += 1
511 errors += 1
512
512
513 if errors:
513 if errors:
514 ui.warn(_('(consider using --after)\n'))
514 ui.warn(_('(consider using --after)\n'))
515
515
516 return errors
516 return errors
517
517
518 def service(opts, parentfn=None, initfn=None, runfn=None):
518 def service(opts, parentfn=None, initfn=None, runfn=None):
519 '''Run a command as a service.'''
519 '''Run a command as a service.'''
520
520
521 if opts['daemon'] and not opts['daemon_pipefds']:
521 if opts['daemon'] and not opts['daemon_pipefds']:
522 rfd, wfd = os.pipe()
522 rfd, wfd = os.pipe()
523 args = sys.argv[:]
523 args = sys.argv[:]
524 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
524 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
525 # Don't pass --cwd to the child process, because we've already
525 # Don't pass --cwd to the child process, because we've already
526 # changed directory.
526 # changed directory.
527 for i in xrange(1,len(args)):
527 for i in xrange(1,len(args)):
528 if args[i].startswith('--cwd='):
528 if args[i].startswith('--cwd='):
529 del args[i]
529 del args[i]
530 break
530 break
531 elif args[i].startswith('--cwd'):
531 elif args[i].startswith('--cwd'):
532 del args[i:i+2]
532 del args[i:i+2]
533 break
533 break
534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
535 args[0], args)
535 args[0], args)
536 os.close(wfd)
536 os.close(wfd)
537 os.read(rfd, 1)
537 os.read(rfd, 1)
538 if parentfn:
538 if parentfn:
539 return parentfn(pid)
539 return parentfn(pid)
540 else:
540 else:
541 os._exit(0)
541 os._exit(0)
542
542
543 if initfn:
543 if initfn:
544 initfn()
544 initfn()
545
545
546 if opts['pid_file']:
546 if opts['pid_file']:
547 fp = open(opts['pid_file'], 'w')
547 fp = open(opts['pid_file'], 'w')
548 fp.write(str(os.getpid()) + '\n')
548 fp.write(str(os.getpid()) + '\n')
549 fp.close()
549 fp.close()
550
550
551 if opts['daemon_pipefds']:
551 if opts['daemon_pipefds']:
552 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
552 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
553 os.close(rfd)
553 os.close(rfd)
554 try:
554 try:
555 os.setsid()
555 os.setsid()
556 except AttributeError:
556 except AttributeError:
557 pass
557 pass
558 os.write(wfd, 'y')
558 os.write(wfd, 'y')
559 os.close(wfd)
559 os.close(wfd)
560 sys.stdout.flush()
560 sys.stdout.flush()
561 sys.stderr.flush()
561 sys.stderr.flush()
562 fd = os.open(util.nulldev, os.O_RDWR)
562 fd = os.open(util.nulldev, os.O_RDWR)
563 if fd != 0: os.dup2(fd, 0)
563 if fd != 0: os.dup2(fd, 0)
564 if fd != 1: os.dup2(fd, 1)
564 if fd != 1: os.dup2(fd, 1)
565 if fd != 2: os.dup2(fd, 2)
565 if fd != 2: os.dup2(fd, 2)
566 if fd not in (0, 1, 2): os.close(fd)
566 if fd not in (0, 1, 2): os.close(fd)
567
567
568 if runfn:
568 if runfn:
569 return runfn()
569 return runfn()
570
570
571 class changeset_printer(object):
571 class changeset_printer(object):
572 '''show changeset information when templating not requested.'''
572 '''show changeset information when templating not requested.'''
573
573
574 def __init__(self, ui, repo, patch, buffered):
574 def __init__(self, ui, repo, patch, buffered):
575 self.ui = ui
575 self.ui = ui
576 self.repo = repo
576 self.repo = repo
577 self.buffered = buffered
577 self.buffered = buffered
578 self.patch = patch
578 self.patch = patch
579 self.header = {}
579 self.header = {}
580 self.hunk = {}
580 self.hunk = {}
581 self.lastheader = None
581 self.lastheader = None
582
582
583 def flush(self, rev):
583 def flush(self, rev):
584 if rev in self.header:
584 if rev in self.header:
585 h = self.header[rev]
585 h = self.header[rev]
586 if h != self.lastheader:
586 if h != self.lastheader:
587 self.lastheader = h
587 self.lastheader = h
588 self.ui.write(h)
588 self.ui.write(h)
589 del self.header[rev]
589 del self.header[rev]
590 if rev in self.hunk:
590 if rev in self.hunk:
591 self.ui.write(self.hunk[rev])
591 self.ui.write(self.hunk[rev])
592 del self.hunk[rev]
592 del self.hunk[rev]
593 return 1
593 return 1
594 return 0
594 return 0
595
595
596 def show(self, rev=0, changenode=None, copies=(), **props):
596 def show(self, rev=0, changenode=None, copies=(), **props):
597 if self.buffered:
597 if self.buffered:
598 self.ui.pushbuffer()
598 self.ui.pushbuffer()
599 self._show(rev, changenode, copies, props)
599 self._show(rev, changenode, copies, props)
600 self.hunk[rev] = self.ui.popbuffer()
600 self.hunk[rev] = self.ui.popbuffer()
601 else:
601 else:
602 self._show(rev, changenode, copies, props)
602 self._show(rev, changenode, copies, props)
603
603
604 def _show(self, rev, changenode, copies, props):
604 def _show(self, rev, changenode, copies, props):
605 '''show a single changeset or file revision'''
605 '''show a single changeset or file revision'''
606 log = self.repo.changelog
606 log = self.repo.changelog
607 if changenode is None:
607 if changenode is None:
608 changenode = log.node(rev)
608 changenode = log.node(rev)
609 elif not rev:
609 elif not rev:
610 rev = log.rev(changenode)
610 rev = log.rev(changenode)
611
611
612 if self.ui.quiet:
612 if self.ui.quiet:
613 self.ui.write("%d:%s\n" % (rev, short(changenode)))
613 self.ui.write("%d:%s\n" % (rev, short(changenode)))
614 return
614 return
615
615
616 changes = log.read(changenode)
616 changes = log.read(changenode)
617 date = util.datestr(changes[2])
617 date = util.datestr(changes[2])
618 extra = changes[5]
618 extra = changes[5]
619 branch = extra.get("branch")
619 branch = extra.get("branch")
620
620
621 hexfunc = self.ui.debugflag and hex or short
621 hexfunc = self.ui.debugflag and hex or short
622
622
623 parents = [(p, hexfunc(log.node(p)))
623 parents = [(p, hexfunc(log.node(p)))
624 for p in self._meaningful_parentrevs(log, rev)]
624 for p in self._meaningful_parentrevs(log, rev)]
625
625
626 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
626 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
627
627
628 # don't show the default branch name
628 # don't show the default branch name
629 if branch != 'default':
629 if branch != 'default':
630 branch = util.tolocal(branch)
630 branch = util.tolocal(branch)
631 self.ui.write(_("branch: %s\n") % branch)
631 self.ui.write(_("branch: %s\n") % branch)
632 for tag in self.repo.nodetags(changenode):
632 for tag in self.repo.nodetags(changenode):
633 self.ui.write(_("tag: %s\n") % tag)
633 self.ui.write(_("tag: %s\n") % tag)
634 for parent in parents:
634 for parent in parents:
635 self.ui.write(_("parent: %d:%s\n") % parent)
635 self.ui.write(_("parent: %d:%s\n") % parent)
636
636
637 if self.ui.debugflag:
637 if self.ui.debugflag:
638 self.ui.write(_("manifest: %d:%s\n") %
638 self.ui.write(_("manifest: %d:%s\n") %
639 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
639 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
640 self.ui.write(_("user: %s\n") % changes[1])
640 self.ui.write(_("user: %s\n") % changes[1])
641 self.ui.write(_("date: %s\n") % date)
641 self.ui.write(_("date: %s\n") % date)
642
642
643 if self.ui.debugflag:
643 if self.ui.debugflag:
644 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
644 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
645 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
645 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
646 files):
646 files):
647 if value:
647 if value:
648 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
648 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
649 elif changes[3] and self.ui.verbose:
649 elif changes[3] and self.ui.verbose:
650 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
650 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
651 if copies and self.ui.verbose:
651 if copies and self.ui.verbose:
652 copies = ['%s (%s)' % c for c in copies]
652 copies = ['%s (%s)' % c for c in copies]
653 self.ui.write(_("copies: %s\n") % ' '.join(copies))
653 self.ui.write(_("copies: %s\n") % ' '.join(copies))
654
654
655 if extra and self.ui.debugflag:
655 if extra and self.ui.debugflag:
656 extraitems = extra.items()
656 for key, value in util.sort(extra.items()):
657 extraitems.sort()
658 for key, value in extraitems:
659 self.ui.write(_("extra: %s=%s\n")
657 self.ui.write(_("extra: %s=%s\n")
660 % (key, value.encode('string_escape')))
658 % (key, value.encode('string_escape')))
661
659
662 description = changes[4].strip()
660 description = changes[4].strip()
663 if description:
661 if description:
664 if self.ui.verbose:
662 if self.ui.verbose:
665 self.ui.write(_("description:\n"))
663 self.ui.write(_("description:\n"))
666 self.ui.write(description)
664 self.ui.write(description)
667 self.ui.write("\n\n")
665 self.ui.write("\n\n")
668 else:
666 else:
669 self.ui.write(_("summary: %s\n") %
667 self.ui.write(_("summary: %s\n") %
670 description.splitlines()[0])
668 description.splitlines()[0])
671 self.ui.write("\n")
669 self.ui.write("\n")
672
670
673 self.showpatch(changenode)
671 self.showpatch(changenode)
674
672
675 def showpatch(self, node):
673 def showpatch(self, node):
676 if self.patch:
674 if self.patch:
677 prev = self.repo.changelog.parents(node)[0]
675 prev = self.repo.changelog.parents(node)[0]
678 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
676 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
679 opts=patch.diffopts(self.ui))
677 opts=patch.diffopts(self.ui))
680 self.ui.write("\n")
678 self.ui.write("\n")
681
679
682 def _meaningful_parentrevs(self, log, rev):
680 def _meaningful_parentrevs(self, log, rev):
683 """Return list of meaningful (or all if debug) parentrevs for rev.
681 """Return list of meaningful (or all if debug) parentrevs for rev.
684
682
685 For merges (two non-nullrev revisions) both parents are meaningful.
683 For merges (two non-nullrev revisions) both parents are meaningful.
686 Otherwise the first parent revision is considered meaningful if it
684 Otherwise the first parent revision is considered meaningful if it
687 is not the preceding revision.
685 is not the preceding revision.
688 """
686 """
689 parents = log.parentrevs(rev)
687 parents = log.parentrevs(rev)
690 if not self.ui.debugflag and parents[1] == nullrev:
688 if not self.ui.debugflag and parents[1] == nullrev:
691 if parents[0] >= rev - 1:
689 if parents[0] >= rev - 1:
692 parents = []
690 parents = []
693 else:
691 else:
694 parents = [parents[0]]
692 parents = [parents[0]]
695 return parents
693 return parents
696
694
697
695
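
_meaningful_parentrevs above decides which parents are worth printing: merges keep both, while in linear history the first parent is shown only when it is not simply the preceding revision. A compact rehearsal of that rule:

nullrev = -1

def meaningful_parentrevs(parents, rev, debugflag=False):
    # mirrors the logic of _meaningful_parentrevs above, for illustration only
    if not debugflag and parents[1] == nullrev:
        return [] if parents[0] >= rev - 1 else [parents[0]]
    return list(parents)

print(meaningful_parentrevs((41, -1), 42))   # []       : parent is just rev - 1
print(meaningful_parentrevs((17, -1), 42))   # [17]     : history jumps, worth showing
print(meaningful_parentrevs((41, 30), 42))   # [41, 30] : a merge keeps both parents
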
698 class changeset_templater(changeset_printer):
696 class changeset_templater(changeset_printer):
699 '''format changeset information.'''
697 '''format changeset information.'''
700
698
701 def __init__(self, ui, repo, patch, mapfile, buffered):
699 def __init__(self, ui, repo, patch, mapfile, buffered):
702 changeset_printer.__init__(self, ui, repo, patch, buffered)
700 changeset_printer.__init__(self, ui, repo, patch, buffered)
703 filters = templatefilters.filters.copy()
701 filters = templatefilters.filters.copy()
704 filters['formatnode'] = (ui.debugflag and (lambda x: x)
702 filters['formatnode'] = (ui.debugflag and (lambda x: x)
705 or (lambda x: x[:12]))
703 or (lambda x: x[:12]))
706 self.t = templater.templater(mapfile, filters,
704 self.t = templater.templater(mapfile, filters,
707 cache={
705 cache={
708 'parent': '{rev}:{node|formatnode} ',
706 'parent': '{rev}:{node|formatnode} ',
709 'manifest': '{rev}:{node|formatnode}',
707 'manifest': '{rev}:{node|formatnode}',
710 'filecopy': '{name} ({source})'})
708 'filecopy': '{name} ({source})'})
711
709
712 def use_template(self, t):
710 def use_template(self, t):
713 '''set template string to use'''
711 '''set template string to use'''
714 self.t.cache['changeset'] = t
712 self.t.cache['changeset'] = t
715
713
716 def _show(self, rev, changenode, copies, props):
714 def _show(self, rev, changenode, copies, props):
717 '''show a single changeset or file revision'''
715 '''show a single changeset or file revision'''
718 log = self.repo.changelog
716 log = self.repo.changelog
719 if changenode is None:
717 if changenode is None:
720 changenode = log.node(rev)
718 changenode = log.node(rev)
721 elif not rev:
719 elif not rev:
722 rev = log.rev(changenode)
720 rev = log.rev(changenode)
723
721
724 changes = log.read(changenode)
722 changes = log.read(changenode)
725
723
726 def showlist(name, values, plural=None, **args):
724 def showlist(name, values, plural=None, **args):
727 '''expand set of values.
725 '''expand set of values.
728 name is name of key in template map.
726 name is name of key in template map.
729 values is list of strings or dicts.
727 values is list of strings or dicts.
730 plural is plural of name, if not simply name + 's'.
728 plural is plural of name, if not simply name + 's'.
731
729
732 expansion works like this, given name 'foo'.
730 expansion works like this, given name 'foo'.
733
731
734 if values is empty, expand 'no_foos'.
732 if values is empty, expand 'no_foos'.
735
733
736 if 'foo' not in template map, return values as a string,
734 if 'foo' not in template map, return values as a string,
737 joined by space.
735 joined by space.
738
736
739 expand 'start_foos'.
737 expand 'start_foos'.
740
738
741 for each value, expand 'foo'. if 'last_foo' in template
739 for each value, expand 'foo'. if 'last_foo' in template
742 map, expand it instead of 'foo' for last key.
740 map, expand it instead of 'foo' for last key.
743
741
744 expand 'end_foos'.
742 expand 'end_foos'.
745 '''
743 '''
746 if plural: names = plural
744 if plural: names = plural
747 else: names = name + 's'
745 else: names = name + 's'
748 if not values:
746 if not values:
749 noname = 'no_' + names
747 noname = 'no_' + names
750 if noname in self.t:
748 if noname in self.t:
751 yield self.t(noname, **args)
749 yield self.t(noname, **args)
752 return
750 return
753 if name not in self.t:
751 if name not in self.t:
754 if isinstance(values[0], str):
752 if isinstance(values[0], str):
755 yield ' '.join(values)
753 yield ' '.join(values)
756 else:
754 else:
757 for v in values:
755 for v in values:
758 yield dict(v, **args)
756 yield dict(v, **args)
759 return
757 return
760 startname = 'start_' + names
758 startname = 'start_' + names
761 if startname in self.t:
759 if startname in self.t:
762 yield self.t(startname, **args)
760 yield self.t(startname, **args)
763 vargs = args.copy()
761 vargs = args.copy()
764 def one(v, tag=name):
762 def one(v, tag=name):
765 try:
763 try:
766 vargs.update(v)
764 vargs.update(v)
767 except (AttributeError, ValueError):
765 except (AttributeError, ValueError):
768 try:
766 try:
769 for a, b in v:
767 for a, b in v:
770 vargs[a] = b
768 vargs[a] = b
771 except ValueError:
769 except ValueError:
772 vargs[name] = v
770 vargs[name] = v
773 return self.t(tag, **vargs)
771 return self.t(tag, **vargs)
774 lastname = 'last_' + name
772 lastname = 'last_' + name
775 if lastname in self.t:
773 if lastname in self.t:
776 last = values.pop()
774 last = values.pop()
777 else:
775 else:
778 last = None
776 last = None
779 for v in values:
777 for v in values:
780 yield one(v)
778 yield one(v)
781 if last is not None:
779 if last is not None:
782 yield one(last, tag=lastname)
780 yield one(last, tag=lastname)
783 endname = 'end_' + names
781 endname = 'end_' + names
784 if endname in self.t:
782 if endname in self.t:
785 yield self.t(endname, **args)
783 yield self.t(endname, **args)
786
784
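The expansion order the showlist docstring describes is easier to see with a small, self-contained sketch. The template map, key names, and sample values below are invented for illustration and are not part of this changeset; the real expansion also handles 'last_<name>', which is omitted here.

# Illustrative only: a toy map and a stripped-down expander that follows the
# order described above (empty -> 'no_<names>', else start / each / end).
tmpl = {'start_tags': '[', 'tag': '{tag} ', 'end_tags': ']', 'no_tags': '(none)'}

def expand(name, values, t=tmpl):
    names = name + 's'
    if not values:
        return t.get('no_' + names, '')
    out = [t.get('start_' + names, '')]
    out += [t[name].replace('{%s}' % name, v) for v in values]
    out.append(t.get('end_' + names, ''))
    return ''.join(out)

print(expand('tag', ['tip', 'stable']))  # -> '[tip stable ]'
print(expand('tag', []))                 # -> '(none)'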
787 def showbranches(**args):
785 def showbranches(**args):
788 branch = changes[5].get("branch")
786 branch = changes[5].get("branch")
789 if branch != 'default':
787 if branch != 'default':
790 branch = util.tolocal(branch)
788 branch = util.tolocal(branch)
791 return showlist('branch', [branch], plural='branches', **args)
789 return showlist('branch', [branch], plural='branches', **args)
792
790
793 def showparents(**args):
791 def showparents(**args):
794 parents = [[('rev', p), ('node', hex(log.node(p)))]
792 parents = [[('rev', p), ('node', hex(log.node(p)))]
795 for p in self._meaningful_parentrevs(log, rev)]
793 for p in self._meaningful_parentrevs(log, rev)]
796 return showlist('parent', parents, **args)
794 return showlist('parent', parents, **args)
797
795
798 def showtags(**args):
796 def showtags(**args):
799 return showlist('tag', self.repo.nodetags(changenode), **args)
797 return showlist('tag', self.repo.nodetags(changenode), **args)
800
798
801 def showextras(**args):
799 def showextras(**args):
802 extras = changes[5].items()
800 for key, value in util.sort(changes[5].items()):
803 extras.sort()
804 for key, value in extras:
805 args = args.copy()
801 args = args.copy()
806 args.update(dict(key=key, value=value))
802 args.update(dict(key=key, value=value))
807 yield self.t('extra', **args)
803 yield self.t('extra', **args)
808
804
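The util.sort helper this changeset introduces is not visible in this part of the diff, but every call site here uses it the same way: pass any iterable, get back a sorted list, instead of the old list()/sort() two-step. A minimal sketch of what such a helper would look like (an assumption, since its actual definition is truncated out of this page):

# Assumed shape of the new helper; the real definition lives in util.py and
# is not shown in this truncated diff.
def sort(l):
    """sort an iterable and return it as a list"""
    l = list(l)
    l.sort()
    return l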
809 def showcopies(**args):
805 def showcopies(**args):
810 c = [{'name': x[0], 'source': x[1]} for x in copies]
806 c = [{'name': x[0], 'source': x[1]} for x in copies]
811 return showlist('file_copy', c, plural='file_copies', **args)
807 return showlist('file_copy', c, plural='file_copies', **args)
812
808
813 files = []
809 files = []
814 def getfiles():
810 def getfiles():
815 if not files:
811 if not files:
816 files[:] = self.repo.status(
812 files[:] = self.repo.status(
817 log.parents(changenode)[0], changenode)[:3]
813 log.parents(changenode)[0], changenode)[:3]
818 return files
814 return files
819 def showfiles(**args):
815 def showfiles(**args):
820 return showlist('file', changes[3], **args)
816 return showlist('file', changes[3], **args)
821 def showmods(**args):
817 def showmods(**args):
822 return showlist('file_mod', getfiles()[0], **args)
818 return showlist('file_mod', getfiles()[0], **args)
823 def showadds(**args):
819 def showadds(**args):
824 return showlist('file_add', getfiles()[1], **args)
820 return showlist('file_add', getfiles()[1], **args)
825 def showdels(**args):
821 def showdels(**args):
826 return showlist('file_del', getfiles()[2], **args)
822 return showlist('file_del', getfiles()[2], **args)
827 def showmanifest(**args):
823 def showmanifest(**args):
828 args = args.copy()
824 args = args.copy()
829 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
825 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
830 node=hex(changes[0])))
826 node=hex(changes[0])))
831 return self.t('manifest', **args)
827 return self.t('manifest', **args)
832
828
833 defprops = {
829 defprops = {
834 'author': changes[1],
830 'author': changes[1],
835 'branches': showbranches,
831 'branches': showbranches,
836 'date': changes[2],
832 'date': changes[2],
837 'desc': changes[4].strip(),
833 'desc': changes[4].strip(),
838 'file_adds': showadds,
834 'file_adds': showadds,
839 'file_dels': showdels,
835 'file_dels': showdels,
840 'file_mods': showmods,
836 'file_mods': showmods,
841 'files': showfiles,
837 'files': showfiles,
842 'file_copies': showcopies,
838 'file_copies': showcopies,
843 'manifest': showmanifest,
839 'manifest': showmanifest,
844 'node': hex(changenode),
840 'node': hex(changenode),
845 'parents': showparents,
841 'parents': showparents,
846 'rev': rev,
842 'rev': rev,
847 'tags': showtags,
843 'tags': showtags,
848 'extras': showextras,
844 'extras': showextras,
849 }
845 }
850 props = props.copy()
846 props = props.copy()
851 props.update(defprops)
847 props.update(defprops)
852
848
853 try:
849 try:
854 if self.ui.debugflag and 'header_debug' in self.t:
850 if self.ui.debugflag and 'header_debug' in self.t:
855 key = 'header_debug'
851 key = 'header_debug'
856 elif self.ui.quiet and 'header_quiet' in self.t:
852 elif self.ui.quiet and 'header_quiet' in self.t:
857 key = 'header_quiet'
853 key = 'header_quiet'
858 elif self.ui.verbose and 'header_verbose' in self.t:
854 elif self.ui.verbose and 'header_verbose' in self.t:
859 key = 'header_verbose'
855 key = 'header_verbose'
860 elif 'header' in self.t:
856 elif 'header' in self.t:
861 key = 'header'
857 key = 'header'
862 else:
858 else:
863 key = ''
859 key = ''
864 if key:
860 if key:
865 h = templater.stringify(self.t(key, **props))
861 h = templater.stringify(self.t(key, **props))
866 if self.buffered:
862 if self.buffered:
867 self.header[rev] = h
863 self.header[rev] = h
868 else:
864 else:
869 self.ui.write(h)
865 self.ui.write(h)
870 if self.ui.debugflag and 'changeset_debug' in self.t:
866 if self.ui.debugflag and 'changeset_debug' in self.t:
871 key = 'changeset_debug'
867 key = 'changeset_debug'
872 elif self.ui.quiet and 'changeset_quiet' in self.t:
868 elif self.ui.quiet and 'changeset_quiet' in self.t:
873 key = 'changeset_quiet'
869 key = 'changeset_quiet'
874 elif self.ui.verbose and 'changeset_verbose' in self.t:
870 elif self.ui.verbose and 'changeset_verbose' in self.t:
875 key = 'changeset_verbose'
871 key = 'changeset_verbose'
876 else:
872 else:
877 key = 'changeset'
873 key = 'changeset'
878 self.ui.write(templater.stringify(self.t(key, **props)))
874 self.ui.write(templater.stringify(self.t(key, **props)))
879 self.showpatch(changenode)
875 self.showpatch(changenode)
880 except KeyError, inst:
876 except KeyError, inst:
881 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
877 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
882 inst.args[0]))
878 inst.args[0]))
883 except SyntaxError, inst:
879 except SyntaxError, inst:
884 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
880 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
885
881
886 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
882 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
887 """show one changeset using template or regular display.
883 """show one changeset using template or regular display.
888
884
889 Display format will be the first non-empty hit of:
885 Display format will be the first non-empty hit of:
890 1. option 'template'
886 1. option 'template'
891 2. option 'style'
887 2. option 'style'
892 3. [ui] setting 'logtemplate'
888 3. [ui] setting 'logtemplate'
893 4. [ui] setting 'style'
889 4. [ui] setting 'style'
894 If all of these values are either unset or the empty string,
890 If all of these values are either unset or the empty string,
895 regular display via changeset_printer() is done.
891 regular display via changeset_printer() is done.
896 """
892 """
897 # options
893 # options
898 patch = False
894 patch = False
899 if opts.get('patch'):
895 if opts.get('patch'):
900 patch = matchfn or matchall(repo)
896 patch = matchfn or matchall(repo)
901
897
902 tmpl = opts.get('template')
898 tmpl = opts.get('template')
903 mapfile = None
899 mapfile = None
904 if tmpl:
900 if tmpl:
905 tmpl = templater.parsestring(tmpl, quoted=False)
901 tmpl = templater.parsestring(tmpl, quoted=False)
906 else:
902 else:
907 mapfile = opts.get('style')
903 mapfile = opts.get('style')
908 # ui settings
904 # ui settings
909 if not mapfile:
905 if not mapfile:
910 tmpl = ui.config('ui', 'logtemplate')
906 tmpl = ui.config('ui', 'logtemplate')
911 if tmpl:
907 if tmpl:
912 tmpl = templater.parsestring(tmpl)
908 tmpl = templater.parsestring(tmpl)
913 else:
909 else:
914 mapfile = ui.config('ui', 'style')
910 mapfile = ui.config('ui', 'style')
915
911
916 if tmpl or mapfile:
912 if tmpl or mapfile:
917 if mapfile:
913 if mapfile:
918 if not os.path.split(mapfile)[0]:
914 if not os.path.split(mapfile)[0]:
919 mapname = (templater.templatepath('map-cmdline.' + mapfile)
915 mapname = (templater.templatepath('map-cmdline.' + mapfile)
920 or templater.templatepath(mapfile))
916 or templater.templatepath(mapfile))
921 if mapname: mapfile = mapname
917 if mapname: mapfile = mapname
922 try:
918 try:
923 t = changeset_templater(ui, repo, patch, mapfile, buffered)
919 t = changeset_templater(ui, repo, patch, mapfile, buffered)
924 except SyntaxError, inst:
920 except SyntaxError, inst:
925 raise util.Abort(inst.args[0])
921 raise util.Abort(inst.args[0])
926 if tmpl: t.use_template(tmpl)
922 if tmpl: t.use_template(tmpl)
927 return t
923 return t
928 return changeset_printer(ui, repo, patch, buffered)
924 return changeset_printer(ui, repo, patch, buffered)
929
925
930 def finddate(ui, repo, date):
926 def finddate(ui, repo, date):
931 """Find the tipmost changeset that matches the given date spec"""
927 """Find the tipmost changeset that matches the given date spec"""
932 df = util.matchdate(date)
928 df = util.matchdate(date)
933 get = util.cachefunc(lambda r: repo[r].changeset())
929 get = util.cachefunc(lambda r: repo[r].changeset())
934 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
930 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
935 results = {}
931 results = {}
936 for st, rev, fns in changeiter:
932 for st, rev, fns in changeiter:
937 if st == 'add':
933 if st == 'add':
938 d = get(rev)[2]
934 d = get(rev)[2]
939 if df(d[0]):
935 if df(d[0]):
940 results[rev] = d
936 results[rev] = d
941 elif st == 'iter':
937 elif st == 'iter':
942 if rev in results:
938 if rev in results:
943 ui.status("Found revision %s from %s\n" %
939 ui.status("Found revision %s from %s\n" %
944 (rev, util.datestr(results[rev])))
940 (rev, util.datestr(results[rev])))
945 return str(rev)
941 return str(rev)
946
942
947 raise util.Abort(_("revision matching date not found"))
943 raise util.Abort(_("revision matching date not found"))
948
944
949 def walkchangerevs(ui, repo, pats, change, opts):
945 def walkchangerevs(ui, repo, pats, change, opts):
950 '''Iterate over files and the revs they changed in.
946 '''Iterate over files and the revs they changed in.
951
947
952 Callers most commonly need to iterate backwards over the history
948 Callers most commonly need to iterate backwards over the history
953 they are interested in. Doing so has awful (quadratic-looking)
949 they are interested in. Doing so has awful (quadratic-looking)
954 performance, so we use iterators in a "windowed" way.
950 performance, so we use iterators in a "windowed" way.
955
951
956 We walk a window of revisions in the desired order. Within the
952 We walk a window of revisions in the desired order. Within the
957 window, we first walk forwards to gather data, then in the desired
953 window, we first walk forwards to gather data, then in the desired
958 order (usually backwards) to display it.
954 order (usually backwards) to display it.
959
955
960 This function returns an (iterator, matchfn) tuple. The iterator
956 This function returns an (iterator, matchfn) tuple. The iterator
961 yields 3-tuples. They will be of one of the following forms:
957 yields 3-tuples. They will be of one of the following forms:
962
958
963 "window", incrementing, lastrev: stepping through a window,
959 "window", incrementing, lastrev: stepping through a window,
964 positive if walking forwards through revs, last rev in the
960 positive if walking forwards through revs, last rev in the
965 sequence iterated over - use to reset state for the current window
961 sequence iterated over - use to reset state for the current window
966
962
967 "add", rev, fns: out-of-order traversal of the given file names
963 "add", rev, fns: out-of-order traversal of the given file names
968 fns, which changed during revision rev - use to gather data for
964 fns, which changed during revision rev - use to gather data for
969 possible display
965 possible display
970
966
971 "iter", rev, None: in-order traversal of the revs earlier iterated
967 "iter", rev, None: in-order traversal of the revs earlier iterated
972 over with "add" - use to display data'''
968 over with "add" - use to display data'''
973
969
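A hedged sketch of what a consumer of that (iterator, matchfn) protocol looks like; the function and variable names here are made up for illustration and do not appear in the diff:

def consume(changeiter):
    # gather per-revision data on 'add' (out of order), emit it on 'iter'
    # (in order), and reset whenever a new 'window' starts.
    pending = {}
    for st, rev, fns in changeiter:
        if st == 'window':
            pending.clear()
        elif st == 'add':
            pending[rev] = list(fns)
        elif st == 'iter':
            print('%d: %s' % (rev, ' '.join(pending.get(rev, []))))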
974 def increasing_windows(start, end, windowsize=8, sizelimit=512):
970 def increasing_windows(start, end, windowsize=8, sizelimit=512):
975 if start < end:
971 if start < end:
976 while start < end:
972 while start < end:
977 yield start, min(windowsize, end-start)
973 yield start, min(windowsize, end-start)
978 start += windowsize
974 start += windowsize
979 if windowsize < sizelimit:
975 if windowsize < sizelimit:
980 windowsize *= 2
976 windowsize *= 2
981 else:
977 else:
982 while start > end:
978 while start > end:
983 yield start, min(windowsize, start-end-1)
979 yield start, min(windowsize, start-end-1)
984 start -= windowsize
980 start -= windowsize
985 if windowsize < sizelimit:
981 if windowsize < sizelimit:
986 windowsize *= 2
982 windowsize *= 2
987
983
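For a concrete feel of the windows that generator produces, here is a standalone copy of its forward branch with one sample run; the revision numbers are illustrative, not taken from the diff:

# Window sizes double (8, 16, 32, ...) up to sizelimit; the last window is
# clamped to what remains.
def forward_windows(start, end, windowsize=8, sizelimit=512):
    while start < end:
        yield start, min(windowsize, end - start)
        start += windowsize
        if windowsize < sizelimit:
            windowsize *= 2

print(list(forward_windows(0, 30)))  # [(0, 8), (8, 16), (24, 6)]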
988 m = match(repo, pats, opts)
984 m = match(repo, pats, opts)
989 follow = opts.get('follow') or opts.get('follow_first')
985 follow = opts.get('follow') or opts.get('follow_first')
990
986
991 if not len(repo):
987 if not len(repo):
992 return [], m
988 return [], m
993
989
994 if follow:
990 if follow:
995 defrange = '%s:0' % repo['.'].rev()
991 defrange = '%s:0' % repo['.'].rev()
996 else:
992 else:
997 defrange = '-1:0'
993 defrange = '-1:0'
998 revs = revrange(repo, opts['rev'] or [defrange])
994 revs = revrange(repo, opts['rev'] or [defrange])
999 wanted = {}
995 wanted = {}
1000 slowpath = m.anypats() or opts.get('removed')
996 slowpath = m.anypats() or opts.get('removed')
1001 fncache = {}
997 fncache = {}
1002
998
1003 if not slowpath and not m.files():
999 if not slowpath and not m.files():
1004 # No files, no patterns. Display all revs.
1000 # No files, no patterns. Display all revs.
1005 wanted = dict.fromkeys(revs)
1001 wanted = dict.fromkeys(revs)
1006 copies = []
1002 copies = []
1007 if not slowpath:
1003 if not slowpath:
1008 # Only files, no patterns. Check the history of each file.
1004 # Only files, no patterns. Check the history of each file.
1009 def filerevgen(filelog, node):
1005 def filerevgen(filelog, node):
1010 cl_count = len(repo)
1006 cl_count = len(repo)
1011 if node is None:
1007 if node is None:
1012 last = len(filelog) - 1
1008 last = len(filelog) - 1
1013 else:
1009 else:
1014 last = filelog.rev(node)
1010 last = filelog.rev(node)
1015 for i, window in increasing_windows(last, nullrev):
1011 for i, window in increasing_windows(last, nullrev):
1016 revs = []
1012 revs = []
1017 for j in xrange(i - window, i + 1):
1013 for j in xrange(i - window, i + 1):
1018 n = filelog.node(j)
1014 n = filelog.node(j)
1019 revs.append((filelog.linkrev(n),
1015 revs.append((filelog.linkrev(n),
1020 follow and filelog.renamed(n)))
1016 follow and filelog.renamed(n)))
1021 revs.reverse()
1017 revs.reverse()
1022 for rev in revs:
1018 for rev in revs:
1023 # only yield rev for which we have the changelog, it can
1019 # only yield rev for which we have the changelog, it can
1024 # happen while doing "hg log" during a pull or commit
1020 # happen while doing "hg log" during a pull or commit
1025 if rev[0] < cl_count:
1021 if rev[0] < cl_count:
1026 yield rev
1022 yield rev
1027 def iterfiles():
1023 def iterfiles():
1028 for filename in m.files():
1024 for filename in m.files():
1029 yield filename, None
1025 yield filename, None
1030 for filename_node in copies:
1026 for filename_node in copies:
1031 yield filename_node
1027 yield filename_node
1032 minrev, maxrev = min(revs), max(revs)
1028 minrev, maxrev = min(revs), max(revs)
1033 for file_, node in iterfiles():
1029 for file_, node in iterfiles():
1034 filelog = repo.file(file_)
1030 filelog = repo.file(file_)
1035 if not len(filelog):
1031 if not len(filelog):
1036 if node is None:
1032 if node is None:
1037 # A zero count may be a directory or deleted file, so
1033 # A zero count may be a directory or deleted file, so
1038 # try to find matching entries on the slow path.
1034 # try to find matching entries on the slow path.
1039 slowpath = True
1035 slowpath = True
1040 break
1036 break
1041 else:
1037 else:
1042 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1038 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1043 % (file_, short(node)))
1039 % (file_, short(node)))
1044 continue
1040 continue
1045 for rev, copied in filerevgen(filelog, node):
1041 for rev, copied in filerevgen(filelog, node):
1046 if rev <= maxrev:
1042 if rev <= maxrev:
1047 if rev < minrev:
1043 if rev < minrev:
1048 break
1044 break
1049 fncache.setdefault(rev, [])
1045 fncache.setdefault(rev, [])
1050 fncache[rev].append(file_)
1046 fncache[rev].append(file_)
1051 wanted[rev] = 1
1047 wanted[rev] = 1
1052 if follow and copied:
1048 if follow and copied:
1053 copies.append(copied)
1049 copies.append(copied)
1054 if slowpath:
1050 if slowpath:
1055 if follow:
1051 if follow:
1056 raise util.Abort(_('can only follow copies/renames for explicit '
1052 raise util.Abort(_('can only follow copies/renames for explicit '
1057 'file names'))
1053 'file names'))
1058
1054
1059 # The slow path checks files modified in every changeset.
1055 # The slow path checks files modified in every changeset.
1060 def changerevgen():
1056 def changerevgen():
1061 for i, window in increasing_windows(len(repo) - 1, nullrev):
1057 for i, window in increasing_windows(len(repo) - 1, nullrev):
1062 for j in xrange(i - window, i + 1):
1058 for j in xrange(i - window, i + 1):
1063 yield j, change(j)[3]
1059 yield j, change(j)[3]
1064
1060
1065 for rev, changefiles in changerevgen():
1061 for rev, changefiles in changerevgen():
1066 matches = filter(m, changefiles)
1062 matches = filter(m, changefiles)
1067 if matches:
1063 if matches:
1068 fncache[rev] = matches
1064 fncache[rev] = matches
1069 wanted[rev] = 1
1065 wanted[rev] = 1
1070
1066
1071 class followfilter:
1067 class followfilter:
1072 def __init__(self, onlyfirst=False):
1068 def __init__(self, onlyfirst=False):
1073 self.startrev = nullrev
1069 self.startrev = nullrev
1074 self.roots = []
1070 self.roots = []
1075 self.onlyfirst = onlyfirst
1071 self.onlyfirst = onlyfirst
1076
1072
1077 def match(self, rev):
1073 def match(self, rev):
1078 def realparents(rev):
1074 def realparents(rev):
1079 if self.onlyfirst:
1075 if self.onlyfirst:
1080 return repo.changelog.parentrevs(rev)[0:1]
1076 return repo.changelog.parentrevs(rev)[0:1]
1081 else:
1077 else:
1082 return filter(lambda x: x != nullrev,
1078 return filter(lambda x: x != nullrev,
1083 repo.changelog.parentrevs(rev))
1079 repo.changelog.parentrevs(rev))
1084
1080
1085 if self.startrev == nullrev:
1081 if self.startrev == nullrev:
1086 self.startrev = rev
1082 self.startrev = rev
1087 return True
1083 return True
1088
1084
1089 if rev > self.startrev:
1085 if rev > self.startrev:
1090 # forward: all descendants
1086 # forward: all descendants
1091 if not self.roots:
1087 if not self.roots:
1092 self.roots.append(self.startrev)
1088 self.roots.append(self.startrev)
1093 for parent in realparents(rev):
1089 for parent in realparents(rev):
1094 if parent in self.roots:
1090 if parent in self.roots:
1095 self.roots.append(rev)
1091 self.roots.append(rev)
1096 return True
1092 return True
1097 else:
1093 else:
1098 # backwards: all parents
1094 # backwards: all parents
1099 if not self.roots:
1095 if not self.roots:
1100 self.roots.extend(realparents(self.startrev))
1096 self.roots.extend(realparents(self.startrev))
1101 if rev in self.roots:
1097 if rev in self.roots:
1102 self.roots.remove(rev)
1098 self.roots.remove(rev)
1103 self.roots.extend(realparents(rev))
1099 self.roots.extend(realparents(rev))
1104 return True
1100 return True
1105
1101
1106 return False
1102 return False
1107
1103
1108 # it might be worthwhile to do this in the iterator if the rev range
1104 # it might be worthwhile to do this in the iterator if the rev range
1109 # is descending and the prune args are all within that range
1105 # is descending and the prune args are all within that range
1110 for rev in opts.get('prune', ()):
1106 for rev in opts.get('prune', ()):
1111 rev = repo.changelog.rev(repo.lookup(rev))
1107 rev = repo.changelog.rev(repo.lookup(rev))
1112 ff = followfilter()
1108 ff = followfilter()
1113 stop = min(revs[0], revs[-1])
1109 stop = min(revs[0], revs[-1])
1114 for x in xrange(rev, stop-1, -1):
1110 for x in xrange(rev, stop-1, -1):
1115 if ff.match(x) and x in wanted:
1111 if ff.match(x) and x in wanted:
1116 del wanted[x]
1112 del wanted[x]
1117
1113
1118 def iterate():
1114 def iterate():
1119 if follow and not m.files():
1115 if follow and not m.files():
1120 ff = followfilter(onlyfirst=opts.get('follow_first'))
1116 ff = followfilter(onlyfirst=opts.get('follow_first'))
1121 def want(rev):
1117 def want(rev):
1122 if ff.match(rev) and rev in wanted:
1118 if ff.match(rev) and rev in wanted:
1123 return True
1119 return True
1124 return False
1120 return False
1125 else:
1121 else:
1126 def want(rev):
1122 def want(rev):
1127 return rev in wanted
1123 return rev in wanted
1128
1124
1129 for i, window in increasing_windows(0, len(revs)):
1125 for i, window in increasing_windows(0, len(revs)):
1130 yield 'window', revs[0] < revs[-1], revs[-1]
1126 yield 'window', revs[0] < revs[-1], revs[-1]
1131 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1127 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1132 srevs = list(nrevs)
1128 for rev in util.sort(list(nrevs)):
1133 srevs.sort()
1134 for rev in srevs:
1135 fns = fncache.get(rev)
1129 fns = fncache.get(rev)
1136 if not fns:
1130 if not fns:
1137 def fns_generator():
1131 def fns_generator():
1138 for f in change(rev)[3]:
1132 for f in change(rev)[3]:
1139 if m(f):
1133 if m(f):
1140 yield f
1134 yield f
1141 fns = fns_generator()
1135 fns = fns_generator()
1142 yield 'add', rev, fns
1136 yield 'add', rev, fns
1143 for rev in nrevs:
1137 for rev in nrevs:
1144 yield 'iter', rev, None
1138 yield 'iter', rev, None
1145 return iterate(), m
1139 return iterate(), m
1146
1140
1147 def commit(ui, repo, commitfunc, pats, opts):
1141 def commit(ui, repo, commitfunc, pats, opts):
1148 '''commit the specified files or all outstanding changes'''
1142 '''commit the specified files or all outstanding changes'''
1149 date = opts.get('date')
1143 date = opts.get('date')
1150 if date:
1144 if date:
1151 opts['date'] = util.parsedate(date)
1145 opts['date'] = util.parsedate(date)
1152 message = logmessage(opts)
1146 message = logmessage(opts)
1153
1147
1154 # extract addremove carefully -- this function can be called from a command
1148 # extract addremove carefully -- this function can be called from a command
1155 # that doesn't support addremove
1149 # that doesn't support addremove
1156 if opts.get('addremove'):
1150 if opts.get('addremove'):
1157 addremove(repo, pats, opts)
1151 addremove(repo, pats, opts)
1158
1152
1159 m = match(repo, pats, opts)
1153 m = match(repo, pats, opts)
1160 if pats:
1154 if pats:
1161 modified, added, removed = repo.status(match=m)[:3]
1155 modified, added, removed = repo.status(match=m)[:3]
1162 files = modified + added + removed
1156 files = util.sort(modified + added + removed)
1163 slist = None
1157 slist = None
1164 for f in m.files():
1158 for f in m.files():
1165 if f == '.':
1159 if f == '.':
1166 continue
1160 continue
1167 if f not in files:
1161 if f not in files:
1168 rf = repo.wjoin(f)
1162 rf = repo.wjoin(f)
1169 rel = repo.pathto(f)
1163 rel = repo.pathto(f)
1170 try:
1164 try:
1171 mode = os.lstat(rf)[stat.ST_MODE]
1165 mode = os.lstat(rf)[stat.ST_MODE]
1172 except OSError:
1166 except OSError:
1173 raise util.Abort(_("file %s not found!") % rel)
1167 raise util.Abort(_("file %s not found!") % rel)
1174 if stat.S_ISDIR(mode):
1168 if stat.S_ISDIR(mode):
1175 name = f + '/'
1169 name = f + '/'
1176 if slist is None:
1170 i = bisect.bisect(files, name)
1177 slist = list(files)
1171 if i >= len(files) or not files[i].startswith(name):
1178 slist.sort()
1179 i = bisect.bisect(slist, name)
1180 if i >= len(slist) or not slist[i].startswith(name):
1181 raise util.Abort(_("no match under directory %s!")
1172 raise util.Abort(_("no match under directory %s!")
1182 % rel)
1173 % rel)
1183 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1174 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1184 raise util.Abort(_("can't commit %s: "
1175 raise util.Abort(_("can't commit %s: "
1185 "unsupported file type!") % rel)
1176 "unsupported file type!") % rel)
1186 elif f not in repo.dirstate:
1177 elif f not in repo.dirstate:
1187 raise util.Abort(_("file %s not tracked!") % rel)
1178 raise util.Abort(_("file %s not tracked!") % rel)
1188 m = matchfiles(repo, files)
1179 m = matchfiles(repo, files)
1189 try:
1180 try:
1190 return commitfunc(ui, repo, message, m, opts)
1181 return commitfunc(ui, repo, message, m, opts)
1191 except ValueError, inst:
1182 except ValueError, inst:
1192 raise util.Abort(str(inst))
1183 raise util.Abort(str(inst))
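One consequence of building `files` with util.sort above is that the new directory check can use a single bisect probe instead of sorting a separate copy: any tracked path under "<dir>/" sorts immediately after the prefix string itself. A standalone illustration (the file names and helper name are invented):

import bisect

def has_file_under(sorted_files, dirname):
    # sorted_files must already be sorted, as they are after util.sort() above
    prefix = dirname + '/'
    i = bisect.bisect(sorted_files, prefix)
    return i < len(sorted_files) and sorted_files[i].startswith(prefix)

files = ['a.txt', 'src/main.py', 'src/util.py']
print(has_file_under(files, 'src'))   # True
print(has_file_under(files, 'docs'))  # False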
@@ -1,3315 +1,3300 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from repo import RepoError, NoCapability
9 from repo import RepoError, NoCapability
10 from i18n import _
10 from i18n import _
11 import os, re, sys, urllib
11 import os, re, sys, urllib
12 import hg, util, revlog, bundlerepo, extensions, copies
12 import hg, util, revlog, bundlerepo, extensions, copies
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import version, socket
14 import version, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
16 import merge as merge_
16 import merge as merge_
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
20 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
21 """add the specified files on the next commit
21 """add the specified files on the next commit
22
22
23 Schedule files to be version controlled and added to the repository.
23 Schedule files to be version controlled and added to the repository.
24
24
25 The files will be added to the repository at the next commit. To
25 The files will be added to the repository at the next commit. To
26 undo an add before that, see hg revert.
26 undo an add before that, see hg revert.
27
27
28 If no names are given, add all files in the repository.
28 If no names are given, add all files in the repository.
29 """
29 """
30
30
31 rejected = None
31 rejected = None
32 exacts = {}
32 exacts = {}
33 names = []
33 names = []
34 m = cmdutil.match(repo, pats, opts)
34 m = cmdutil.match(repo, pats, opts)
35 m.bad = lambda x,y: True
35 m.bad = lambda x,y: True
36 for abs in repo.walk(m):
36 for abs in repo.walk(m):
37 if m.exact(abs):
37 if m.exact(abs):
38 if ui.verbose:
38 if ui.verbose:
39 ui.status(_('adding %s\n') % m.rel(abs))
39 ui.status(_('adding %s\n') % m.rel(abs))
40 names.append(abs)
40 names.append(abs)
41 exacts[abs] = 1
41 exacts[abs] = 1
42 elif abs not in repo.dirstate:
42 elif abs not in repo.dirstate:
43 ui.status(_('adding %s\n') % m.rel(abs))
43 ui.status(_('adding %s\n') % m.rel(abs))
44 names.append(abs)
44 names.append(abs)
45 if not opts.get('dry_run'):
45 if not opts.get('dry_run'):
46 rejected = repo.add(names)
46 rejected = repo.add(names)
47 rejected = [p for p in rejected if p in exacts]
47 rejected = [p for p in rejected if p in exacts]
48 return rejected and 1 or 0
48 return rejected and 1 or 0
49
49
50 def addremove(ui, repo, *pats, **opts):
50 def addremove(ui, repo, *pats, **opts):
51 """add all new files, delete all missing files
51 """add all new files, delete all missing files
52
52
53 Add all new files and remove all missing files from the repository.
53 Add all new files and remove all missing files from the repository.
54
54
55 New files are ignored if they match any of the patterns in .hgignore. As
55 New files are ignored if they match any of the patterns in .hgignore. As
56 with add, these changes take effect at the next commit.
56 with add, these changes take effect at the next commit.
57
57
58 Use the -s option to detect renamed files. With a parameter > 0,
58 Use the -s option to detect renamed files. With a parameter > 0,
59 this compares every removed file with every added file and records
59 this compares every removed file with every added file and records
60 those similar enough as renames. This option takes a percentage
60 those similar enough as renames. This option takes a percentage
61 between 0 (disabled) and 100 (files must be identical) as its
61 between 0 (disabled) and 100 (files must be identical) as its
62 parameter. Detecting renamed files this way can be expensive.
62 parameter. Detecting renamed files this way can be expensive.
63 """
63 """
64 try:
64 try:
65 sim = float(opts.get('similarity') or 0)
65 sim = float(opts.get('similarity') or 0)
66 except ValueError:
66 except ValueError:
67 raise util.Abort(_('similarity must be a number'))
67 raise util.Abort(_('similarity must be a number'))
68 if sim < 0 or sim > 100:
68 if sim < 0 or sim > 100:
69 raise util.Abort(_('similarity must be between 0 and 100'))
69 raise util.Abort(_('similarity must be between 0 and 100'))
70 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
70 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
71
71
72 def annotate(ui, repo, *pats, **opts):
72 def annotate(ui, repo, *pats, **opts):
73 """show changeset information per file line
73 """show changeset information per file line
74
74
75 List changes in files, showing the revision id responsible for each line
75 List changes in files, showing the revision id responsible for each line
76
76
77 This command is useful to discover who did a change or when a change took
77 This command is useful to discover who did a change or when a change took
78 place.
78 place.
79
79
80 Without the -a option, annotate will avoid processing files it
80 Without the -a option, annotate will avoid processing files it
81 detects as binary. With -a, annotate will generate an annotation
81 detects as binary. With -a, annotate will generate an annotation
82 anyway, probably with undesirable results.
82 anyway, probably with undesirable results.
83 """
83 """
84 datefunc = ui.quiet and util.shortdate or util.datestr
84 datefunc = ui.quiet and util.shortdate or util.datestr
85 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
85 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
86
86
87 if not pats:
87 if not pats:
88 raise util.Abort(_('at least one file name or pattern required'))
88 raise util.Abort(_('at least one file name or pattern required'))
89
89
90 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
90 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
91 ('number', lambda x: str(x[0].rev())),
91 ('number', lambda x: str(x[0].rev())),
92 ('changeset', lambda x: short(x[0].node())),
92 ('changeset', lambda x: short(x[0].node())),
93 ('date', getdate),
93 ('date', getdate),
94 ('follow', lambda x: x[0].path()),
94 ('follow', lambda x: x[0].path()),
95 ]
95 ]
96
96
97 if (not opts['user'] and not opts['changeset'] and not opts['date']
97 if (not opts['user'] and not opts['changeset'] and not opts['date']
98 and not opts['follow']):
98 and not opts['follow']):
99 opts['number'] = 1
99 opts['number'] = 1
100
100
101 linenumber = opts.get('line_number') is not None
101 linenumber = opts.get('line_number') is not None
102 if (linenumber and (not opts['changeset']) and (not opts['number'])):
102 if (linenumber and (not opts['changeset']) and (not opts['number'])):
103 raise util.Abort(_('at least one of -n/-c is required for -l'))
103 raise util.Abort(_('at least one of -n/-c is required for -l'))
104
104
105 funcmap = [func for op, func in opmap if opts.get(op)]
105 funcmap = [func for op, func in opmap if opts.get(op)]
106 if linenumber:
106 if linenumber:
107 lastfunc = funcmap[-1]
107 lastfunc = funcmap[-1]
108 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
108 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
109
109
110 ctx = repo[opts['rev']]
110 ctx = repo[opts['rev']]
111
111
112 m = cmdutil.match(repo, pats, opts)
112 m = cmdutil.match(repo, pats, opts)
113 for abs in repo.walk(m, ctx.node()):
113 for abs in repo.walk(m, ctx.node()):
114 fctx = ctx.filectx(abs)
114 fctx = ctx.filectx(abs)
115 if not opts['text'] and util.binary(fctx.data()):
115 if not opts['text'] and util.binary(fctx.data()):
116 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
116 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
117 continue
117 continue
118
118
119 lines = fctx.annotate(follow=opts.get('follow'),
119 lines = fctx.annotate(follow=opts.get('follow'),
120 linenumber=linenumber)
120 linenumber=linenumber)
121 pieces = []
121 pieces = []
122
122
123 for f in funcmap:
123 for f in funcmap:
124 l = [f(n) for n, dummy in lines]
124 l = [f(n) for n, dummy in lines]
125 if l:
125 if l:
126 m = max(map(len, l))
126 m = max(map(len, l))
127 pieces.append(["%*s" % (m, x) for x in l])
127 pieces.append(["%*s" % (m, x) for x in l])
128
128
129 if pieces:
129 if pieces:
130 for p, l in zip(zip(*pieces), lines):
130 for p, l in zip(zip(*pieces), lines):
131 ui.write("%s: %s" % (" ".join(p), l[1]))
131 ui.write("%s: %s" % (" ".join(p), l[1]))
132
132
133 def archive(ui, repo, dest, **opts):
133 def archive(ui, repo, dest, **opts):
134 '''create unversioned archive of a repository revision
134 '''create unversioned archive of a repository revision
135
135
136 By default, the revision used is the parent of the working
136 By default, the revision used is the parent of the working
137 directory; use "-r" to specify a different revision.
137 directory; use "-r" to specify a different revision.
138
138
139 To specify the type of archive to create, use "-t". Valid
139 To specify the type of archive to create, use "-t". Valid
140 types are:
140 types are:
141
141
142 "files" (default): a directory full of files
142 "files" (default): a directory full of files
143 "tar": tar archive, uncompressed
143 "tar": tar archive, uncompressed
144 "tbz2": tar archive, compressed using bzip2
144 "tbz2": tar archive, compressed using bzip2
145 "tgz": tar archive, compressed using gzip
145 "tgz": tar archive, compressed using gzip
146 "uzip": zip archive, uncompressed
146 "uzip": zip archive, uncompressed
147 "zip": zip archive, compressed using deflate
147 "zip": zip archive, compressed using deflate
148
148
149 The exact name of the destination archive or directory is given
149 The exact name of the destination archive or directory is given
150 using a format string; see "hg help export" for details.
150 using a format string; see "hg help export" for details.
151
151
152 Each member added to an archive file has a directory prefix
152 Each member added to an archive file has a directory prefix
153 prepended. Use "-p" to specify a format string for the prefix.
153 prepended. Use "-p" to specify a format string for the prefix.
154 The default is the basename of the archive, with suffixes removed.
154 The default is the basename of the archive, with suffixes removed.
155 '''
155 '''
156
156
157 ctx = repo[opts['rev']]
157 ctx = repo[opts['rev']]
158 if not ctx:
158 if not ctx:
159 raise util.Abort(_('repository has no revisions'))
159 raise util.Abort(_('repository has no revisions'))
160 node = ctx.node()
160 node = ctx.node()
161 dest = cmdutil.make_filename(repo, dest, node)
161 dest = cmdutil.make_filename(repo, dest, node)
162 if os.path.realpath(dest) == repo.root:
162 if os.path.realpath(dest) == repo.root:
163 raise util.Abort(_('repository root cannot be destination'))
163 raise util.Abort(_('repository root cannot be destination'))
164 matchfn = cmdutil.match(repo, [], opts)
164 matchfn = cmdutil.match(repo, [], opts)
165 kind = opts.get('type') or 'files'
165 kind = opts.get('type') or 'files'
166 prefix = opts['prefix']
166 prefix = opts['prefix']
167 if dest == '-':
167 if dest == '-':
168 if kind == 'files':
168 if kind == 'files':
169 raise util.Abort(_('cannot archive plain files to stdout'))
169 raise util.Abort(_('cannot archive plain files to stdout'))
170 dest = sys.stdout
170 dest = sys.stdout
171 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
171 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
172 prefix = cmdutil.make_filename(repo, prefix, node)
172 prefix = cmdutil.make_filename(repo, prefix, node)
173 archival.archive(repo, dest, node, kind, not opts['no_decode'],
173 archival.archive(repo, dest, node, kind, not opts['no_decode'],
174 matchfn, prefix)
174 matchfn, prefix)
175
175
176 def backout(ui, repo, node=None, rev=None, **opts):
176 def backout(ui, repo, node=None, rev=None, **opts):
177 '''reverse effect of earlier changeset
177 '''reverse effect of earlier changeset
178
178
179 Commit the backed out changes as a new changeset. The new
179 Commit the backed out changes as a new changeset. The new
180 changeset is a child of the backed out changeset.
180 changeset is a child of the backed out changeset.
181
181
182 If you back out a changeset other than the tip, a new head is
182 If you back out a changeset other than the tip, a new head is
183 created. This head will be the new tip and you should merge this
183 created. This head will be the new tip and you should merge this
184 backout changeset with another head (current one by default).
184 backout changeset with another head (current one by default).
185
185
186 The --merge option remembers the parent of the working directory
186 The --merge option remembers the parent of the working directory
187 before starting the backout, then merges the new head with that
187 before starting the backout, then merges the new head with that
188 changeset afterwards. This saves you from doing the merge by
188 changeset afterwards. This saves you from doing the merge by
189 hand. The result of this merge is not committed, as for a normal
189 hand. The result of this merge is not committed, as for a normal
190 merge.
190 merge.
191
191
192 See \'hg help dates\' for a list of formats valid for -d/--date.
192 See \'hg help dates\' for a list of formats valid for -d/--date.
193 '''
193 '''
194 if rev and node:
194 if rev and node:
195 raise util.Abort(_("please specify just one revision"))
195 raise util.Abort(_("please specify just one revision"))
196
196
197 if not rev:
197 if not rev:
198 rev = node
198 rev = node
199
199
200 if not rev:
200 if not rev:
201 raise util.Abort(_("please specify a revision to backout"))
201 raise util.Abort(_("please specify a revision to backout"))
202
202
203 date = opts.get('date')
203 date = opts.get('date')
204 if date:
204 if date:
205 opts['date'] = util.parsedate(date)
205 opts['date'] = util.parsedate(date)
206
206
207 cmdutil.bail_if_changed(repo)
207 cmdutil.bail_if_changed(repo)
208 node = repo.lookup(rev)
208 node = repo.lookup(rev)
209
209
210 op1, op2 = repo.dirstate.parents()
210 op1, op2 = repo.dirstate.parents()
211 a = repo.changelog.ancestor(op1, node)
211 a = repo.changelog.ancestor(op1, node)
212 if a != node:
212 if a != node:
213 raise util.Abort(_('cannot back out change on a different branch'))
213 raise util.Abort(_('cannot back out change on a different branch'))
214
214
215 p1, p2 = repo.changelog.parents(node)
215 p1, p2 = repo.changelog.parents(node)
216 if p1 == nullid:
216 if p1 == nullid:
217 raise util.Abort(_('cannot back out a change with no parents'))
217 raise util.Abort(_('cannot back out a change with no parents'))
218 if p2 != nullid:
218 if p2 != nullid:
219 if not opts['parent']:
219 if not opts['parent']:
220 raise util.Abort(_('cannot back out a merge changeset without '
220 raise util.Abort(_('cannot back out a merge changeset without '
221 '--parent'))
221 '--parent'))
222 p = repo.lookup(opts['parent'])
222 p = repo.lookup(opts['parent'])
223 if p not in (p1, p2):
223 if p not in (p1, p2):
224 raise util.Abort(_('%s is not a parent of %s') %
224 raise util.Abort(_('%s is not a parent of %s') %
225 (short(p), short(node)))
225 (short(p), short(node)))
226 parent = p
226 parent = p
227 else:
227 else:
228 if opts['parent']:
228 if opts['parent']:
229 raise util.Abort(_('cannot use --parent on non-merge changeset'))
229 raise util.Abort(_('cannot use --parent on non-merge changeset'))
230 parent = p1
230 parent = p1
231
231
232 # the backout should appear on the same branch
232 # the backout should appear on the same branch
233 branch = repo.dirstate.branch()
233 branch = repo.dirstate.branch()
234 hg.clean(repo, node, show_stats=False)
234 hg.clean(repo, node, show_stats=False)
235 repo.dirstate.setbranch(branch)
235 repo.dirstate.setbranch(branch)
236 revert_opts = opts.copy()
236 revert_opts = opts.copy()
237 revert_opts['date'] = None
237 revert_opts['date'] = None
238 revert_opts['all'] = True
238 revert_opts['all'] = True
239 revert_opts['rev'] = hex(parent)
239 revert_opts['rev'] = hex(parent)
240 revert_opts['no_backup'] = None
240 revert_opts['no_backup'] = None
241 revert(ui, repo, **revert_opts)
241 revert(ui, repo, **revert_opts)
242 commit_opts = opts.copy()
242 commit_opts = opts.copy()
243 commit_opts['addremove'] = False
243 commit_opts['addremove'] = False
244 if not commit_opts['message'] and not commit_opts['logfile']:
244 if not commit_opts['message'] and not commit_opts['logfile']:
245 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
245 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
246 commit_opts['force_editor'] = True
246 commit_opts['force_editor'] = True
247 commit(ui, repo, **commit_opts)
247 commit(ui, repo, **commit_opts)
248 def nice(node):
248 def nice(node):
249 return '%d:%s' % (repo.changelog.rev(node), short(node))
249 return '%d:%s' % (repo.changelog.rev(node), short(node))
250 ui.status(_('changeset %s backs out changeset %s\n') %
250 ui.status(_('changeset %s backs out changeset %s\n') %
251 (nice(repo.changelog.tip()), nice(node)))
251 (nice(repo.changelog.tip()), nice(node)))
252 if op1 != node:
252 if op1 != node:
253 hg.clean(repo, op1, show_stats=False)
253 hg.clean(repo, op1, show_stats=False)
254 if opts['merge']:
254 if opts['merge']:
255 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
255 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
256 hg.merge(repo, hex(repo.changelog.tip()))
256 hg.merge(repo, hex(repo.changelog.tip()))
257 else:
257 else:
258 ui.status(_('the backout changeset is a new head - '
258 ui.status(_('the backout changeset is a new head - '
259 'do not forget to merge\n'))
259 'do not forget to merge\n'))
260 ui.status(_('(use "backout --merge" '
260 ui.status(_('(use "backout --merge" '
261 'if you want to auto-merge)\n'))
261 'if you want to auto-merge)\n'))
262
262
263 def bisect(ui, repo, rev=None, extra=None,
263 def bisect(ui, repo, rev=None, extra=None,
264 reset=None, good=None, bad=None, skip=None, noupdate=None):
264 reset=None, good=None, bad=None, skip=None, noupdate=None):
265 """subdivision search of changesets
265 """subdivision search of changesets
266
266
267 This command helps to find changesets which introduce problems.
267 This command helps to find changesets which introduce problems.
268 To use, mark the earliest changeset you know exhibits the problem
268 To use, mark the earliest changeset you know exhibits the problem
269 as bad, then mark the latest changeset which is free from the
269 as bad, then mark the latest changeset which is free from the
270 problem as good. Bisect will update your working directory to a
270 problem as good. Bisect will update your working directory to a
271 revision for testing. Once you have performed tests, mark the
271 revision for testing. Once you have performed tests, mark the
272 working directory as bad or good and bisect will either update to
272 working directory as bad or good and bisect will either update to
273 another candidate changeset or announce that it has found the bad
273 another candidate changeset or announce that it has found the bad
274 revision.
274 revision.
275 """
275 """
276 # backward compatibility
276 # backward compatibility
277 if rev in "good bad reset init".split():
277 if rev in "good bad reset init".split():
278 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
278 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
279 cmd, rev, extra = rev, extra, None
279 cmd, rev, extra = rev, extra, None
280 if cmd == "good":
280 if cmd == "good":
281 good = True
281 good = True
282 elif cmd == "bad":
282 elif cmd == "bad":
283 bad = True
283 bad = True
284 else:
284 else:
285 reset = True
285 reset = True
286 elif extra or good + bad + skip + reset > 1:
286 elif extra or good + bad + skip + reset > 1:
287 raise util.Abort("Incompatible arguments")
287 raise util.Abort("Incompatible arguments")
288
288
289 if reset:
289 if reset:
290 p = repo.join("bisect.state")
290 p = repo.join("bisect.state")
291 if os.path.exists(p):
291 if os.path.exists(p):
292 os.unlink(p)
292 os.unlink(p)
293 return
293 return
294
294
295 # load state
295 # load state
296 state = {'good': [], 'bad': [], 'skip': []}
296 state = {'good': [], 'bad': [], 'skip': []}
297 if os.path.exists(repo.join("bisect.state")):
297 if os.path.exists(repo.join("bisect.state")):
298 for l in repo.opener("bisect.state"):
298 for l in repo.opener("bisect.state"):
299 kind, node = l[:-1].split()
299 kind, node = l[:-1].split()
300 node = repo.lookup(node)
300 node = repo.lookup(node)
301 if kind not in state:
301 if kind not in state:
302 raise util.Abort(_("unknown bisect kind %s") % kind)
302 raise util.Abort(_("unknown bisect kind %s") % kind)
303 state[kind].append(node)
303 state[kind].append(node)
304
304
305 # update state
305 # update state
306 node = repo.lookup(rev or '.')
306 node = repo.lookup(rev or '.')
307 if good:
307 if good:
308 state['good'].append(node)
308 state['good'].append(node)
309 elif bad:
309 elif bad:
310 state['bad'].append(node)
310 state['bad'].append(node)
311 elif skip:
311 elif skip:
312 state['skip'].append(node)
312 state['skip'].append(node)
313
313
314 # save state
314 # save state
315 f = repo.opener("bisect.state", "w", atomictemp=True)
315 f = repo.opener("bisect.state", "w", atomictemp=True)
316 wlock = repo.wlock()
316 wlock = repo.wlock()
317 try:
317 try:
318 for kind in state:
318 for kind in state:
319 for node in state[kind]:
319 for node in state[kind]:
320 f.write("%s %s\n" % (kind, hex(node)))
320 f.write("%s %s\n" % (kind, hex(node)))
321 f.rename()
321 f.rename()
322 finally:
322 finally:
323 del wlock
323 del wlock
324
324
325 if not state['good'] or not state['bad']:
325 if not state['good'] or not state['bad']:
326 return
326 return
327
327
328 # actually bisect
328 # actually bisect
329 node, changesets, good = hbisect.bisect(repo.changelog, state)
329 node, changesets, good = hbisect.bisect(repo.changelog, state)
330 if changesets == 0:
330 if changesets == 0:
331 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
331 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
332 displayer = cmdutil.show_changeset(ui, repo, {})
332 displayer = cmdutil.show_changeset(ui, repo, {})
333 displayer.show(changenode=node)
333 displayer.show(changenode=node)
334 elif node is not None:
334 elif node is not None:
335 # compute the approximate number of remaining tests
335 # compute the approximate number of remaining tests
336 tests, size = 0, 2
336 tests, size = 0, 2
337 while size <= changesets:
337 while size <= changesets:
338 tests, size = tests + 1, size * 2
338 tests, size = tests + 1, size * 2
339 rev = repo.changelog.rev(node)
339 rev = repo.changelog.rev(node)
340 ui.write(_("Testing changeset %s:%s "
340 ui.write(_("Testing changeset %s:%s "
341 "(%s changesets remaining, ~%s tests)\n")
341 "(%s changesets remaining, ~%s tests)\n")
342 % (rev, short(node), changesets, tests))
342 % (rev, short(node), changesets, tests))
343 if not noupdate:
343 if not noupdate:
344 cmdutil.bail_if_changed(repo)
344 cmdutil.bail_if_changed(repo)
345 return hg.clean(repo, node)
345 return hg.clean(repo, node)
346
346
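The "remaining tests" figure reported above is essentially a base-2 logarithm of the candidate count. A standalone version of that doubling loop, with one sample value for illustration:

def remaining_tests(changesets):
    # same loop as above: roughly ceil(log2(N)) probes remain for N candidates
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

print(remaining_tests(100))  # 6, since 2**6 = 64 <= 100 < 128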
347 def branch(ui, repo, label=None, **opts):
347 def branch(ui, repo, label=None, **opts):
348 """set or show the current branch name
348 """set or show the current branch name
349
349
350 With no argument, show the current branch name. With one argument,
350 With no argument, show the current branch name. With one argument,
351 set the working directory branch name (the branch does not exist in
351 set the working directory branch name (the branch does not exist in
352 the repository until the next commit).
352 the repository until the next commit).
353
353
354 Unless --force is specified, branch will not let you set a
354 Unless --force is specified, branch will not let you set a
355 branch name that shadows an existing branch.
355 branch name that shadows an existing branch.
356
356
357 Use the command 'hg update' to switch to an existing branch.
357 Use the command 'hg update' to switch to an existing branch.
358 """
358 """
359
359
360 if label:
360 if label:
361 if not opts.get('force') and label in repo.branchtags():
361 if not opts.get('force') and label in repo.branchtags():
362 if label not in [p.branch() for p in repo.parents()]:
362 if label not in [p.branch() for p in repo.parents()]:
363 raise util.Abort(_('a branch of the same name already exists'
363 raise util.Abort(_('a branch of the same name already exists'
364 ' (use --force to override)'))
364 ' (use --force to override)'))
365 repo.dirstate.setbranch(util.fromlocal(label))
365 repo.dirstate.setbranch(util.fromlocal(label))
366 ui.status(_('marked working directory as branch %s\n') % label)
366 ui.status(_('marked working directory as branch %s\n') % label)
367 else:
367 else:
368 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
368 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
369
369
370 def branches(ui, repo, active=False):
370 def branches(ui, repo, active=False):
371 """list repository named branches
371 """list repository named branches
372
372
373 List the repository's named branches, indicating which ones are
373 List the repository's named branches, indicating which ones are
374 inactive. If active is specified, only show active branches.
374 inactive. If active is specified, only show active branches.
375
375
376 A branch is considered active if it contains repository heads.
376 A branch is considered active if it contains repository heads.
377
377
378 Use the command 'hg update' to switch to an existing branch.
378 Use the command 'hg update' to switch to an existing branch.
379 """
379 """
380 hexfunc = ui.debugflag and hex or short
380 hexfunc = ui.debugflag and hex or short
381 activebranches = [util.tolocal(repo[n].branch())
381 activebranches = [util.tolocal(repo[n].branch())
382 for n in repo.heads()]
382 for n in repo.heads()]
383 branches = [(tag in activebranches, repo.changelog.rev(node), tag)
383 branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
384 for tag, node in repo.branchtags().items()]
384 for tag, node in repo.branchtags().items()])
385 branches.sort()
386 branches.reverse()
385 branches.reverse()
387
386
388 for isactive, node, tag in branches:
387 for isactive, node, tag in branches:
389 if (not active) or isactive:
388 if (not active) or isactive:
390 if ui.quiet:
389 if ui.quiet:
391 ui.write("%s\n" % tag)
390 ui.write("%s\n" % tag)
392 else:
391 else:
393 rev = str(node).rjust(32 - util.locallen(tag))
392 rev = str(node).rjust(32 - util.locallen(tag))
394 isinactive = ((not isactive) and " (inactive)") or ''
393 isinactive = ((not isactive) and " (inactive)") or ''
395 data = tag, rev, hexfunc(repo.lookup(node)), isinactive
394 data = tag, rev, hexfunc(repo.lookup(node)), isinactive
396 ui.write("%s%s:%s%s\n" % data)
395 ui.write("%s%s:%s%s\n" % data)
397
396
398 def bundle(ui, repo, fname, dest=None, **opts):
397 def bundle(ui, repo, fname, dest=None, **opts):
399 """create a changegroup file
398 """create a changegroup file
400
399
401 Generate a compressed changegroup file collecting changesets not
400 Generate a compressed changegroup file collecting changesets not
402 found in the other repository.
401 found in the other repository.
403
402
404 If no destination repository is specified the destination is
403 If no destination repository is specified the destination is
405 assumed to have all the nodes specified by one or more --base
404 assumed to have all the nodes specified by one or more --base
406 parameters. To create a bundle containing all changesets, use
405 parameters. To create a bundle containing all changesets, use
407 --all (or --base null). To change the compression method applied,
406 --all (or --base null). To change the compression method applied,
408 use the -t option (by default, bundles are compressed using bz2).
407 use the -t option (by default, bundles are compressed using bz2).
409
408
410 The bundle file can then be transferred using conventional means and
409 The bundle file can then be transferred using conventional means and
411 applied to another repository with the unbundle or pull command.
410 applied to another repository with the unbundle or pull command.
412 This is useful when direct push and pull are not available or when
411 This is useful when direct push and pull are not available or when
413 exporting an entire repository is undesirable.
412 exporting an entire repository is undesirable.
414
413
415 Applying bundles preserves all changeset contents including
414 Applying bundles preserves all changeset contents including
416 permissions, copy/rename information, and revision history.
415 permissions, copy/rename information, and revision history.
417 """
416 """
418 revs = opts.get('rev') or None
417 revs = opts.get('rev') or None
419 if revs:
418 if revs:
420 revs = [repo.lookup(rev) for rev in revs]
419 revs = [repo.lookup(rev) for rev in revs]
421 if opts.get('all'):
420 if opts.get('all'):
422 base = ['null']
421 base = ['null']
423 else:
422 else:
424 base = opts.get('base')
423 base = opts.get('base')
425 if base:
424 if base:
426 if dest:
425 if dest:
427 raise util.Abort(_("--base is incompatible with specifying "
426 raise util.Abort(_("--base is incompatible with specifying "
428 "a destination"))
427 "a destination"))
429 base = [repo.lookup(rev) for rev in base]
428 base = [repo.lookup(rev) for rev in base]
430 # create the right base
429 # create the right base
431 # XXX: nodesbetween / changegroup* should be "fixed" instead
430 # XXX: nodesbetween / changegroup* should be "fixed" instead
432 o = []
431 o = []
433 has = {nullid: None}
432 has = {nullid: None}
434 for n in base:
433 for n in base:
435 has.update(repo.changelog.reachable(n))
434 has.update(repo.changelog.reachable(n))
436 if revs:
435 if revs:
437 visit = list(revs)
436 visit = list(revs)
438 else:
437 else:
439 visit = repo.changelog.heads()
438 visit = repo.changelog.heads()
440 seen = {}
439 seen = {}
441 while visit:
440 while visit:
442 n = visit.pop(0)
441 n = visit.pop(0)
443 parents = [p for p in repo.changelog.parents(n) if p not in has]
442 parents = [p for p in repo.changelog.parents(n) if p not in has]
444 if len(parents) == 0:
443 if len(parents) == 0:
445 o.insert(0, n)
444 o.insert(0, n)
446 else:
445 else:
447 for p in parents:
446 for p in parents:
448 if p not in seen:
447 if p not in seen:
449 seen[p] = 1
448 seen[p] = 1
450 visit.append(p)
449 visit.append(p)
451 else:
450 else:
452 cmdutil.setremoteconfig(ui, opts)
451 cmdutil.setremoteconfig(ui, opts)
453 dest, revs, checkout = hg.parseurl(
452 dest, revs, checkout = hg.parseurl(
454 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
453 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
455 other = hg.repository(ui, dest)
454 other = hg.repository(ui, dest)
456 o = repo.findoutgoing(other, force=opts['force'])
455 o = repo.findoutgoing(other, force=opts['force'])
457
456
458 if revs:
457 if revs:
459 cg = repo.changegroupsubset(o, revs, 'bundle')
458 cg = repo.changegroupsubset(o, revs, 'bundle')
460 else:
459 else:
461 cg = repo.changegroup(o, 'bundle')
460 cg = repo.changegroup(o, 'bundle')
462
461
463 bundletype = opts.get('type', 'bzip2').lower()
462 bundletype = opts.get('type', 'bzip2').lower()
464 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
463 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
465 bundletype = btypes.get(bundletype)
464 bundletype = btypes.get(bundletype)
466 if bundletype not in changegroup.bundletypes:
465 if bundletype not in changegroup.bundletypes:
467 raise util.Abort(_('unknown bundle type specified with --type'))
466 raise util.Abort(_('unknown bundle type specified with --type'))
468
467
469 changegroup.writebundle(cg, fname, bundletype)
468 changegroup.writebundle(cg, fname, bundletype)
470
469
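A side note on the --type handling a few lines up: the user-facing compression name is lower-cased and mapped onto an on-disk bundle header through the btypes table, and anything outside that table aborts. A standalone restatement of that lookup, for illustration only (ValueError stands in for util.Abort so the snippet has no Mercurial dependencies):

    # Restatement of the bundle --type lookup above; names local to this sketch.
    BTYPES = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}

    def bundleheader(typename='bzip2'):
        header = BTYPES.get(typename.lower())
        if header is None:
            raise ValueError('unknown bundle type specified with --type')
        return header

    # bundleheader('GZIP') == 'HG10GZ'; bundleheader('xz') raises ValueError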
471 def cat(ui, repo, file1, *pats, **opts):
470 def cat(ui, repo, file1, *pats, **opts):
472 """output the current or given revision of files
471 """output the current or given revision of files
473
472
474 Print the specified files as they were at the given revision.
473 Print the specified files as they were at the given revision.
475 If no revision is given, the parent of the working directory is used,
474 If no revision is given, the parent of the working directory is used,
476 or tip if no revision is checked out.
475 or tip if no revision is checked out.
477
476
478 Output may be to a file, in which case the name of the file is
477 Output may be to a file, in which case the name of the file is
479 given using a format string. The formatting rules are the same as
478 given using a format string. The formatting rules are the same as
480 for the export command, with the following additions:
479 for the export command, with the following additions:
481
480
482 %s basename of file being printed
481 %s basename of file being printed
483 %d dirname of file being printed, or '.' if in repo root
482 %d dirname of file being printed, or '.' if in repo root
484 %p root-relative path name of file being printed
483 %p root-relative path name of file being printed
485 """
484 """
486 ctx = repo[opts['rev']]
485 ctx = repo[opts['rev']]
487 err = 1
486 err = 1
488 m = cmdutil.match(repo, (file1,) + pats, opts)
487 m = cmdutil.match(repo, (file1,) + pats, opts)
489 for abs in repo.walk(m, ctx.node()):
488 for abs in repo.walk(m, ctx.node()):
490 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
489 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
491 data = ctx.filectx(abs).data()
490 data = ctx.filectx(abs).data()
492 if opts.get('decode'):
491 if opts.get('decode'):
493 data = repo.wwritedata(abs, data)
492 data = repo.wwritedata(abs, data)
494 fp.write(data)
493 fp.write(data)
495 err = 0
494 err = 0
496 return err
495 return err
497
496
498 def clone(ui, source, dest=None, **opts):
497 def clone(ui, source, dest=None, **opts):
499 """make a copy of an existing repository
498 """make a copy of an existing repository
500
499
501 Create a copy of an existing repository in a new directory.
500 Create a copy of an existing repository in a new directory.
502
501
503 If no destination directory name is specified, it defaults to the
502 If no destination directory name is specified, it defaults to the
504 basename of the source.
503 basename of the source.
505
504
506 The location of the source is added to the new repository's
505 The location of the source is added to the new repository's
507 .hg/hgrc file, as the default to be used for future pulls.
506 .hg/hgrc file, as the default to be used for future pulls.
508
507
509 For efficiency, hardlinks are used for cloning whenever the source
508 For efficiency, hardlinks are used for cloning whenever the source
510 and destination are on the same filesystem (note this applies only
509 and destination are on the same filesystem (note this applies only
511 to the repository data, not to the checked out files). Some
510 to the repository data, not to the checked out files). Some
512 filesystems, such as AFS, implement hardlinking incorrectly, but
511 filesystems, such as AFS, implement hardlinking incorrectly, but
513 do not report errors. In these cases, use the --pull option to
512 do not report errors. In these cases, use the --pull option to
514 avoid hardlinking.
513 avoid hardlinking.
515
514
516 In some cases, you can clone repositories and checked out files
515 In some cases, you can clone repositories and checked out files
517 using full hardlinks with
516 using full hardlinks with
518
517
519 $ cp -al REPO REPOCLONE
518 $ cp -al REPO REPOCLONE
520
519
521 This is the fastest way to clone, but it is not always safe. The
520 This is the fastest way to clone, but it is not always safe. The
522 operation is not atomic (making sure REPO is not modified during
521 operation is not atomic (making sure REPO is not modified during
523 the operation is up to you) and you have to make sure your editor
522 the operation is up to you) and you have to make sure your editor
524 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
523 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
525 this is not compatible with certain extensions that place their
524 this is not compatible with certain extensions that place their
526 metadata under the .hg directory, such as mq.
525 metadata under the .hg directory, such as mq.
527
526
528 If you use the -r option to clone up to a specific revision, no
527 If you use the -r option to clone up to a specific revision, no
529 subsequent revisions will be present in the cloned repository.
528 subsequent revisions will be present in the cloned repository.
530 This option implies --pull, even on local repositories.
529 This option implies --pull, even on local repositories.
531
530
532 If the -U option is used, the new clone will contain only a repository
531 If the -U option is used, the new clone will contain only a repository
533 (.hg) and no working copy (the working copy parent is the null revision).
532 (.hg) and no working copy (the working copy parent is the null revision).
534
533
535 See pull for valid source format details.
534 See pull for valid source format details.
536
535
537 It is possible to specify an ssh:// URL as the destination, but no
536 It is possible to specify an ssh:// URL as the destination, but no
538 .hg/hgrc and working directory will be created on the remote side.
537 .hg/hgrc and working directory will be created on the remote side.
539 Look at the help text for the pull command for important details
538 Look at the help text for the pull command for important details
540 about ssh:// URLs.
539 about ssh:// URLs.
541 """
540 """
542 cmdutil.setremoteconfig(ui, opts)
541 cmdutil.setremoteconfig(ui, opts)
543 hg.clone(ui, source, dest,
542 hg.clone(ui, source, dest,
544 pull=opts['pull'],
543 pull=opts['pull'],
545 stream=opts['uncompressed'],
544 stream=opts['uncompressed'],
546 rev=opts['rev'],
545 rev=opts['rev'],
547 update=not opts['noupdate'])
546 update=not opts['noupdate'])
548
547
549 def commit(ui, repo, *pats, **opts):
548 def commit(ui, repo, *pats, **opts):
550 """commit the specified files or all outstanding changes
549 """commit the specified files or all outstanding changes
551
550
552 Commit changes to the given files into the repository.
551 Commit changes to the given files into the repository.
553
552
554 If a list of files is omitted, all changes reported by "hg status"
553 If a list of files is omitted, all changes reported by "hg status"
555 will be committed.
554 will be committed.
556
555
557 If you are committing the result of a merge, do not provide any
556 If you are committing the result of a merge, do not provide any
558 file names or -I/-X filters.
557 file names or -I/-X filters.
559
558
560 If no commit message is specified, the configured editor is started to
559 If no commit message is specified, the configured editor is started to
561 enter a message.
560 enter a message.
562
561
563 See 'hg help dates' for a list of formats valid for -d/--date.
562 See 'hg help dates' for a list of formats valid for -d/--date.
564 """
563 """
565 def commitfunc(ui, repo, message, match, opts):
564 def commitfunc(ui, repo, message, match, opts):
566 return repo.commit(match.files(), message, opts['user'], opts['date'],
565 return repo.commit(match.files(), message, opts['user'], opts['date'],
567 match, force_editor=opts.get('force_editor'))
566 match, force_editor=opts.get('force_editor'))
568
567
569 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
568 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
570 if not node:
569 if not node:
571 return
570 return
572 cl = repo.changelog
571 cl = repo.changelog
573 rev = cl.rev(node)
572 rev = cl.rev(node)
574 parents = cl.parentrevs(rev)
573 parents = cl.parentrevs(rev)
575 if rev - 1 in parents:
574 if rev - 1 in parents:
576 # one of the parents was the old tip
575 # one of the parents was the old tip
577 return
576 return
578 if (parents == (nullrev, nullrev) or
577 if (parents == (nullrev, nullrev) or
579 len(cl.heads(cl.node(parents[0]))) > 1 and
578 len(cl.heads(cl.node(parents[0]))) > 1 and
580 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
579 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
581 ui.status(_('created new head\n'))
580 ui.status(_('created new head\n'))
582
581
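The closing block of commit() decides whether to warn that the commit created a new head. Restated on its own for clarity (this sketch covers only the final test; the function has already returned earlier when one of the new commit's parents was the previous tip):

    # Restatement of the "created new head" condition above.  heads_of(p) plays the
    # role of len(cl.heads(cl.node(p))), evaluated after the commit; nullrev marks an
    # absent parent.
    def creates_new_head(parents, heads_of, nullrev=-1):
        p1, p2 = parents
        if parents == (nullrev, nullrev):
            return True            # a parentless commit starts a new root, hence a new head
        # p1 still has another descendant head besides the new commit, and so does p2
        # (when there is a second parent at all)
        return heads_of(p1) > 1 and (p2 == nullrev or heads_of(p2) > 1)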
583 def copy(ui, repo, *pats, **opts):
582 def copy(ui, repo, *pats, **opts):
584 """mark files as copied for the next commit
583 """mark files as copied for the next commit
585
584
586 Mark dest as having copies of source files. If dest is a
585 Mark dest as having copies of source files. If dest is a
587 directory, copies are put in that directory. If dest is a file,
586 directory, copies are put in that directory. If dest is a file,
588 there can only be one source.
587 there can only be one source.
589
588
590 By default, this command copies the contents of files as they
589 By default, this command copies the contents of files as they
591 stand in the working directory. If invoked with --after, the
590 stand in the working directory. If invoked with --after, the
592 operation is recorded, but no copying is performed.
591 operation is recorded, but no copying is performed.
593
592
594 This command takes effect in the next commit. To undo a copy
593 This command takes effect in the next commit. To undo a copy
595 before that, see hg revert.
594 before that, see hg revert.
596 """
595 """
597 wlock = repo.wlock(False)
596 wlock = repo.wlock(False)
598 try:
597 try:
599 return cmdutil.copy(ui, repo, pats, opts)
598 return cmdutil.copy(ui, repo, pats, opts)
600 finally:
599 finally:
601 del wlock
600 del wlock
602
601
603 def debugancestor(ui, repo, *args):
602 def debugancestor(ui, repo, *args):
604 """find the ancestor revision of two revisions in a given index"""
603 """find the ancestor revision of two revisions in a given index"""
605 if len(args) == 3:
604 if len(args) == 3:
606 index, rev1, rev2 = args
605 index, rev1, rev2 = args
607 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
606 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
608 lookup = r.lookup
607 lookup = r.lookup
609 elif len(args) == 2:
608 elif len(args) == 2:
610 if not repo:
609 if not repo:
611 raise util.Abort(_("There is no Mercurial repository here "
610 raise util.Abort(_("There is no Mercurial repository here "
612 "(.hg not found)"))
611 "(.hg not found)"))
613 rev1, rev2 = args
612 rev1, rev2 = args
614 r = repo.changelog
613 r = repo.changelog
615 lookup = repo.lookup
614 lookup = repo.lookup
616 else:
615 else:
617 raise util.Abort(_('either two or three arguments required'))
616 raise util.Abort(_('either two or three arguments required'))
618 a = r.ancestor(lookup(rev1), lookup(rev2))
617 a = r.ancestor(lookup(rev1), lookup(rev2))
619 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
618 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
620
619
621 def debugcomplete(ui, cmd='', **opts):
620 def debugcomplete(ui, cmd='', **opts):
622 """returns the completion list associated with the given command"""
621 """returns the completion list associated with the given command"""
623
622
624 if opts['options']:
623 if opts['options']:
625 options = []
624 options = []
626 otables = [globalopts]
625 otables = [globalopts]
627 if cmd:
626 if cmd:
628 aliases, entry = cmdutil.findcmd(ui, cmd, table)
627 aliases, entry = cmdutil.findcmd(ui, cmd, table)
629 otables.append(entry[1])
628 otables.append(entry[1])
630 for t in otables:
629 for t in otables:
631 for o in t:
630 for o in t:
632 if o[0]:
631 if o[0]:
633 options.append('-%s' % o[0])
632 options.append('-%s' % o[0])
634 options.append('--%s' % o[1])
633 options.append('--%s' % o[1])
635 ui.write("%s\n" % "\n".join(options))
634 ui.write("%s\n" % "\n".join(options))
636 return
635 return
637
636
638 clist = cmdutil.findpossible(ui, cmd, table).keys()
637 ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table))))
639 clist.sort()
640 ui.write("%s\n" % "\n".join(clist))
641
638
642 def debugfsinfo(ui, path = "."):
639 def debugfsinfo(ui, path = "."):
643 file('.debugfsinfo', 'w').write('')
640 file('.debugfsinfo', 'w').write('')
644 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
641 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
645 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
642 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
646 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
643 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
647 and 'yes' or 'no'))
644 and 'yes' or 'no'))
648 os.unlink('.debugfsinfo')
645 os.unlink('.debugfsinfo')
649
646
650 def debugrebuildstate(ui, repo, rev="tip"):
647 def debugrebuildstate(ui, repo, rev="tip"):
651 """rebuild the dirstate as it would look like for the given revision"""
648 """rebuild the dirstate as it would look like for the given revision"""
652 ctx = repo[rev]
649 ctx = repo[rev]
653 wlock = repo.wlock()
650 wlock = repo.wlock()
654 try:
651 try:
655 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
652 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
656 finally:
653 finally:
657 del wlock
654 del wlock
658
655
659 def debugcheckstate(ui, repo):
656 def debugcheckstate(ui, repo):
660 """validate the correctness of the current dirstate"""
657 """validate the correctness of the current dirstate"""
661 parent1, parent2 = repo.dirstate.parents()
658 parent1, parent2 = repo.dirstate.parents()
662 m1 = repo[parent1].manifest()
659 m1 = repo[parent1].manifest()
663 m2 = repo[parent2].manifest()
660 m2 = repo[parent2].manifest()
664 errors = 0
661 errors = 0
665 for f in repo.dirstate:
662 for f in repo.dirstate:
666 state = repo.dirstate[f]
663 state = repo.dirstate[f]
667 if state in "nr" and f not in m1:
664 if state in "nr" and f not in m1:
668 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
665 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
669 errors += 1
666 errors += 1
670 if state in "a" and f in m1:
667 if state in "a" and f in m1:
671 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
668 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
672 errors += 1
669 errors += 1
673 if state in "m" and f not in m1 and f not in m2:
670 if state in "m" and f not in m1 and f not in m2:
674 ui.warn(_("%s in state %s, but not in either manifest\n") %
671 ui.warn(_("%s in state %s, but not in either manifest\n") %
675 (f, state))
672 (f, state))
676 errors += 1
673 errors += 1
677 for f in m1:
674 for f in m1:
678 state = repo.dirstate[f]
675 state = repo.dirstate[f]
679 if state not in "nrm":
676 if state not in "nrm":
680 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
677 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
681 errors += 1
678 errors += 1
682 if errors:
679 if errors:
683 error = _(".hg/dirstate inconsistent with current parent's manifest")
680 error = _(".hg/dirstate inconsistent with current parent's manifest")
684 raise util.Abort(error)
681 raise util.Abort(error)
685
682
686 def showconfig(ui, repo, *values, **opts):
683 def showconfig(ui, repo, *values, **opts):
687 """show combined config settings from all hgrc files
684 """show combined config settings from all hgrc files
688
685
689 With no args, print names and values of all config items.
686 With no args, print names and values of all config items.
690
687
691 With one arg of the form section.name, print just the value of
688 With one arg of the form section.name, print just the value of
692 that config item.
689 that config item.
693
690
694 With multiple args, print names and values of all config items
691 With multiple args, print names and values of all config items
695 with matching section names."""
692 with matching section names."""
696
693
697 untrusted = bool(opts.get('untrusted'))
694 untrusted = bool(opts.get('untrusted'))
698 if values:
695 if values:
699 if len([v for v in values if '.' in v]) > 1:
696 if len([v for v in values if '.' in v]) > 1:
700 raise util.Abort(_('only one config item permitted'))
697 raise util.Abort(_('only one config item permitted'))
701 for section, name, value in ui.walkconfig(untrusted=untrusted):
698 for section, name, value in ui.walkconfig(untrusted=untrusted):
702 sectname = section + '.' + name
699 sectname = section + '.' + name
703 if values:
700 if values:
704 for v in values:
701 for v in values:
705 if v == section:
702 if v == section:
706 ui.write('%s=%s\n' % (sectname, value))
703 ui.write('%s=%s\n' % (sectname, value))
707 elif v == sectname:
704 elif v == sectname:
708 ui.write(value, '\n')
705 ui.write(value, '\n')
709 else:
706 else:
710 ui.write('%s=%s\n' % (sectname, value))
707 ui.write('%s=%s\n' % (sectname, value))
711
708
712 def debugsetparents(ui, repo, rev1, rev2=None):
709 def debugsetparents(ui, repo, rev1, rev2=None):
713 """manually set the parents of the current working directory
710 """manually set the parents of the current working directory
714
711
715 This is useful for writing repository conversion tools, but should
712 This is useful for writing repository conversion tools, but should
716 be used with care.
713 be used with care.
717 """
714 """
718
715
719 if not rev2:
716 if not rev2:
720 rev2 = hex(nullid)
717 rev2 = hex(nullid)
721
718
722 wlock = repo.wlock()
719 wlock = repo.wlock()
723 try:
720 try:
724 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
721 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
725 finally:
722 finally:
726 del wlock
723 del wlock
727
724
728 def debugstate(ui, repo, nodates=None):
725 def debugstate(ui, repo, nodates=None):
729 """show the contents of the current dirstate"""
726 """show the contents of the current dirstate"""
730 k = repo.dirstate._map.items()
731 k.sort()
732 timestr = ""
727 timestr = ""
733 showdate = not nodates
728 showdate = not nodates
734 for file_, ent in k:
729 for file_, ent in util.sort(repo.dirstate._map.items()):
735 if showdate:
730 if showdate:
736 if ent[3] == -1:
731 if ent[3] == -1:
737 # Pad or slice to locale representation
732 # Pad or slice to locale representation
738 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
733 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
739 timestr = 'unset'
734 timestr = 'unset'
740 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
735 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
741 else:
736 else:
742 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
737 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
743 if ent[1] & 020000:
738 if ent[1] & 020000:
744 mode = 'lnk'
739 mode = 'lnk'
745 else:
740 else:
746 mode = '%3o' % (ent[1] & 0777)
741 mode = '%3o' % (ent[1] & 0777)
747 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
742 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
748 for f in repo.dirstate.copies():
743 for f in repo.dirstate.copies():
749 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
744 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
750
745
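For reference, each dirstate entry printed by debugstate above is a (state, mode, size, mtime) tuple, and the 020000 bit is what separates symlink modes from regular-file modes in that check. A simplified standalone formatter (it skips the "unset" mtime padding handled in the loop above):

    # Simplified sketch of one debugstate output line; see the loop above for the
    # full handling, including mtime == -1 ("unset") entries.
    import time

    def formatentry(filename, ent):
        state, mode, size, mtime = ent
        if mode & 020000:
            modestr = 'lnk'                    # symlink
        else:
            modestr = '%3o' % (mode & 0777)    # permission bits, octal
        timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
        return "%c %s %10d %s%s" % (state, modestr, size, timestr, filename)

    # formatentry('foo.py', ('n', 0100644, 1342, 0)) returns a line shaped like the
    # ones ui.write() emits above.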
751 def debugdata(ui, file_, rev):
746 def debugdata(ui, file_, rev):
752 """dump the contents of a data file revision"""
747 """dump the contents of a data file revision"""
753 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
748 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
754 try:
749 try:
755 ui.write(r.revision(r.lookup(rev)))
750 ui.write(r.revision(r.lookup(rev)))
756 except KeyError:
751 except KeyError:
757 raise util.Abort(_('invalid revision identifier %s') % rev)
752 raise util.Abort(_('invalid revision identifier %s') % rev)
758
753
759 def debugdate(ui, date, range=None, **opts):
754 def debugdate(ui, date, range=None, **opts):
760 """parse and display a date"""
755 """parse and display a date"""
761 if opts["extended"]:
756 if opts["extended"]:
762 d = util.parsedate(date, util.extendeddateformats)
757 d = util.parsedate(date, util.extendeddateformats)
763 else:
758 else:
764 d = util.parsedate(date)
759 d = util.parsedate(date)
765 ui.write("internal: %s %s\n" % d)
760 ui.write("internal: %s %s\n" % d)
766 ui.write("standard: %s\n" % util.datestr(d))
761 ui.write("standard: %s\n" % util.datestr(d))
767 if range:
762 if range:
768 m = util.matchdate(range)
763 m = util.matchdate(range)
769 ui.write("match: %s\n" % m(d[0]))
764 ui.write("match: %s\n" % m(d[0]))
770
765
771 def debugindex(ui, file_):
766 def debugindex(ui, file_):
772 """dump the contents of an index file"""
767 """dump the contents of an index file"""
773 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
768 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
774 ui.write(" rev offset length base linkrev" +
769 ui.write(" rev offset length base linkrev" +
775 " nodeid p1 p2\n")
770 " nodeid p1 p2\n")
776 for i in r:
771 for i in r:
777 node = r.node(i)
772 node = r.node(i)
778 try:
773 try:
779 pp = r.parents(node)
774 pp = r.parents(node)
780 except:
775 except:
781 pp = [nullid, nullid]
776 pp = [nullid, nullid]
782 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
777 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
783 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
778 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
784 short(node), short(pp[0]), short(pp[1])))
779 short(node), short(pp[0]), short(pp[1])))
785
780
786 def debugindexdot(ui, file_):
781 def debugindexdot(ui, file_):
787 """dump an index DAG as a .dot file"""
782 """dump an index DAG as a .dot file"""
788 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
783 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
789 ui.write("digraph G {\n")
784 ui.write("digraph G {\n")
790 for i in r:
785 for i in r:
791 node = r.node(i)
786 node = r.node(i)
792 pp = r.parents(node)
787 pp = r.parents(node)
793 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
788 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
794 if pp[1] != nullid:
789 if pp[1] != nullid:
795 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
790 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
796 ui.write("}\n")
791 ui.write("}\n")
797
792
798 def debuginstall(ui):
793 def debuginstall(ui):
799 '''test Mercurial installation'''
794 '''test Mercurial installation'''
800
795
801 def writetemp(contents):
796 def writetemp(contents):
802 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
797 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
803 f = os.fdopen(fd, "wb")
798 f = os.fdopen(fd, "wb")
804 f.write(contents)
799 f.write(contents)
805 f.close()
800 f.close()
806 return name
801 return name
807
802
808 problems = 0
803 problems = 0
809
804
810 # encoding
805 # encoding
811 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
806 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
812 try:
807 try:
813 util.fromlocal("test")
808 util.fromlocal("test")
814 except util.Abort, inst:
809 except util.Abort, inst:
815 ui.write(" %s\n" % inst)
810 ui.write(" %s\n" % inst)
816 ui.write(_(" (check that your locale is properly set)\n"))
811 ui.write(_(" (check that your locale is properly set)\n"))
817 problems += 1
812 problems += 1
818
813
819 # compiled modules
814 # compiled modules
820 ui.status(_("Checking extensions...\n"))
815 ui.status(_("Checking extensions...\n"))
821 try:
816 try:
822 import bdiff, mpatch, base85
817 import bdiff, mpatch, base85
823 except Exception, inst:
818 except Exception, inst:
824 ui.write(" %s\n" % inst)
819 ui.write(" %s\n" % inst)
825 ui.write(_(" One or more extensions could not be found"))
820 ui.write(_(" One or more extensions could not be found"))
826 ui.write(_(" (check that you compiled the extensions)\n"))
821 ui.write(_(" (check that you compiled the extensions)\n"))
827 problems += 1
822 problems += 1
828
823
829 # templates
824 # templates
830 ui.status(_("Checking templates...\n"))
825 ui.status(_("Checking templates...\n"))
831 try:
826 try:
832 import templater
827 import templater
833 t = templater.templater(templater.templatepath("map-cmdline.default"))
828 t = templater.templater(templater.templatepath("map-cmdline.default"))
834 except Exception, inst:
829 except Exception, inst:
835 ui.write(" %s\n" % inst)
830 ui.write(" %s\n" % inst)
836 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
831 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
837 problems += 1
832 problems += 1
838
833
839 # patch
834 # patch
840 ui.status(_("Checking patch...\n"))
835 ui.status(_("Checking patch...\n"))
841 patchproblems = 0
836 patchproblems = 0
842 a = "1\n2\n3\n4\n"
837 a = "1\n2\n3\n4\n"
843 b = "1\n2\n3\ninsert\n4\n"
838 b = "1\n2\n3\ninsert\n4\n"
844 fa = writetemp(a)
839 fa = writetemp(a)
845 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
840 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
846 os.path.basename(fa))
841 os.path.basename(fa))
847 fd = writetemp(d)
842 fd = writetemp(d)
848
843
849 files = {}
844 files = {}
850 try:
845 try:
851 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
846 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
852 except util.Abort, e:
847 except util.Abort, e:
853 ui.write(_(" patch call failed:\n"))
848 ui.write(_(" patch call failed:\n"))
854 ui.write(" " + str(e) + "\n")
849 ui.write(" " + str(e) + "\n")
855 patchproblems += 1
850 patchproblems += 1
856 else:
851 else:
857 if list(files) != [os.path.basename(fa)]:
852 if list(files) != [os.path.basename(fa)]:
858 ui.write(_(" unexpected patch output!\n"))
853 ui.write(_(" unexpected patch output!\n"))
859 patchproblems += 1
854 patchproblems += 1
860 a = file(fa).read()
855 a = file(fa).read()
861 if a != b:
856 if a != b:
862 ui.write(_(" patch test failed!\n"))
857 ui.write(_(" patch test failed!\n"))
863 patchproblems += 1
858 patchproblems += 1
864
859
865 if patchproblems:
860 if patchproblems:
866 if ui.config('ui', 'patch'):
861 if ui.config('ui', 'patch'):
867 ui.write(_(" (Current patch tool may be incompatible with patch,"
862 ui.write(_(" (Current patch tool may be incompatible with patch,"
868 " or misconfigured. Please check your .hgrc file)\n"))
863 " or misconfigured. Please check your .hgrc file)\n"))
869 else:
864 else:
870 ui.write(_(" Internal patcher failure, please report this error"
865 ui.write(_(" Internal patcher failure, please report this error"
871 " to http://www.selenic.com/mercurial/bts\n"))
866 " to http://www.selenic.com/mercurial/bts\n"))
872 problems += patchproblems
867 problems += patchproblems
873
868
874 os.unlink(fa)
869 os.unlink(fa)
875 os.unlink(fd)
870 os.unlink(fd)
876
871
877 # editor
872 # editor
878 ui.status(_("Checking commit editor...\n"))
873 ui.status(_("Checking commit editor...\n"))
879 editor = ui.geteditor()
874 editor = ui.geteditor()
880 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
875 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
881 if not cmdpath:
876 if not cmdpath:
882 if editor == 'vi':
877 if editor == 'vi':
883 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
878 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
884 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
879 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
885 else:
880 else:
886 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
881 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
887 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
882 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
888 problems += 1
883 problems += 1
889
884
890 # check username
885 # check username
891 ui.status(_("Checking username...\n"))
886 ui.status(_("Checking username...\n"))
892 user = os.environ.get("HGUSER")
887 user = os.environ.get("HGUSER")
893 if user is None:
888 if user is None:
894 user = ui.config("ui", "username")
889 user = ui.config("ui", "username")
895 if user is None:
890 if user is None:
896 user = os.environ.get("EMAIL")
891 user = os.environ.get("EMAIL")
897 if not user:
892 if not user:
898 ui.warn(" ")
893 ui.warn(" ")
899 ui.username()
894 ui.username()
900 ui.write(_(" (specify a username in your .hgrc file)\n"))
895 ui.write(_(" (specify a username in your .hgrc file)\n"))
901
896
902 if not problems:
897 if not problems:
903 ui.status(_("No problems detected\n"))
898 ui.status(_("No problems detected\n"))
904 else:
899 else:
905 ui.write(_("%s problems detected,"
900 ui.write(_("%s problems detected,"
906 " please check your install!\n") % problems)
901 " please check your install!\n") % problems)
907
902
908 return problems
903 return problems
909
904
910 def debugrename(ui, repo, file1, *pats, **opts):
905 def debugrename(ui, repo, file1, *pats, **opts):
911 """dump rename information"""
906 """dump rename information"""
912
907
913 ctx = repo[opts.get('rev')]
908 ctx = repo[opts.get('rev')]
914 m = cmdutil.match(repo, (file1,) + pats, opts)
909 m = cmdutil.match(repo, (file1,) + pats, opts)
915 for abs in repo.walk(m, ctx.node()):
910 for abs in repo.walk(m, ctx.node()):
916 fctx = ctx.filectx(abs)
911 fctx = ctx.filectx(abs)
917 o = fctx.filelog().renamed(fctx.filenode())
912 o = fctx.filelog().renamed(fctx.filenode())
918 rel = m.rel(abs)
913 rel = m.rel(abs)
919 if o:
914 if o:
920 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
915 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
921 else:
916 else:
922 ui.write(_("%s not renamed\n") % rel)
917 ui.write(_("%s not renamed\n") % rel)
923
918
924 def debugwalk(ui, repo, *pats, **opts):
919 def debugwalk(ui, repo, *pats, **opts):
925 """show how files match on given patterns"""
920 """show how files match on given patterns"""
926 m = cmdutil.match(repo, pats, opts)
921 m = cmdutil.match(repo, pats, opts)
927 items = list(repo.walk(m))
922 items = list(repo.walk(m))
928 if not items:
923 if not items:
929 return
924 return
930 fmt = 'f %%-%ds %%-%ds %%s' % (
925 fmt = 'f %%-%ds %%-%ds %%s' % (
931 max([len(abs) for abs in items]),
926 max([len(abs) for abs in items]),
932 max([len(m.rel(abs)) for abs in items]))
927 max([len(m.rel(abs)) for abs in items]))
933 for abs in items:
928 for abs in items:
934 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
929 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
935 ui.write("%s\n" % line.rstrip())
930 ui.write("%s\n" % line.rstrip())
936
931
937 def diff(ui, repo, *pats, **opts):
932 def diff(ui, repo, *pats, **opts):
938 """diff repository (or selected files)
933 """diff repository (or selected files)
939
934
940 Show differences between revisions for the specified files.
935 Show differences between revisions for the specified files.
941
936
942 Differences between files are shown using the unified diff format.
937 Differences between files are shown using the unified diff format.
943
938
944 NOTE: diff may generate unexpected results for merges, as it will
939 NOTE: diff may generate unexpected results for merges, as it will
945 default to comparing against the working directory's first parent
940 default to comparing against the working directory's first parent
946 changeset if no revisions are specified.
941 changeset if no revisions are specified.
947
942
948 When two revision arguments are given, then changes are shown
943 When two revision arguments are given, then changes are shown
949 between those revisions. If only one revision is specified then
944 between those revisions. If only one revision is specified then
950 that revision is compared to the working directory, and, when no
945 that revision is compared to the working directory, and, when no
951 revisions are specified, the working directory files are compared
946 revisions are specified, the working directory files are compared
952 to its parent.
947 to its parent.
953
948
954 Without the -a option, diff will avoid generating diffs of files
949 Without the -a option, diff will avoid generating diffs of files
955 it detects as binary. With -a, diff will generate a diff anyway,
950 it detects as binary. With -a, diff will generate a diff anyway,
956 probably with undesirable results.
951 probably with undesirable results.
957 """
952 """
958 node1, node2 = cmdutil.revpair(repo, opts['rev'])
953 node1, node2 = cmdutil.revpair(repo, opts['rev'])
959
954
960 m = cmdutil.match(repo, pats, opts)
955 m = cmdutil.match(repo, pats, opts)
961 patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
956 patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
962
957
963 def export(ui, repo, *changesets, **opts):
958 def export(ui, repo, *changesets, **opts):
964 """dump the header and diffs for one or more changesets
959 """dump the header and diffs for one or more changesets
965
960
966 Print the changeset header and diffs for one or more revisions.
961 Print the changeset header and diffs for one or more revisions.
967
962
968 The information shown in the changeset header is: author,
963 The information shown in the changeset header is: author,
969 changeset hash, parent(s) and commit comment.
964 changeset hash, parent(s) and commit comment.
970
965
971 NOTE: export may generate unexpected diff output for merge changesets,
966 NOTE: export may generate unexpected diff output for merge changesets,
972 as it will compare the merge changeset against its first parent only.
967 as it will compare the merge changeset against its first parent only.
973
968
974 Output may be to a file, in which case the name of the file is
969 Output may be to a file, in which case the name of the file is
975 given using a format string. The formatting rules are as follows:
970 given using a format string. The formatting rules are as follows:
976
971
977 %% literal "%" character
972 %% literal "%" character
978 %H changeset hash (40 bytes of hexadecimal)
973 %H changeset hash (40 bytes of hexadecimal)
979 %N number of patches being generated
974 %N number of patches being generated
980 %R changeset revision number
975 %R changeset revision number
981 %b basename of the exporting repository
976 %b basename of the exporting repository
982 %h short-form changeset hash (12 bytes of hexadecimal)
977 %h short-form changeset hash (12 bytes of hexadecimal)
983 %n zero-padded sequence number, starting at 1
978 %n zero-padded sequence number, starting at 1
984 %r zero-padded changeset revision number
979 %r zero-padded changeset revision number
985
980
986 Without the -a option, export will avoid generating diffs of files
981 Without the -a option, export will avoid generating diffs of files
987 it detects as binary. With -a, export will generate a diff anyway,
982 it detects as binary. With -a, export will generate a diff anyway,
988 probably with undesirable results.
983 probably with undesirable results.
989
984
990 With the --switch-parent option, the diff will be against the second
985 With the --switch-parent option, the diff will be against the second
991 parent. This can be useful for reviewing a merge.
986 parent. This can be useful for reviewing a merge.
992 """
987 """
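As a worked illustration of the table above (this is not Mercurial's own expansion code; the hash, repository name and counts are invented, %r is left out, and padding %n to the width of %N is an assumption of this example):

    # Hypothetical expansion of an export --output pattern, illustration only.
    node = '1f0dee641bb7258c56bd60e93edfa2405381c41e'
    subs = {'%%': '%', '%H': node, '%h': node[:12],
            '%R': '42', '%N': '12', '%n': '3'.zfill(2), '%b': 'hg'}
    name = '%b-%h-%n-of-%N.patch'
    for placeholder, value in subs.items():
        name = name.replace(placeholder, value)   # naive, but fine for this example
    # name == 'hg-1f0dee641bb7-03-of-12.patch'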
993 if not changesets:
988 if not changesets:
994 raise util.Abort(_("export requires at least one changeset"))
989 raise util.Abort(_("export requires at least one changeset"))
995 revs = cmdutil.revrange(repo, changesets)
990 revs = cmdutil.revrange(repo, changesets)
996 if len(revs) > 1:
991 if len(revs) > 1:
997 ui.note(_('exporting patches:\n'))
992 ui.note(_('exporting patches:\n'))
998 else:
993 else:
999 ui.note(_('exporting patch:\n'))
994 ui.note(_('exporting patch:\n'))
1000 patch.export(repo, revs, template=opts['output'],
995 patch.export(repo, revs, template=opts['output'],
1001 switch_parent=opts['switch_parent'],
996 switch_parent=opts['switch_parent'],
1002 opts=patch.diffopts(ui, opts))
997 opts=patch.diffopts(ui, opts))
1003
998
1004 def grep(ui, repo, pattern, *pats, **opts):
999 def grep(ui, repo, pattern, *pats, **opts):
1005 """search for a pattern in specified files and revisions
1000 """search for a pattern in specified files and revisions
1006
1001
1007 Search revisions of files for a regular expression.
1002 Search revisions of files for a regular expression.
1008
1003
1009 This command behaves differently than Unix grep. It only accepts
1004 This command behaves differently than Unix grep. It only accepts
1010 Python/Perl regexps. It searches repository history, not the
1005 Python/Perl regexps. It searches repository history, not the
1011 working directory. It always prints the revision number in which
1006 working directory. It always prints the revision number in which
1012 a match appears.
1007 a match appears.
1013
1008
1014 By default, grep only prints output for the first revision of a
1009 By default, grep only prints output for the first revision of a
1015 file in which it finds a match. To get it to print every revision
1010 file in which it finds a match. To get it to print every revision
1016 that contains a change in match status ("-" for a match that
1011 that contains a change in match status ("-" for a match that
1017 becomes a non-match, or "+" for a non-match that becomes a match),
1012 becomes a non-match, or "+" for a non-match that becomes a match),
1018 use the --all flag.
1013 use the --all flag.
1019 """
1014 """
1020 reflags = 0
1015 reflags = 0
1021 if opts['ignore_case']:
1016 if opts['ignore_case']:
1022 reflags |= re.I
1017 reflags |= re.I
1023 try:
1018 try:
1024 regexp = re.compile(pattern, reflags)
1019 regexp = re.compile(pattern, reflags)
1025 except Exception, inst:
1020 except Exception, inst:
1026 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1021 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1027 return None
1022 return None
1028 sep, eol = ':', '\n'
1023 sep, eol = ':', '\n'
1029 if opts['print0']:
1024 if opts['print0']:
1030 sep = eol = '\0'
1025 sep = eol = '\0'
1031
1026
1032 fcache = {}
1027 fcache = {}
1033 def getfile(fn):
1028 def getfile(fn):
1034 if fn not in fcache:
1029 if fn not in fcache:
1035 fcache[fn] = repo.file(fn)
1030 fcache[fn] = repo.file(fn)
1036 return fcache[fn]
1031 return fcache[fn]
1037
1032
1038 def matchlines(body):
1033 def matchlines(body):
1039 begin = 0
1034 begin = 0
1040 linenum = 0
1035 linenum = 0
1041 while True:
1036 while True:
1042 match = regexp.search(body, begin)
1037 match = regexp.search(body, begin)
1043 if not match:
1038 if not match:
1044 break
1039 break
1045 mstart, mend = match.span()
1040 mstart, mend = match.span()
1046 linenum += body.count('\n', begin, mstart) + 1
1041 linenum += body.count('\n', begin, mstart) + 1
1047 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1042 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1048 lend = body.find('\n', mend)
1043 lend = body.find('\n', mend)
1049 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1044 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1050 begin = lend + 1
1045 begin = lend + 1
1051
1046
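matchlines() above turns a file body into (linenum, colstart, colend, line) tuples for every regexp hit. A throwaway rerun of the same logic shows the shape of what it yields (the pattern here is local to the demo, whereas grep() closes over the one compiled from the command line):

    # Standalone rerun of the matchlines() logic above, with a local pattern.
    import re

    def matchlines(body, regexp):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    # list(matchlines("foo\nbar\nbaz\n", re.compile("ba.")))
    #   -> [(2, 0, 3, 'bar'), (3, 0, 3, 'baz')]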
1052 class linestate(object):
1047 class linestate(object):
1053 def __init__(self, line, linenum, colstart, colend):
1048 def __init__(self, line, linenum, colstart, colend):
1054 self.line = line
1049 self.line = line
1055 self.linenum = linenum
1050 self.linenum = linenum
1056 self.colstart = colstart
1051 self.colstart = colstart
1057 self.colend = colend
1052 self.colend = colend
1058
1053
1059 def __hash__(self):
1054 def __hash__(self):
1060 return hash((self.linenum, self.line))
1055 return hash((self.linenum, self.line))
1061
1056
1062 def __eq__(self, other):
1057 def __eq__(self, other):
1063 return self.line == other.line
1058 return self.line == other.line
1064
1059
1065 matches = {}
1060 matches = {}
1066 copies = {}
1061 copies = {}
1067 def grepbody(fn, rev, body):
1062 def grepbody(fn, rev, body):
1068 matches[rev].setdefault(fn, [])
1063 matches[rev].setdefault(fn, [])
1069 m = matches[rev][fn]
1064 m = matches[rev][fn]
1070 for lnum, cstart, cend, line in matchlines(body):
1065 for lnum, cstart, cend, line in matchlines(body):
1071 s = linestate(line, lnum, cstart, cend)
1066 s = linestate(line, lnum, cstart, cend)
1072 m.append(s)
1067 m.append(s)
1073
1068
1074 def difflinestates(a, b):
1069 def difflinestates(a, b):
1075 sm = difflib.SequenceMatcher(None, a, b)
1070 sm = difflib.SequenceMatcher(None, a, b)
1076 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1071 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1077 if tag == 'insert':
1072 if tag == 'insert':
1078 for i in xrange(blo, bhi):
1073 for i in xrange(blo, bhi):
1079 yield ('+', b[i])
1074 yield ('+', b[i])
1080 elif tag == 'delete':
1075 elif tag == 'delete':
1081 for i in xrange(alo, ahi):
1076 for i in xrange(alo, ahi):
1082 yield ('-', a[i])
1077 yield ('-', a[i])
1083 elif tag == 'replace':
1078 elif tag == 'replace':
1084 for i in xrange(alo, ahi):
1079 for i in xrange(alo, ahi):
1085 yield ('-', a[i])
1080 yield ('-', a[i])
1086 for i in xrange(blo, bhi):
1081 for i in xrange(blo, bhi):
1087 yield ('+', b[i])
1082 yield ('+', b[i])
1088
1083
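difflinestates() above is what gives grep --all its "+"/"-" markers: matching lines present only in the first argument come out as "-", lines present only in the second come out as "+". A plain-string sketch of the same classification, runnable on its own:

    # Same +/- classification as difflinestates above, over plain strings so it can
    # be tried without linestate objects.
    import difflib

    def diffstates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for line in a[alo:ahi]:
                    yield ('-', line)     # only in the first sequence
            if tag in ('insert', 'replace'):
                for line in b[blo:bhi]:
                    yield ('+', line)     # only in the second sequence

    # list(diffstates(['foo()'], ['foo()', 'foo(x)'])) -> [('+', 'foo(x)')]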
1089 prev = {}
1084 prev = {}
1090 def display(fn, rev, states, prevstates):
1085 def display(fn, rev, states, prevstates):
1091 datefunc = ui.quiet and util.shortdate or util.datestr
1086 datefunc = ui.quiet and util.shortdate or util.datestr
1092 found = False
1087 found = False
1093 filerevmatches = {}
1088 filerevmatches = {}
1094 r = prev.get(fn, -1)
1089 r = prev.get(fn, -1)
1095 if opts['all']:
1090 if opts['all']:
1096 iter = difflinestates(states, prevstates)
1091 iter = difflinestates(states, prevstates)
1097 else:
1092 else:
1098 iter = [('', l) for l in prevstates]
1093 iter = [('', l) for l in prevstates]
1099 for change, l in iter:
1094 for change, l in iter:
1100 cols = [fn, str(r)]
1095 cols = [fn, str(r)]
1101 if opts['line_number']:
1096 if opts['line_number']:
1102 cols.append(str(l.linenum))
1097 cols.append(str(l.linenum))
1103 if opts['all']:
1098 if opts['all']:
1104 cols.append(change)
1099 cols.append(change)
1105 if opts['user']:
1100 if opts['user']:
1106 cols.append(ui.shortuser(get(r)[1]))
1101 cols.append(ui.shortuser(get(r)[1]))
1107 if opts.get('date'):
1102 if opts.get('date'):
1108 cols.append(datefunc(get(r)[2]))
1103 cols.append(datefunc(get(r)[2]))
1109 if opts['files_with_matches']:
1104 if opts['files_with_matches']:
1110 c = (fn, r)
1105 c = (fn, r)
1111 if c in filerevmatches:
1106 if c in filerevmatches:
1112 continue
1107 continue
1113 filerevmatches[c] = 1
1108 filerevmatches[c] = 1
1114 else:
1109 else:
1115 cols.append(l.line)
1110 cols.append(l.line)
1116 ui.write(sep.join(cols), eol)
1111 ui.write(sep.join(cols), eol)
1117 found = True
1112 found = True
1118 return found
1113 return found
1119
1114
1120 fstate = {}
1115 fstate = {}
1121 skip = {}
1116 skip = {}
1122 get = util.cachefunc(lambda r: repo[r].changeset())
1117 get = util.cachefunc(lambda r: repo[r].changeset())
1123 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1118 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1124 found = False
1119 found = False
1125 follow = opts.get('follow')
1120 follow = opts.get('follow')
1126 for st, rev, fns in changeiter:
1121 for st, rev, fns in changeiter:
1127 if st == 'window':
1122 if st == 'window':
1128 matches.clear()
1123 matches.clear()
1129 elif st == 'add':
1124 elif st == 'add':
1130 ctx = repo[rev]
1125 ctx = repo[rev]
1131 matches[rev] = {}
1126 matches[rev] = {}
1132 for fn in fns:
1127 for fn in fns:
1133 if fn in skip:
1128 if fn in skip:
1134 continue
1129 continue
1135 try:
1130 try:
1136 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1131 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1137 fstate.setdefault(fn, [])
1132 fstate.setdefault(fn, [])
1138 if follow:
1133 if follow:
1139 copied = getfile(fn).renamed(ctx.filenode(fn))
1134 copied = getfile(fn).renamed(ctx.filenode(fn))
1140 if copied:
1135 if copied:
1141 copies.setdefault(rev, {})[fn] = copied[0]
1136 copies.setdefault(rev, {})[fn] = copied[0]
1142 except revlog.LookupError:
1137 except revlog.LookupError:
1143 pass
1138 pass
1144 elif st == 'iter':
1139 elif st == 'iter':
1145 states = matches[rev].items()
1140 for fn, m in util.sort(matches[rev].items()):
1146 states.sort()
1147 for fn, m in states:
1148 copy = copies.get(rev, {}).get(fn)
1141 copy = copies.get(rev, {}).get(fn)
1149 if fn in skip:
1142 if fn in skip:
1150 if copy:
1143 if copy:
1151 skip[copy] = True
1144 skip[copy] = True
1152 continue
1145 continue
1153 if fn in prev or fstate[fn]:
1146 if fn in prev or fstate[fn]:
1154 r = display(fn, rev, m, fstate[fn])
1147 r = display(fn, rev, m, fstate[fn])
1155 found = found or r
1148 found = found or r
1156 if r and not opts['all']:
1149 if r and not opts['all']:
1157 skip[fn] = True
1150 skip[fn] = True
1158 if copy:
1151 if copy:
1159 skip[copy] = True
1152 skip[copy] = True
1160 fstate[fn] = m
1153 fstate[fn] = m
1161 if copy:
1154 if copy:
1162 fstate[copy] = m
1155 fstate[copy] = m
1163 prev[fn] = rev
1156 prev[fn] = rev
1164
1157
1165 fstate = fstate.items()
1158 for fn, state in util.sort(fstate.items()):
1166 fstate.sort()
1167 for fn, state in fstate:
1168 if fn in skip:
1159 if fn in skip:
1169 continue
1160 continue
1170 if fn not in copies.get(prev[fn], {}):
1161 if fn not in copies.get(prev[fn], {}):
1171 found = display(fn, rev, {}, state) or found
1162 found = display(fn, rev, {}, state) or found
1172 return (not found and 1) or 0
1163 return (not found and 1) or 0
1173
1164
1174 def heads(ui, repo, *branchrevs, **opts):
1165 def heads(ui, repo, *branchrevs, **opts):
1175 """show current repository heads or show branch heads
1166 """show current repository heads or show branch heads
1176
1167
1177 With no arguments, show all repository head changesets.
1168 With no arguments, show all repository head changesets.
1178
1169
1179 If branch or revision names are given, this will show the heads of
1170 If branch or revision names are given, this will show the heads of
1180 the specified branches or the branches those revisions are tagged
1171 the specified branches or the branches those revisions are tagged
1181 with.
1172 with.
1182
1173
1183 Repository "heads" are changesets that don't have child
1174 Repository "heads" are changesets that don't have child
1184 changesets. They are where development generally takes place and
1175 changesets. They are where development generally takes place and
1185 are the usual targets for update and merge operations.
1176 are the usual targets for update and merge operations.
1186
1177
1187 Branch heads are changesets that have a given branch tag, but have
1178 Branch heads are changesets that have a given branch tag, but have
1188 no child changesets with that tag. They are usually where
1179 no child changesets with that tag. They are usually where
1189 development on the given branch takes place.
1180 development on the given branch takes place.
1190 """
1181 """
1191 if opts['rev']:
1182 if opts['rev']:
1192 start = repo.lookup(opts['rev'])
1183 start = repo.lookup(opts['rev'])
1193 else:
1184 else:
1194 start = None
1185 start = None
1195 if not branchrevs:
1186 if not branchrevs:
1196 # Assume we're looking at repo-wide heads if no revs were specified.
1187 # Assume we're looking at repo-wide heads if no revs were specified.
1197 heads = repo.heads(start)
1188 heads = repo.heads(start)
1198 else:
1189 else:
1199 heads = []
1190 heads = []
1200 visitedset = util.set()
1191 visitedset = util.set()
1201 for branchrev in branchrevs:
1192 for branchrev in branchrevs:
1202 branch = repo[branchrev].branch()
1193 branch = repo[branchrev].branch()
1203 if branch in visitedset:
1194 if branch in visitedset:
1204 continue
1195 continue
1205 visitedset.add(branch)
1196 visitedset.add(branch)
1206 bheads = repo.branchheads(branch, start)
1197 bheads = repo.branchheads(branch, start)
1207 if not bheads:
1198 if not bheads:
1208 if branch != branchrev:
1199 if branch != branchrev:
1209 ui.warn(_("no changes on branch %s containing %s are "
1200 ui.warn(_("no changes on branch %s containing %s are "
1210 "reachable from %s\n")
1201 "reachable from %s\n")
1211 % (branch, branchrev, opts['rev']))
1202 % (branch, branchrev, opts['rev']))
1212 else:
1203 else:
1213 ui.warn(_("no changes on branch %s are reachable from %s\n")
1204 ui.warn(_("no changes on branch %s are reachable from %s\n")
1214 % (branch, opts['rev']))
1205 % (branch, opts['rev']))
1215 heads.extend(bheads)
1206 heads.extend(bheads)
1216 if not heads:
1207 if not heads:
1217 return 1
1208 return 1
1218 displayer = cmdutil.show_changeset(ui, repo, opts)
1209 displayer = cmdutil.show_changeset(ui, repo, opts)
1219 for n in heads:
1210 for n in heads:
1220 displayer.show(changenode=n)
1211 displayer.show(changenode=n)
1221
1212
1222 def help_(ui, name=None, with_version=False):
1213 def help_(ui, name=None, with_version=False):
1223 """show help for a command, extension, or list of commands
1214 """show help for a command, extension, or list of commands
1224
1215
1225 With no arguments, print a list of commands and short help.
1216 With no arguments, print a list of commands and short help.
1226
1217
1227 Given a command name, print help for that command.
1218 Given a command name, print help for that command.
1228
1219
1229 Given an extension name, print help for that extension, and the
1220 Given an extension name, print help for that extension, and the
1230 commands it provides."""
1221 commands it provides."""
1231 option_lists = []
1222 option_lists = []
1232
1223
1233 def addglobalopts(aliases):
1224 def addglobalopts(aliases):
1234 if ui.verbose:
1225 if ui.verbose:
1235 option_lists.append((_("global options:"), globalopts))
1226 option_lists.append((_("global options:"), globalopts))
1236 if name == 'shortlist':
1227 if name == 'shortlist':
1237 option_lists.append((_('use "hg help" for the full list '
1228 option_lists.append((_('use "hg help" for the full list '
1238 'of commands'), ()))
1229 'of commands'), ()))
1239 else:
1230 else:
1240 if name == 'shortlist':
1231 if name == 'shortlist':
1241 msg = _('use "hg help" for the full list of commands '
1232 msg = _('use "hg help" for the full list of commands '
1242 'or "hg -v" for details')
1233 'or "hg -v" for details')
1243 elif aliases:
1234 elif aliases:
1244 msg = _('use "hg -v help%s" to show aliases and '
1235 msg = _('use "hg -v help%s" to show aliases and '
1245 'global options') % (name and " " + name or "")
1236 'global options') % (name and " " + name or "")
1246 else:
1237 else:
1247 msg = _('use "hg -v help %s" to show global options') % name
1238 msg = _('use "hg -v help %s" to show global options') % name
1248 option_lists.append((msg, ()))
1239 option_lists.append((msg, ()))
1249
1240
1250 def helpcmd(name):
1241 def helpcmd(name):
1251 if with_version:
1242 if with_version:
1252 version_(ui)
1243 version_(ui)
1253 ui.write('\n')
1244 ui.write('\n')
1254
1245
1255 try:
1246 try:
1256 aliases, i = cmdutil.findcmd(ui, name, table)
1247 aliases, i = cmdutil.findcmd(ui, name, table)
1257 except cmdutil.AmbiguousCommand, inst:
1248 except cmdutil.AmbiguousCommand, inst:
1258 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1249 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1259 helplist(_('list of commands:\n\n'), select)
1250 helplist(_('list of commands:\n\n'), select)
1260 return
1251 return
1261
1252
1262 # synopsis
1253 # synopsis
1263 ui.write("%s\n" % i[2])
1254 ui.write("%s\n" % i[2])
1264
1255
1265 # aliases
1256 # aliases
1266 if not ui.quiet and len(aliases) > 1:
1257 if not ui.quiet and len(aliases) > 1:
1267 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1258 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1268
1259
1269 # description
1260 # description
1270 doc = i[0].__doc__
1261 doc = i[0].__doc__
1271 if not doc:
1262 if not doc:
1272 doc = _("(No help text available)")
1263 doc = _("(No help text available)")
1273 if ui.quiet:
1264 if ui.quiet:
1274 doc = doc.splitlines(0)[0]
1265 doc = doc.splitlines(0)[0]
1275 ui.write("\n%s\n" % doc.rstrip())
1266 ui.write("\n%s\n" % doc.rstrip())
1276
1267
1277 if not ui.quiet:
1268 if not ui.quiet:
1278 # options
1269 # options
1279 if i[1]:
1270 if i[1]:
1280 option_lists.append((_("options:\n"), i[1]))
1271 option_lists.append((_("options:\n"), i[1]))
1281
1272
1282 addglobalopts(False)
1273 addglobalopts(False)
1283
1274
1284 def helplist(header, select=None):
1275 def helplist(header, select=None):
1285 h = {}
1276 h = {}
1286 cmds = {}
1277 cmds = {}
1287 for c, e in table.items():
1278 for c, e in table.items():
1288 f = c.split("|", 1)[0]
1279 f = c.split("|", 1)[0]
1289 if select and not select(f):
1280 if select and not select(f):
1290 continue
1281 continue
1291 if name == "shortlist" and not f.startswith("^"):
1282 if name == "shortlist" and not f.startswith("^"):
1292 continue
1283 continue
1293 f = f.lstrip("^")
1284 f = f.lstrip("^")
1294 if not ui.debugflag and f.startswith("debug"):
1285 if not ui.debugflag and f.startswith("debug"):
1295 continue
1286 continue
1296 doc = e[0].__doc__
1287 doc = e[0].__doc__
1297 if not doc:
1288 if not doc:
1298 doc = _("(No help text available)")
1289 doc = _("(No help text available)")
1299 h[f] = doc.splitlines(0)[0].rstrip()
1290 h[f] = doc.splitlines(0)[0].rstrip()
1300 cmds[f] = c.lstrip("^")
1291 cmds[f] = c.lstrip("^")
1301
1292
1302 if not h:
1293 if not h:
1303 ui.status(_('no commands defined\n'))
1294 ui.status(_('no commands defined\n'))
1304 return
1295 return
1305
1296
1306 ui.status(header)
1297 ui.status(header)
        fns = util.sort(h)
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        v = None
        for i, d in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
                doc = d
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if ui.verbose:
        ui.write(_("\nspecial help topics:\n"))
        topics = []
        for i, d in help.helptable:
            l = i.split('|')
            topics.append((", ".join(l[:-1]), l[-1]))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s %s\n" % (topics_len, t, desc))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))
1427
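    # the and/or construct stands in for a conditional expression: the full
    # hexadecimal node id is used with --debug, the abbreviated short form
    # otherwise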
    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags)
    output = []

    if source:
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo[rev]
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1479
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                if os.path.exists(pf):
                    data = patch.extract(ui, file(pf, 'rb'))
                else:
                    data = patch.extract(ui, urllib.urlopen(pf))
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backs up an up-to-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1595
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repositories, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
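            # "HG10BZ" is a bzip2-compressed bundle, "HG10UN" an uncompressed
            # one (presumably preferable when the data was already streamed
            # from a remote repository)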
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            count += 1
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1661
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1676
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    ret = 1
    m = cmdutil.match(repo, pats, opts, default='relglob')
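    # silence the matcher's complaints about patterns that match nothing;
    # locate reports its result through the exit code instead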
    m.bad = lambda x,y: False
    for abs in repo.walk(m, node):
        if not node and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret
1715
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = len(repo)
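    # rename caches: rcache maps file name -> {linkrev: rename info},
    # ncache maps file name -> {file node: rename info}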
    rcache = {}
    ncache = {}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except revlog.LookupError:
            pass
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts['only_branch']

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1838
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev
1858
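    # verbose decoration per manifest flag: 'l' marks a symlink, 'x' an
    # executable file, the empty flag a regular file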
    decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
    ctx = repo[node]
    for f in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[f]))
        if ui.verbose:
            ui.write(decor[ctx.flags(f)])
        ui.write("%s\n" % f)
1867
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other head,
    then the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads()
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
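        # two heads on the branch: merge with whichever head is not the
        # working directory's parent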
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    return hg.merge(repo, node, force=force)
1911
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        if count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        count += 1
        displayer.show(changenode=n)

def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, the revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo[rev]
    else:
        ctx = repo[None]

    if file_:
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % util.hidepassword(path))
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, util.hidepassword(path)))
2007
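# shared helper run after changesets have been added locally (e.g. by pull):
# decides whether to update the working copy and which hint to print,
# based on how many heads the operation added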
def postincoming(ui, repo, modheads, optupdate, checkout):
    if modheads == 0:
        return
    if optupdate:
        if modheads <= 1 or checkout:
            return hg.update(repo, checkout)
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except NoCapability:
            error = _("Other repository doesn't support revision lookup, "
                      "so a rev cannot be specified.")
            raise util.Abort(error)

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], checkout)
2077
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to pull and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push. If -r is used, the named changeset and all its
    ancestors will be pushed to the remote repository.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % util.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
2119
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Low-level commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release; please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = cmdutil.logmessage(opts)

    files = cmdutil.match(repo, pats, opts).files()
    if opts['files']:
        files += open(opts['files']).read().splitlines()

    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1
2159
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the entire
    project history. -A can be used to remove only files that have already
    been deleted, -f can be used to force deletion, and -Af can be used
    to remove files from the next revision without deleting them.

    The following table details the behavior of remove for different file
    states (columns) and option combinations (rows). The file states are
    Added, Clean, Modified and Missing (as reported by hg status). The
    actions are Warn, Remove (from branch) and Delete (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    def warn(files, reason):
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        remove, forget = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))
2207
2218 files = remove + forget
2208 for f in util.sort(remove + forget):
2219 files.sort()
2220 for f in files:
2221 if ui.verbose or not m.exact(f):
2209 if ui.verbose or not m.exact(f):
2222 ui.status(_('removing %s\n') % m.rel(f))
2210 ui.status(_('removing %s\n') % m.rel(f))
2223
2211
2224 repo.forget(forget)
2212 repo.forget(forget)
2225 repo.remove(remove, unlink=not after)
2213 repo.remove(remove, unlink=not after)
2226
2214
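This is the first of the two call sites visible in this part of the diff that adopt the util.sort helper added by this changeset: the copy-then-sort sequence (files = remove + forget; files.sort(); for f in files:) collapses into a single for f in util.sort(remove + forget): loop. The helper itself lives in mercurial/util.py, which the truncated diff does not show; a minimal sketch, assuming it only needs to return its argument as a sorted list, might look like:

    def sort(l):
        # hypothetical sketch of the helper; the real definition is in util.py
        if not isinstance(l, list):
            l = list(l)   # copy non-list iterables before sorting
        l.sort()          # sort the (possibly fresh) list in place
        return l

Since remove + forget already builds a fresh list, sorting it in place is safe, and the loop then sees the file names in lexicographic order.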
2227 def rename(ui, repo, *pats, **opts):
2215 def rename(ui, repo, *pats, **opts):
2228 """rename files; equivalent of copy + remove
2216 """rename files; equivalent of copy + remove
2229
2217
2230 Mark dest as copies of sources; mark sources for deletion. If
2218 Mark dest as copies of sources; mark sources for deletion. If
2231 dest is a directory, copies are put in that directory. If dest is
2219 dest is a directory, copies are put in that directory. If dest is
2232 a file, there can only be one source.
2220 a file, there can only be one source.
2233
2221
2234 By default, this command copies the contents of files as they
2222 By default, this command copies the contents of files as they
2235 stand in the working directory. If invoked with --after, the
2223 stand in the working directory. If invoked with --after, the
2236 operation is recorded, but no copying is performed.
2224 operation is recorded, but no copying is performed.
2237
2225
2238 This command takes effect in the next commit. To undo a rename
2226 This command takes effect in the next commit. To undo a rename
2239 before that, see hg revert.
2227 before that, see hg revert.
2240 """
2228 """
2241 wlock = repo.wlock(False)
2229 wlock = repo.wlock(False)
2242 try:
2230 try:
2243 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2231 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2244 finally:
2232 finally:
2245 del wlock
2233 del wlock
2246
2234
2247 def resolve(ui, repo, *pats, **opts):
2235 def resolve(ui, repo, *pats, **opts):
2248 """resolve file merges from a branch merge or update
2236 """resolve file merges from a branch merge or update
2249
2237
2250 This command will attempt to resolve unresolved merges from the
2238 This command will attempt to resolve unresolved merges from the
2251 last update or merge command. This will use the local file
2239 last update or merge command. This will use the local file
2252 revision preserved at the last update or merge to cleanly retry
2240 revision preserved at the last update or merge to cleanly retry
2253 the file merge attempt. With no file or options specified, this
2241 the file merge attempt. With no file or options specified, this
2254 command will attempt to resolve all unresolved files.
2242 command will attempt to resolve all unresolved files.
2255
2243
2256 The codes used to show the status of files are:
2244 The codes used to show the status of files are:
2257 U = unresolved
2245 U = unresolved
2258 R = resolved
2246 R = resolved
2259 """
2247 """
2260
2248
2261 if len([x for x in opts if opts[x]]) > 1:
2249 if len([x for x in opts if opts[x]]) > 1:
2262 raise util.Abort(_("too many options specified"))
2250 raise util.Abort(_("too many options specified"))
2263
2251
2264 ms = merge_.mergestate(repo)
2252 ms = merge_.mergestate(repo)
2265 m = cmdutil.match(repo, pats, opts)
2253 m = cmdutil.match(repo, pats, opts)
2266
2254
2267 for f in ms:
2255 for f in ms:
2268 if m(f):
2256 if m(f):
2269 if opts.get("list"):
2257 if opts.get("list"):
2270 ui.write("%s %s\n" % (ms[f].upper(), f))
2258 ui.write("%s %s\n" % (ms[f].upper(), f))
2271 elif opts.get("mark"):
2259 elif opts.get("mark"):
2272 ms.mark(f, "r")
2260 ms.mark(f, "r")
2273 elif opts.get("unmark"):
2261 elif opts.get("unmark"):
2274 ms.mark(f, "u")
2262 ms.mark(f, "u")
2275 else:
2263 else:
2276 wctx = repo[None]
2264 wctx = repo[None]
2277 mctx = wctx.parents()[-1]
2265 mctx = wctx.parents()[-1]
2278 ms.resolve(f, wctx, mctx)
2266 ms.resolve(f, wctx, mctx)
2279
2267
2280 def revert(ui, repo, *pats, **opts):
2268 def revert(ui, repo, *pats, **opts):
2281 """restore individual files or dirs to an earlier state
2269 """restore individual files or dirs to an earlier state
2282
2270
2283 (use update -r to check out earlier revisions; revert does not
2271 (use update -r to check out earlier revisions; revert does not
2284 change the working dir parents)
2272 change the working dir parents)
2285
2273
2286 With no revision specified, revert the named files or directories
2274 With no revision specified, revert the named files or directories
2287 to the contents they had in the parent of the working directory.
2275 to the contents they had in the parent of the working directory.
2288 This restores the contents of the affected files to an unmodified
2276 This restores the contents of the affected files to an unmodified
2289 state and unschedules adds, removes, copies, and renames. If the
2277 state and unschedules adds, removes, copies, and renames. If the
2290 working directory has two parents, you must explicitly specify the
2278 working directory has two parents, you must explicitly specify the
2291 revision to revert to.
2279 revision to revert to.
2292
2280
2293 Using the -r option, revert the given files or directories to their
2281 Using the -r option, revert the given files or directories to their
2294 contents as of a specific revision. This can be helpful to "roll
2282 contents as of a specific revision. This can be helpful to "roll
2295 back" some or all of an earlier change.
2283 back" some or all of an earlier change.
2296 See 'hg help dates' for a list of formats valid for -d/--date.
2284 See 'hg help dates' for a list of formats valid for -d/--date.
2297
2285
2298 Revert modifies the working directory. It does not commit any
2286 Revert modifies the working directory. It does not commit any
2299 changes, or change the parent of the working directory. If you
2287 changes, or change the parent of the working directory. If you
2300 revert to a revision other than the parent of the working
2288 revert to a revision other than the parent of the working
2301 directory, the reverted files will thus appear modified
2289 directory, the reverted files will thus appear modified
2302 afterwards.
2290 afterwards.
2303
2291
2304 If a file has been deleted, it is restored. If the executable
2292 If a file has been deleted, it is restored. If the executable
2305 mode of a file was changed, it is reset.
2293 mode of a file was changed, it is reset.
2306
2294
2307 If names are given, all files matching the names are reverted.
2295 If names are given, all files matching the names are reverted.
2308 If no arguments are given, no files are reverted.
2296 If no arguments are given, no files are reverted.
2309
2297
2310 Modified files are saved with a .orig suffix before reverting.
2298 Modified files are saved with a .orig suffix before reverting.
2311 To disable these backups, use --no-backup.
2299 To disable these backups, use --no-backup.
2312 """
2300 """
2313
2301
2314 if opts["date"]:
2302 if opts["date"]:
2315 if opts["rev"]:
2303 if opts["rev"]:
2316 raise util.Abort(_("you can't specify a revision and a date"))
2304 raise util.Abort(_("you can't specify a revision and a date"))
2317 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2305 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2318
2306
2319 if not pats and not opts['all']:
2307 if not pats and not opts['all']:
2320 raise util.Abort(_('no files or directories specified; '
2308 raise util.Abort(_('no files or directories specified; '
2321 'use --all to revert the whole repo'))
2309 'use --all to revert the whole repo'))
2322
2310
2323 parent, p2 = repo.dirstate.parents()
2311 parent, p2 = repo.dirstate.parents()
2324 if not opts['rev'] and p2 != nullid:
2312 if not opts['rev'] and p2 != nullid:
2325 raise util.Abort(_('uncommitted merge - please provide a '
2313 raise util.Abort(_('uncommitted merge - please provide a '
2326 'specific revision'))
2314 'specific revision'))
2327 ctx = repo[opts['rev']]
2315 ctx = repo[opts['rev']]
2328 node = ctx.node()
2316 node = ctx.node()
2329 mf = ctx.manifest()
2317 mf = ctx.manifest()
2330 if node == parent:
2318 if node == parent:
2331 pmf = mf
2319 pmf = mf
2332 else:
2320 else:
2333 pmf = None
2321 pmf = None
2334
2322
2335 # need all matching names in dirstate and manifest of target rev,
2323 # need all matching names in dirstate and manifest of target rev,
2336 # so have to walk both. do not print errors if files exist in one
2324 # so have to walk both. do not print errors if files exist in one
2337 # but not other.
2325 # but not other.
2338
2326
2339 names = {}
2327 names = {}
2340
2328
2341 wlock = repo.wlock()
2329 wlock = repo.wlock()
2342 try:
2330 try:
2343 # walk dirstate.
2331 # walk dirstate.
2344 files = []
2332 files = []
2345
2333
2346 m = cmdutil.match(repo, pats, opts)
2334 m = cmdutil.match(repo, pats, opts)
2347 m.bad = lambda x,y: False
2335 m.bad = lambda x,y: False
2348 for abs in repo.walk(m):
2336 for abs in repo.walk(m):
2349 names[abs] = m.rel(abs), m.exact(abs)
2337 names[abs] = m.rel(abs), m.exact(abs)
2350
2338
2351 # walk target manifest.
2339 # walk target manifest.
2352
2340
2353 def badfn(path, msg):
2341 def badfn(path, msg):
2354 if path in names:
2342 if path in names:
2355 return False
2343 return False
2356 path_ = path + '/'
2344 path_ = path + '/'
2357 for f in names:
2345 for f in names:
2358 if f.startswith(path_):
2346 if f.startswith(path_):
2359 return False
2347 return False
2360 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2348 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2361 return False
2349 return False
2362
2350
2363 m = cmdutil.match(repo, pats, opts)
2351 m = cmdutil.match(repo, pats, opts)
2364 m.bad = badfn
2352 m.bad = badfn
2365 for abs in repo.walk(m, node=node):
2353 for abs in repo.walk(m, node=node):
2366 if abs not in names:
2354 if abs not in names:
2367 names[abs] = m.rel(abs), m.exact(abs)
2355 names[abs] = m.rel(abs), m.exact(abs)
2368
2356
2369 m = cmdutil.matchfiles(repo, names)
2357 m = cmdutil.matchfiles(repo, names)
2370 changes = repo.status(match=m)[:4]
2358 changes = repo.status(match=m)[:4]
2371 modified, added, removed, deleted = map(dict.fromkeys, changes)
2359 modified, added, removed, deleted = map(dict.fromkeys, changes)
2372
2360
2373 # if f is a rename, also revert the source
2361 # if f is a rename, also revert the source
2374 cwd = repo.getcwd()
2362 cwd = repo.getcwd()
2375 for f in added:
2363 for f in added:
2376 src = repo.dirstate.copied(f)
2364 src = repo.dirstate.copied(f)
2377 if src and src not in names and repo.dirstate[src] == 'r':
2365 if src and src not in names and repo.dirstate[src] == 'r':
2378 removed[src] = None
2366 removed[src] = None
2379 names[src] = (repo.pathto(src, cwd), True)
2367 names[src] = (repo.pathto(src, cwd), True)
2380
2368
2381 def removeforget(abs):
2369 def removeforget(abs):
2382 if repo.dirstate[abs] == 'a':
2370 if repo.dirstate[abs] == 'a':
2383 return _('forgetting %s\n')
2371 return _('forgetting %s\n')
2384 return _('removing %s\n')
2372 return _('removing %s\n')
2385
2373
2386 revert = ([], _('reverting %s\n'))
2374 revert = ([], _('reverting %s\n'))
2387 add = ([], _('adding %s\n'))
2375 add = ([], _('adding %s\n'))
2388 remove = ([], removeforget)
2376 remove = ([], removeforget)
2389 undelete = ([], _('undeleting %s\n'))
2377 undelete = ([], _('undeleting %s\n'))
2390
2378
2391 disptable = (
2379 disptable = (
2392 # dispatch table:
2380 # dispatch table:
2393 # file state
2381 # file state
2394 # action if in target manifest
2382 # action if in target manifest
2395 # action if not in target manifest
2383 # action if not in target manifest
2396 # make backup if in target manifest
2384 # make backup if in target manifest
2397 # make backup if not in target manifest
2385 # make backup if not in target manifest
2398 (modified, revert, remove, True, True),
2386 (modified, revert, remove, True, True),
2399 (added, revert, remove, True, False),
2387 (added, revert, remove, True, False),
2400 (removed, undelete, None, False, False),
2388 (removed, undelete, None, False, False),
2401 (deleted, revert, remove, False, False),
2389 (deleted, revert, remove, False, False),
2402 )
2390 )
2403
2391
2404 entries = names.items()
2392 for abs, (rel, exact) in util.sort(names.items()):
2405 entries.sort()
2406
2407 for abs, (rel, exact) in entries:
2408 mfentry = mf.get(abs)
2393 mfentry = mf.get(abs)
2409 target = repo.wjoin(abs)
2394 target = repo.wjoin(abs)
2410 def handle(xlist, dobackup):
2395 def handle(xlist, dobackup):
2411 xlist[0].append(abs)
2396 xlist[0].append(abs)
2412 if dobackup and not opts['no_backup'] and util.lexists(target):
2397 if dobackup and not opts['no_backup'] and util.lexists(target):
2413 bakname = "%s.orig" % rel
2398 bakname = "%s.orig" % rel
2414 ui.note(_('saving current version of %s as %s\n') %
2399 ui.note(_('saving current version of %s as %s\n') %
2415 (rel, bakname))
2400 (rel, bakname))
2416 if not opts.get('dry_run'):
2401 if not opts.get('dry_run'):
2417 util.copyfile(target, bakname)
2402 util.copyfile(target, bakname)
2418 if ui.verbose or not exact:
2403 if ui.verbose or not exact:
2419 msg = xlist[1]
2404 msg = xlist[1]
2420 if not isinstance(msg, basestring):
2405 if not isinstance(msg, basestring):
2421 msg = msg(abs)
2406 msg = msg(abs)
2422 ui.status(msg % rel)
2407 ui.status(msg % rel)
2423 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2408 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2424 if abs not in table: continue
2409 if abs not in table: continue
2425 # file has changed in dirstate
2410 # file has changed in dirstate
2426 if mfentry:
2411 if mfentry:
2427 handle(hitlist, backuphit)
2412 handle(hitlist, backuphit)
2428 elif misslist is not None:
2413 elif misslist is not None:
2429 handle(misslist, backupmiss)
2414 handle(misslist, backupmiss)
2430 break
2415 break
2431 else:
2416 else:
2432 if abs not in repo.dirstate:
2417 if abs not in repo.dirstate:
2433 if mfentry:
2418 if mfentry:
2434 handle(add, True)
2419 handle(add, True)
2435 elif exact:
2420 elif exact:
2436 ui.warn(_('file not managed: %s\n') % rel)
2421 ui.warn(_('file not managed: %s\n') % rel)
2437 continue
2422 continue
2438 # file has not changed in dirstate
2423 # file has not changed in dirstate
2439 if node == parent:
2424 if node == parent:
2440 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2425 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2441 continue
2426 continue
2442 if pmf is None:
2427 if pmf is None:
2443 # only need parent manifest in this unlikely case,
2428 # only need parent manifest in this unlikely case,
2444 # so do not read by default
2429 # so do not read by default
2445 pmf = repo[parent].manifest()
2430 pmf = repo[parent].manifest()
2446 if abs in pmf:
2431 if abs in pmf:
2447 if mfentry:
2432 if mfentry:
2448 # if version of file is same in parent and target
2433 # if version of file is same in parent and target
2449 # manifests, do nothing
2434 # manifests, do nothing
2450 if (pmf[abs] != mfentry or
2435 if (pmf[abs] != mfentry or
2451 pmf.flags(abs) != mf.flags(abs)):
2436 pmf.flags(abs) != mf.flags(abs)):
2452 handle(revert, False)
2437 handle(revert, False)
2453 else:
2438 else:
2454 handle(remove, False)
2439 handle(remove, False)
2455
2440
2456 if not opts.get('dry_run'):
2441 if not opts.get('dry_run'):
2457 def checkout(f):
2442 def checkout(f):
2458 fc = ctx[f]
2443 fc = ctx[f]
2459 repo.wwrite(f, fc.data(), fc.flags())
2444 repo.wwrite(f, fc.data(), fc.flags())
2460
2445
2461 audit_path = util.path_auditor(repo.root)
2446 audit_path = util.path_auditor(repo.root)
2462 for f in remove[0]:
2447 for f in remove[0]:
2463 if repo.dirstate[f] == 'a':
2448 if repo.dirstate[f] == 'a':
2464 repo.dirstate.forget(f)
2449 repo.dirstate.forget(f)
2465 continue
2450 continue
2466 audit_path(f)
2451 audit_path(f)
2467 try:
2452 try:
2468 util.unlink(repo.wjoin(f))
2453 util.unlink(repo.wjoin(f))
2469 except OSError:
2454 except OSError:
2470 pass
2455 pass
2471 repo.dirstate.remove(f)
2456 repo.dirstate.remove(f)
2472
2457
2473 normal = None
2458 normal = None
2474 if node == parent:
2459 if node == parent:
2475 # We're reverting to our parent. If possible, we'd like status
2460 # We're reverting to our parent. If possible, we'd like status
2476 # to report the file as clean. We have to use normallookup for
2461 # to report the file as clean. We have to use normallookup for
2477 # merges to avoid losing information about merged/dirty files.
2462 # merges to avoid losing information about merged/dirty files.
2478 if p2 != nullid:
2463 if p2 != nullid:
2479 normal = repo.dirstate.normallookup
2464 normal = repo.dirstate.normallookup
2480 else:
2465 else:
2481 normal = repo.dirstate.normal
2466 normal = repo.dirstate.normal
2482 for f in revert[0]:
2467 for f in revert[0]:
2483 checkout(f)
2468 checkout(f)
2484 if normal:
2469 if normal:
2485 normal(f)
2470 normal(f)
2486
2471
2487 for f in add[0]:
2472 for f in add[0]:
2488 checkout(f)
2473 checkout(f)
2489 repo.dirstate.add(f)
2474 repo.dirstate.add(f)
2490
2475
2491 normal = repo.dirstate.normallookup
2476 normal = repo.dirstate.normallookup
2492 if node == parent and p2 == nullid:
2477 if node == parent and p2 == nullid:
2493 normal = repo.dirstate.normal
2478 normal = repo.dirstate.normal
2494 for f in undelete[0]:
2479 for f in undelete[0]:
2495 checkout(f)
2480 checkout(f)
2496 normal(f)
2481 normal(f)
2497
2482
2498 finally:
2483 finally:
2499 del wlock
2484 del wlock
2500
2485
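The dispatch loop in revert() above is the second visible call site: entries = names.items(); entries.sort(); for abs, (rel, exact) in entries: becomes for abs, (rel, exact) in util.sort(names.items()):. Sorting the (abs, (rel, exact)) tuples keys the iteration on the repository-relative path, so candidates are matched against the dispatch table in path order. A small illustration with hypothetical file names, using the built-in sorted(), which yields the same ordering:

    names = {'b.txt': ('b.txt', True), 'a/x.c': ('a/x.c', False)}
    for abs, (rel, exact) in sorted(names.items()):
        print abs, rel, exact
    # a/x.c a/x.c False
    # b.txt b.txt True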
2501 def rollback(ui, repo):
2486 def rollback(ui, repo):
2502 """roll back the last transaction
2487 """roll back the last transaction
2503
2488
2504 This command should be used with care. There is only one level of
2489 This command should be used with care. There is only one level of
2505 rollback, and there is no way to undo a rollback. It will also
2490 rollback, and there is no way to undo a rollback. It will also
2506 restore the dirstate at the time of the last transaction, losing
2491 restore the dirstate at the time of the last transaction, losing
2507 any dirstate changes since that time.
2492 any dirstate changes since that time.
2508
2493
2509 Transactions are used to encapsulate the effects of all commands
2494 Transactions are used to encapsulate the effects of all commands
2510 that create new changesets or propagate existing changesets into a
2495 that create new changesets or propagate existing changesets into a
2511 repository. For example, the following commands are transactional,
2496 repository. For example, the following commands are transactional,
2512 and their effects can be rolled back:
2497 and their effects can be rolled back:
2513
2498
2514 commit
2499 commit
2515 import
2500 import
2516 pull
2501 pull
2517 push (with this repository as destination)
2502 push (with this repository as destination)
2518 unbundle
2503 unbundle
2519
2504
2520 This command is not intended for use on public repositories. Once
2505 This command is not intended for use on public repositories. Once
2521 changes are visible for pull by other users, rolling a transaction
2506 changes are visible for pull by other users, rolling a transaction
2522 back locally is ineffective (someone else may already have pulled
2507 back locally is ineffective (someone else may already have pulled
2523 the changes). Furthermore, a race is possible with readers of the
2508 the changes). Furthermore, a race is possible with readers of the
2524 repository; for example an in-progress pull from the repository
2509 repository; for example an in-progress pull from the repository
2525 may fail if a rollback is performed.
2510 may fail if a rollback is performed.
2526 """
2511 """
2527 repo.rollback()
2512 repo.rollback()
2528
2513
2529 def root(ui, repo):
2514 def root(ui, repo):
2530 """print the root (top) of the current working dir
2515 """print the root (top) of the current working dir
2531
2516
2532 Print the root directory of the current repository.
2517 Print the root directory of the current repository.
2533 """
2518 """
2534 ui.write(repo.root + "\n")
2519 ui.write(repo.root + "\n")
2535
2520
2536 def serve(ui, repo, **opts):
2521 def serve(ui, repo, **opts):
2537 """export the repository via HTTP
2522 """export the repository via HTTP
2538
2523
2539 Start a local HTTP repository browser and pull server.
2524 Start a local HTTP repository browser and pull server.
2540
2525
2541 By default, the server logs accesses to stdout and errors to
2526 By default, the server logs accesses to stdout and errors to
2542 stderr. Use the "-A" and "-E" options to log to files.
2527 stderr. Use the "-A" and "-E" options to log to files.
2543 """
2528 """
2544
2529
2545 if opts["stdio"]:
2530 if opts["stdio"]:
2546 if repo is None:
2531 if repo is None:
2547 raise RepoError(_("There is no Mercurial repository here"
2532 raise RepoError(_("There is no Mercurial repository here"
2548 " (.hg not found)"))
2533 " (.hg not found)"))
2549 s = sshserver.sshserver(ui, repo)
2534 s = sshserver.sshserver(ui, repo)
2550 s.serve_forever()
2535 s.serve_forever()
2551
2536
2552 parentui = ui.parentui or ui
2537 parentui = ui.parentui or ui
2553 optlist = ("name templates style address port prefix ipv6"
2538 optlist = ("name templates style address port prefix ipv6"
2554 " accesslog errorlog webdir_conf certificate")
2539 " accesslog errorlog webdir_conf certificate")
2555 for o in optlist.split():
2540 for o in optlist.split():
2556 if opts[o]:
2541 if opts[o]:
2557 parentui.setconfig("web", o, str(opts[o]))
2542 parentui.setconfig("web", o, str(opts[o]))
2558 if (repo is not None) and (repo.ui != parentui):
2543 if (repo is not None) and (repo.ui != parentui):
2559 repo.ui.setconfig("web", o, str(opts[o]))
2544 repo.ui.setconfig("web", o, str(opts[o]))
2560
2545
2561 if repo is None and not ui.config("web", "webdir_conf"):
2546 if repo is None and not ui.config("web", "webdir_conf"):
2562 raise RepoError(_("There is no Mercurial repository here"
2547 raise RepoError(_("There is no Mercurial repository here"
2563 " (.hg not found)"))
2548 " (.hg not found)"))
2564
2549
2565 class service:
2550 class service:
2566 def init(self):
2551 def init(self):
2567 util.set_signal_handler()
2552 util.set_signal_handler()
2568 self.httpd = hgweb.server.create_server(parentui, repo)
2553 self.httpd = hgweb.server.create_server(parentui, repo)
2569
2554
2570 if not ui.verbose: return
2555 if not ui.verbose: return
2571
2556
2572 if self.httpd.prefix:
2557 if self.httpd.prefix:
2573 prefix = self.httpd.prefix.strip('/') + '/'
2558 prefix = self.httpd.prefix.strip('/') + '/'
2574 else:
2559 else:
2575 prefix = ''
2560 prefix = ''
2576
2561
2577 port = ':%d' % self.httpd.port
2562 port = ':%d' % self.httpd.port
2578 if port == ':80':
2563 if port == ':80':
2579 port = ''
2564 port = ''
2580
2565
2581 bindaddr = self.httpd.addr
2566 bindaddr = self.httpd.addr
2582 if bindaddr == '0.0.0.0':
2567 if bindaddr == '0.0.0.0':
2583 bindaddr = '*'
2568 bindaddr = '*'
2584 elif ':' in bindaddr: # IPv6
2569 elif ':' in bindaddr: # IPv6
2585 bindaddr = '[%s]' % bindaddr
2570 bindaddr = '[%s]' % bindaddr
2586
2571
2587 fqaddr = self.httpd.fqaddr
2572 fqaddr = self.httpd.fqaddr
2588 if ':' in fqaddr:
2573 if ':' in fqaddr:
2589 fqaddr = '[%s]' % fqaddr
2574 fqaddr = '[%s]' % fqaddr
2590 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2575 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2591 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2576 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2592
2577
2593 def run(self):
2578 def run(self):
2594 self.httpd.serve_forever()
2579 self.httpd.serve_forever()
2595
2580
2596 service = service()
2581 service = service()
2597
2582
2598 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2583 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2599
2584
2600 def status(ui, repo, *pats, **opts):
2585 def status(ui, repo, *pats, **opts):
2601 """show changed files in the working directory
2586 """show changed files in the working directory
2602
2587
2603 Show status of files in the repository. If names are given, only
2588 Show status of files in the repository. If names are given, only
2604 files that match are shown. Files that are clean or ignored or
2589 files that match are shown. Files that are clean or ignored or
2605 the source of a copy/move operation are not listed unless -c (clean),
2590 the source of a copy/move operation are not listed unless -c (clean),
2606 -i (ignored), -C (copies) or -A is given. Unless options described
2591 -i (ignored), -C (copies) or -A is given. Unless options described
2607 with "show only ..." are given, the options -mardu are used.
2592 with "show only ..." are given, the options -mardu are used.
2608
2593
2609 Option -q/--quiet hides untracked (unknown and ignored) files
2594 Option -q/--quiet hides untracked (unknown and ignored) files
2610 unless explicitly requested with -u/--unknown or -i/--ignored.
2595 unless explicitly requested with -u/--unknown or -i/--ignored.
2611
2596
2612 NOTE: status may appear to disagree with diff if permissions have
2597 NOTE: status may appear to disagree with diff if permissions have
2613 changed or a merge has occurred. The standard diff format does not
2598 changed or a merge has occurred. The standard diff format does not
2614 report permission changes and diff only reports changes relative
2599 report permission changes and diff only reports changes relative
2615 to one merge parent.
2600 to one merge parent.
2616
2601
2617 If one revision is given, it is used as the base revision.
2602 If one revision is given, it is used as the base revision.
2618 If two revisions are given, the difference between them is shown.
2603 If two revisions are given, the difference between them is shown.
2619
2604
2620 The codes used to show the status of files are:
2605 The codes used to show the status of files are:
2621 M = modified
2606 M = modified
2622 A = added
2607 A = added
2623 R = removed
2608 R = removed
2624 C = clean
2609 C = clean
2625 ! = deleted, but still tracked
2610 ! = deleted, but still tracked
2626 ? = not tracked
2611 ? = not tracked
2627 I = ignored
2612 I = ignored
2628 = the previous added file was copied from here
2613 = the previous added file was copied from here
2629 """
2614 """
2630
2615
2631 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2616 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2632 cwd = (pats and repo.getcwd()) or ''
2617 cwd = (pats and repo.getcwd()) or ''
2633 end = opts['print0'] and '\0' or '\n'
2618 end = opts['print0'] and '\0' or '\n'
2634 copy = {}
2619 copy = {}
2635 states = 'modified added removed deleted unknown ignored clean'.split()
2620 states = 'modified added removed deleted unknown ignored clean'.split()
2636 show = [k for k in states if opts[k]]
2621 show = [k for k in states if opts[k]]
2637 if opts['all']:
2622 if opts['all']:
2638 show += ui.quiet and (states[:4] + ['clean']) or states
2623 show += ui.quiet and (states[:4] + ['clean']) or states
2639 if not show:
2624 if not show:
2640 show = ui.quiet and states[:4] or states[:5]
2625 show = ui.quiet and states[:4] or states[:5]
2641
2626
2642 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2627 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2643 'ignored' in show, 'clean' in show, 'unknown' in show)
2628 'ignored' in show, 'clean' in show, 'unknown' in show)
2644 changestates = zip(states, 'MAR!?IC', stat)
2629 changestates = zip(states, 'MAR!?IC', stat)
2645
2630
2646 if (opts['all'] or opts['copies']) and not opts['no_status']:
2631 if (opts['all'] or opts['copies']) and not opts['no_status']:
2647 ctxn = repo[nullid]
2632 ctxn = repo[nullid]
2648 ctx1 = repo[node1]
2633 ctx1 = repo[node1]
2649 ctx2 = repo[node2]
2634 ctx2 = repo[node2]
2650 added = stat[1]
2635 added = stat[1]
2651 if node2 is None:
2636 if node2 is None:
2652 added = stat[0] + stat[1] # merged?
2637 added = stat[0] + stat[1] # merged?
2653
2638
2654 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
2639 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
2655 if k in added:
2640 if k in added:
2656 copy[k] = v
2641 copy[k] = v
2657 elif v in added:
2642 elif v in added:
2658 copy[v] = k
2643 copy[v] = k
2659
2644
2660 for state, char, files in changestates:
2645 for state, char, files in changestates:
2661 if state in show:
2646 if state in show:
2662 format = "%s %%s%s" % (char, end)
2647 format = "%s %%s%s" % (char, end)
2663 if opts['no_status']:
2648 if opts['no_status']:
2664 format = "%%s%s" % end
2649 format = "%%s%s" % end
2665
2650
2666 for f in files:
2651 for f in files:
2667 ui.write(format % repo.pathto(f, cwd))
2652 ui.write(format % repo.pathto(f, cwd))
2668 if f in copy:
2653 if f in copy:
2669 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2654 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2670
2655
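The printing loop above builds its per-state format string in two passes: "%s %%s%s" bakes in the status letter and the line terminator, while the escaped %%s leaves a literal %s behind for the file path (the letter is dropped entirely when --no-status is given). For example, assuming a modified file:

    char, end = 'M', '\n'
    format = "%s %%s%s" % (char, end)   # -> 'M %s\n'
    format % 'mercurial/commands.py'    # -> 'M mercurial/commands.py\n'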
2671 def tag(ui, repo, name1, *names, **opts):
2656 def tag(ui, repo, name1, *names, **opts):
2672 """add one or more tags for the current or given revision
2657 """add one or more tags for the current or given revision
2673
2658
2674 Name a particular revision using <name>.
2659 Name a particular revision using <name>.
2675
2660
2676 Tags are used to name particular revisions of the repository and are
2661 Tags are used to name particular revisions of the repository and are
2677 very useful for comparing different revisions, going back to significant
2662 very useful for comparing different revisions, going back to significant
2678 earlier versions, or marking branch points as releases.
2663 earlier versions, or marking branch points as releases.
2679
2664
2680 If no revision is given, the parent of the working directory is used,
2665 If no revision is given, the parent of the working directory is used,
2681 or tip if no revision is checked out.
2666 or tip if no revision is checked out.
2682
2667
2683 To facilitate version control, distribution, and merging of tags,
2668 To facilitate version control, distribution, and merging of tags,
2684 they are stored as a file named ".hgtags" which is managed
2669 they are stored as a file named ".hgtags" which is managed
2685 similarly to other project files and can be hand-edited if
2670 similarly to other project files and can be hand-edited if
2686 necessary. The file '.hg/localtags' is used for local tags (not
2671 necessary. The file '.hg/localtags' is used for local tags (not
2687 shared among repositories).
2672 shared among repositories).
2688
2673
2689 See 'hg help dates' for a list of formats valid for -d/--date.
2674 See 'hg help dates' for a list of formats valid for -d/--date.
2690 """
2675 """
2691
2676
2692 rev_ = "."
2677 rev_ = "."
2693 names = (name1,) + names
2678 names = (name1,) + names
2694 if len(names) != len(dict.fromkeys(names)):
2679 if len(names) != len(dict.fromkeys(names)):
2695 raise util.Abort(_('tag names must be unique'))
2680 raise util.Abort(_('tag names must be unique'))
2696 for n in names:
2681 for n in names:
2697 if n in ['tip', '.', 'null']:
2682 if n in ['tip', '.', 'null']:
2698 raise util.Abort(_('the name \'%s\' is reserved') % n)
2683 raise util.Abort(_('the name \'%s\' is reserved') % n)
2699 if opts['rev'] and opts['remove']:
2684 if opts['rev'] and opts['remove']:
2700 raise util.Abort(_("--rev and --remove are incompatible"))
2685 raise util.Abort(_("--rev and --remove are incompatible"))
2701 if opts['rev']:
2686 if opts['rev']:
2702 rev_ = opts['rev']
2687 rev_ = opts['rev']
2703 message = opts['message']
2688 message = opts['message']
2704 if opts['remove']:
2689 if opts['remove']:
2705 expectedtype = opts['local'] and 'local' or 'global'
2690 expectedtype = opts['local'] and 'local' or 'global'
2706 for n in names:
2691 for n in names:
2707 if not repo.tagtype(n):
2692 if not repo.tagtype(n):
2708 raise util.Abort(_('tag \'%s\' does not exist') % n)
2693 raise util.Abort(_('tag \'%s\' does not exist') % n)
2709 if repo.tagtype(n) != expectedtype:
2694 if repo.tagtype(n) != expectedtype:
2710 raise util.Abort(_('tag \'%s\' is not a %s tag') %
2695 raise util.Abort(_('tag \'%s\' is not a %s tag') %
2711 (n, expectedtype))
2696 (n, expectedtype))
2712 rev_ = nullid
2697 rev_ = nullid
2713 if not message:
2698 if not message:
2714 message = _('Removed tag %s') % ', '.join(names)
2699 message = _('Removed tag %s') % ', '.join(names)
2715 elif not opts['force']:
2700 elif not opts['force']:
2716 for n in names:
2701 for n in names:
2717 if n in repo.tags():
2702 if n in repo.tags():
2718 raise util.Abort(_('tag \'%s\' already exists '
2703 raise util.Abort(_('tag \'%s\' already exists '
2719 '(use -f to force)') % n)
2704 '(use -f to force)') % n)
2720 if not rev_ and repo.dirstate.parents()[1] != nullid:
2705 if not rev_ and repo.dirstate.parents()[1] != nullid:
2721 raise util.Abort(_('uncommitted merge - please provide a '
2706 raise util.Abort(_('uncommitted merge - please provide a '
2722 'specific revision'))
2707 'specific revision'))
2723 r = repo[rev_].node()
2708 r = repo[rev_].node()
2724
2709
2725 if not message:
2710 if not message:
2726 message = (_('Added tag %s for changeset %s') %
2711 message = (_('Added tag %s for changeset %s') %
2727 (', '.join(names), short(r)))
2712 (', '.join(names), short(r)))
2728
2713
2729 date = opts.get('date')
2714 date = opts.get('date')
2730 if date:
2715 if date:
2731 date = util.parsedate(date)
2716 date = util.parsedate(date)
2732
2717
2733 repo.tag(names, r, message, opts['local'], opts['user'], date)
2718 repo.tag(names, r, message, opts['local'], opts['user'], date)
2734
2719
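The duplicate-name check near the top of tag() relies on dict.fromkeys(): building a dict keyed by the requested names collapses repeats, so a length mismatch means some name was passed more than once. For example, with hypothetical tag names:

    names = ('v1.0', 'stable', 'v1.0')
    len(names) != len(dict.fromkeys(names))   # True, so the command aborts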
2735 def tags(ui, repo):
2720 def tags(ui, repo):
2736 """list repository tags
2721 """list repository tags
2737
2722
2738 List the repository tags.
2723 List the repository tags.
2739
2724
2740 This lists both regular and local tags. When the -v/--verbose switch
2725 This lists both regular and local tags. When the -v/--verbose switch
2741 is used, a third column "local" is printed for local tags.
2726 is used, a third column "local" is printed for local tags.
2742 """
2727 """
2743
2728
2744 l = repo.tagslist()
2729 l = repo.tagslist()
2745 l.reverse()
2730 l.reverse()
2746 hexfunc = ui.debugflag and hex or short
2731 hexfunc = ui.debugflag and hex or short
2747 tagtype = ""
2732 tagtype = ""
2748
2733
2749 for t, n in l:
2734 for t, n in l:
2750 if ui.quiet:
2735 if ui.quiet:
2751 ui.write("%s\n" % t)
2736 ui.write("%s\n" % t)
2752 continue
2737 continue
2753
2738
2754 try:
2739 try:
2755 hn = hexfunc(n)
2740 hn = hexfunc(n)
2756 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2741 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2757 except revlog.LookupError:
2742 except revlog.LookupError:
2758 r = " ?:%s" % hn
2743 r = " ?:%s" % hn
2759 else:
2744 else:
2760 spaces = " " * (30 - util.locallen(t))
2745 spaces = " " * (30 - util.locallen(t))
2761 if ui.verbose:
2746 if ui.verbose:
2762 if repo.tagtype(t) == 'local':
2747 if repo.tagtype(t) == 'local':
2763 tagtype = " local"
2748 tagtype = " local"
2764 else:
2749 else:
2765 tagtype = ""
2750 tagtype = ""
2766 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2751 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2767
2752
2768 def tip(ui, repo, **opts):
2753 def tip(ui, repo, **opts):
2769 """show the tip revision
2754 """show the tip revision
2770
2755
2771 The tip revision (usually just called the tip) is the most
2756 The tip revision (usually just called the tip) is the most
2772 recently added changeset in the repository, the most recently
2757 recently added changeset in the repository, the most recently
2773 changed head.
2758 changed head.
2774
2759
2775 If you have just made a commit, that commit will be the tip. If
2760 If you have just made a commit, that commit will be the tip. If
2776 you have just pulled changes from another repository, the tip of
2761 you have just pulled changes from another repository, the tip of
2777 that repository becomes the current tip. The "tip" tag is special
2762 that repository becomes the current tip. The "tip" tag is special
2778 and cannot be renamed or assigned to a different changeset.
2763 and cannot be renamed or assigned to a different changeset.
2779 """
2764 """
2780 cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1)
2765 cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1)
2781
2766
2782 def unbundle(ui, repo, fname1, *fnames, **opts):
2767 def unbundle(ui, repo, fname1, *fnames, **opts):
2783 """apply one or more changegroup files
2768 """apply one or more changegroup files
2784
2769
2785 Apply one or more compressed changegroup files generated by the
2770 Apply one or more compressed changegroup files generated by the
2786 bundle command.
2771 bundle command.
2787 """
2772 """
2788 fnames = (fname1,) + fnames
2773 fnames = (fname1,) + fnames
2789
2774
2790 lock = None
2775 lock = None
2791 try:
2776 try:
2792 lock = repo.lock()
2777 lock = repo.lock()
2793 for fname in fnames:
2778 for fname in fnames:
2794 if os.path.exists(fname):
2779 if os.path.exists(fname):
2795 f = open(fname, "rb")
2780 f = open(fname, "rb")
2796 else:
2781 else:
2797 f = urllib.urlopen(fname)
2782 f = urllib.urlopen(fname)
2798 gen = changegroup.readbundle(f, fname)
2783 gen = changegroup.readbundle(f, fname)
2799 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2784 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2800 finally:
2785 finally:
2801 del lock
2786 del lock
2802
2787
2803 return postincoming(ui, repo, modheads, opts['update'], None)
2788 return postincoming(ui, repo, modheads, opts['update'], None)
2804
2789
2805 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2790 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2806 """update working directory
2791 """update working directory
2807
2792
2808 Update the repository's working directory to the specified revision,
2793 Update the repository's working directory to the specified revision,
2809 or the tip of the current branch if none is specified.
2794 or the tip of the current branch if none is specified.
2810
2795
2811 If the requested revision is a descendant of the working
2796 If the requested revision is a descendant of the working
2812 directory, any outstanding changes in the working directory will
2797 directory, any outstanding changes in the working directory will
2813 be merged into the result. If it is not directly descended but is
2798 be merged into the result. If it is not directly descended but is
2814 on the same named branch, update aborts with a suggestion to use
2799 on the same named branch, update aborts with a suggestion to use
2815 merge or update -C instead.
2800 merge or update -C instead.
2816
2801
2817 If the requested revision is on a different named branch and the
2802 If the requested revision is on a different named branch and the
2818 working directory is clean, update quietly switches branches.
2803 working directory is clean, update quietly switches branches.
2819
2804
2820 See 'hg help dates' for a list of formats valid for --date.
2805 See 'hg help dates' for a list of formats valid for --date.
2821 """
2806 """
2822 if rev and node:
2807 if rev and node:
2823 raise util.Abort(_("please specify just one revision"))
2808 raise util.Abort(_("please specify just one revision"))
2824
2809
2825 if not rev:
2810 if not rev:
2826 rev = node
2811 rev = node
2827
2812
2828 if date:
2813 if date:
2829 if rev:
2814 if rev:
2830 raise util.Abort(_("you can't specify a revision and a date"))
2815 raise util.Abort(_("you can't specify a revision and a date"))
2831 rev = cmdutil.finddate(ui, repo, date)
2816 rev = cmdutil.finddate(ui, repo, date)
2832
2817
2833 if clean:
2818 if clean:
2834 return hg.clean(repo, rev)
2819 return hg.clean(repo, rev)
2835 else:
2820 else:
2836 return hg.update(repo, rev)
2821 return hg.update(repo, rev)
2837
2822
2838 def verify(ui, repo):
2823 def verify(ui, repo):
2839 """verify the integrity of the repository
2824 """verify the integrity of the repository
2840
2825
2841 Verify the integrity of the current repository.
2826 Verify the integrity of the current repository.
2842
2827
2843 This will perform an extensive check of the repository's
2828 This will perform an extensive check of the repository's
2844 integrity, validating the hashes and checksums of each entry in
2829 integrity, validating the hashes and checksums of each entry in
2845 the changelog, manifest, and tracked files, as well as the
2830 the changelog, manifest, and tracked files, as well as the
2846 integrity of their crosslinks and indices.
2831 integrity of their crosslinks and indices.
2847 """
2832 """
2848 return hg.verify(repo)
2833 return hg.verify(repo)
2849
2834
2850 def version_(ui):
2835 def version_(ui):
2851 """output version and copyright information"""
2836 """output version and copyright information"""
2852 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2837 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2853 % version.get_version())
2838 % version.get_version())
2854 ui.status(_(
2839 ui.status(_(
2855 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2840 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2856 "This is free software; see the source for copying conditions. "
2841 "This is free software; see the source for copying conditions. "
2857 "There is NO\nwarranty; "
2842 "There is NO\nwarranty; "
2858 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2843 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2859 ))
2844 ))
2860
2845
2861 # Command options and aliases are listed here, alphabetically
2846 # Command options and aliases are listed here, alphabetically
2862
2847
2863 globalopts = [
2848 globalopts = [
2864 ('R', 'repository', '',
2849 ('R', 'repository', '',
2865 _('repository root directory or symbolic path name')),
2850 _('repository root directory or symbolic path name')),
2866 ('', 'cwd', '', _('change working directory')),
2851 ('', 'cwd', '', _('change working directory')),
2867 ('y', 'noninteractive', None,
2852 ('y', 'noninteractive', None,
2868 _('do not prompt, assume \'yes\' for any required answers')),
2853 _('do not prompt, assume \'yes\' for any required answers')),
2869 ('q', 'quiet', None, _('suppress output')),
2854 ('q', 'quiet', None, _('suppress output')),
2870 ('v', 'verbose', None, _('enable additional output')),
2855 ('v', 'verbose', None, _('enable additional output')),
2871 ('', 'config', [], _('set/override config option')),
2856 ('', 'config', [], _('set/override config option')),
2872 ('', 'debug', None, _('enable debugging output')),
2857 ('', 'debug', None, _('enable debugging output')),
2873 ('', 'debugger', None, _('start debugger')),
2858 ('', 'debugger', None, _('start debugger')),
2874 ('', 'encoding', util._encoding, _('set the charset encoding')),
2859 ('', 'encoding', util._encoding, _('set the charset encoding')),
2875 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2860 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2876 ('', 'lsprof', None, _('print improved command execution profile')),
2861 ('', 'lsprof', None, _('print improved command execution profile')),
2877 ('', 'traceback', None, _('print traceback on exception')),
2862 ('', 'traceback', None, _('print traceback on exception')),
2878 ('', 'time', None, _('time how long the command takes')),
2863 ('', 'time', None, _('time how long the command takes')),
2879 ('', 'profile', None, _('print command execution profile')),
2864 ('', 'profile', None, _('print command execution profile')),
2880 ('', 'version', None, _('output version information and exit')),
2865 ('', 'version', None, _('output version information and exit')),
2881 ('h', 'help', None, _('display help and exit')),
2866 ('h', 'help', None, _('display help and exit')),
2882 ]
2867 ]
2883
2868
2884 dryrunopts = [('n', 'dry-run', None,
2869 dryrunopts = [('n', 'dry-run', None,
2885 _('do not perform actions, just print output'))]
2870 _('do not perform actions, just print output'))]
2886
2871
2887 remoteopts = [
2872 remoteopts = [
2888 ('e', 'ssh', '', _('specify ssh command to use')),
2873 ('e', 'ssh', '', _('specify ssh command to use')),
2889 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2874 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2890 ]
2875 ]
2891
2876
2892 walkopts = [
2877 walkopts = [
2893 ('I', 'include', [], _('include names matching the given patterns')),
2878 ('I', 'include', [], _('include names matching the given patterns')),
2894 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2879 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2895 ]
2880 ]
2896
2881
2897 commitopts = [
2882 commitopts = [
2898 ('m', 'message', '', _('use <text> as commit message')),
2883 ('m', 'message', '', _('use <text> as commit message')),
2899 ('l', 'logfile', '', _('read commit message from <file>')),
2884 ('l', 'logfile', '', _('read commit message from <file>')),
2900 ]
2885 ]
2901
2886
2902 commitopts2 = [
2887 commitopts2 = [
2903 ('d', 'date', '', _('record datecode as commit date')),
2888 ('d', 'date', '', _('record datecode as commit date')),
2904 ('u', 'user', '', _('record user as committer')),
2889 ('u', 'user', '', _('record user as committer')),
2905 ]
2890 ]
2906
2891
2907 templateopts = [
2892 templateopts = [
2908 ('', 'style', '', _('display using template map file')),
2893 ('', 'style', '', _('display using template map file')),
2909 ('', 'template', '', _('display with template')),
2894 ('', 'template', '', _('display with template')),
2910 ]
2895 ]
2911
2896
2912 logopts = [
2897 logopts = [
2913 ('p', 'patch', None, _('show patch')),
2898 ('p', 'patch', None, _('show patch')),
2914 ('l', 'limit', '', _('limit number of changes displayed')),
2899 ('l', 'limit', '', _('limit number of changes displayed')),
2915 ('M', 'no-merges', None, _('do not show merges')),
2900 ('M', 'no-merges', None, _('do not show merges')),
2916 ] + templateopts
2901 ] + templateopts
2917
2902
2918 diffopts = [
2903 diffopts = [
2919 ('a', 'text', None, _('treat all files as text')),
2904 ('a', 'text', None, _('treat all files as text')),
2920 ('g', 'git', None, _('use git extended diff format')),
2905 ('g', 'git', None, _('use git extended diff format')),
2921 ('', 'nodates', None, _("don't include dates in diff headers"))
2906 ('', 'nodates', None, _("don't include dates in diff headers"))
2922 ]
2907 ]
2923
2908
2924 diffopts2 = [
2909 diffopts2 = [
2925 ('p', 'show-function', None, _('show which function each change is in')),
2910 ('p', 'show-function', None, _('show which function each change is in')),
2926 ('w', 'ignore-all-space', None,
2911 ('w', 'ignore-all-space', None,
2927 _('ignore white space when comparing lines')),
2912 _('ignore white space when comparing lines')),
2928 ('b', 'ignore-space-change', None,
2913 ('b', 'ignore-space-change', None,
2929 _('ignore changes in the amount of white space')),
2914 _('ignore changes in the amount of white space')),
2930 ('B', 'ignore-blank-lines', None,
2915 ('B', 'ignore-blank-lines', None,
2931 _('ignore changes whose lines are all blank')),
2916 _('ignore changes whose lines are all blank')),
2932 ('U', 'unified', '', _('number of lines of context to show'))
2917 ('U', 'unified', '', _('number of lines of context to show'))
2933 ]
2918 ]
2934
2919
2935 table = {
2920 table = {
2936 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2921 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2937 "addremove":
2922 "addremove":
2938 (addremove,
2923 (addremove,
2939 [('s', 'similarity', '',
2924 [('s', 'similarity', '',
2940 _('guess renamed files by similarity (0<=s<=100)')),
2925 _('guess renamed files by similarity (0<=s<=100)')),
2941 ] + walkopts + dryrunopts,
2926 ] + walkopts + dryrunopts,
2942 _('hg addremove [OPTION]... [FILE]...')),
2927 _('hg addremove [OPTION]... [FILE]...')),
2943 "^annotate|blame":
2928 "^annotate|blame":
2944 (annotate,
2929 (annotate,
2945 [('r', 'rev', '', _('annotate the specified revision')),
2930 [('r', 'rev', '', _('annotate the specified revision')),
2946 ('f', 'follow', None, _('follow file copies and renames')),
2931 ('f', 'follow', None, _('follow file copies and renames')),
2947 ('a', 'text', None, _('treat all files as text')),
2932 ('a', 'text', None, _('treat all files as text')),
2948 ('u', 'user', None, _('list the author (long with -v)')),
2933 ('u', 'user', None, _('list the author (long with -v)')),
2949 ('d', 'date', None, _('list the date (short with -q)')),
2934 ('d', 'date', None, _('list the date (short with -q)')),
2950 ('n', 'number', None, _('list the revision number (default)')),
2935 ('n', 'number', None, _('list the revision number (default)')),
2951 ('c', 'changeset', None, _('list the changeset')),
2936 ('c', 'changeset', None, _('list the changeset')),
2952 ('l', 'line-number', None,
2937 ('l', 'line-number', None,
2953 _('show line number at the first appearance'))
2938 _('show line number at the first appearance'))
2954 ] + walkopts,
2939 ] + walkopts,
2955 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2940 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2956 "archive":
2941 "archive":
2957 (archive,
2942 (archive,
2958 [('', 'no-decode', None, _('do not pass files through decoders')),
2943 [('', 'no-decode', None, _('do not pass files through decoders')),
2959 ('p', 'prefix', '', _('directory prefix for files in archive')),
2944 ('p', 'prefix', '', _('directory prefix for files in archive')),
2960 ('r', 'rev', '', _('revision to distribute')),
2945 ('r', 'rev', '', _('revision to distribute')),
2961 ('t', 'type', '', _('type of distribution to create')),
2946 ('t', 'type', '', _('type of distribution to create')),
2962 ] + walkopts,
2947 ] + walkopts,
2963 _('hg archive [OPTION]... DEST')),
2948 _('hg archive [OPTION]... DEST')),
2964 "backout":
2949 "backout":
2965 (backout,
2950 (backout,
2966 [('', 'merge', None,
2951 [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('hg backout [OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("hg bisect [-gbsr] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch'))],
         _('hg branch [-f] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('hg branches [-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset up to which you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
          ('a', 'all', None, _('bundle all changesets in the repository')),
          ('t', 'type', 'bzip2', _('bundle compression type to use')),
         ] + remoteopts,
         _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('', 'decode', None, _('apply any matching decode filter')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None,
           _('the clone will only contain a repository (no working copy)')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
         ] + walkopts + commitopts + commitopts2,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [],
                      _('hg debugancestor [INDEX] REV1 REV2')),
    "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('hg debugcomplete [-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('hg debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
    "debugindex": (debugindex, [], _('hg debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
    "debuginstall": (debuginstall, [], _('hg debuginstall')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('F', 'files', '', _('file list'))
         ] + commitopts + commitopts2,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('hg debugrebuildstate [-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('hg debugrename [-r REV] FILE')),
    "debugsetparents":
        (debugsetparents,
         [],
         _('hg debugsetparents REV1 [REV2]')),
    "debugstate":
        (debugstate,
         [('', 'nodates', None, _('do not display the saved mtime'))],
         _('hg debugstate [OPTS]')),
    "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision'))
         ] + diffopts + diffopts2 + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('', 'switch-parent', None, _('diff against the second parent'))
         ] + diffopts,
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('r', 'rev', '', _('show only heads which are descendants of rev')),
         ] + templateopts,
         _('hg heads [-r REV] [REV]...')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified rev')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('hg identify [-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('Use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2,
         _('hg import [OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + logopts + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('m', 'only-merges', None, _('show only merges')),
          ('b', 'only-branch', [],
           _('show only changesets within the given named branch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
         ] + logopts + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('hg manifest [-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('hg merge [-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
         ] + logopts + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
         ] + templateopts,
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record delete for missing files')),
          ('f', 'force', None,
           _('remove (and delete) file even if added or modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "resolve":
        (resolve,
         [('l', 'list', None, _('list state of files needing merge')),
          ('m', 'mark', None, _('mark files as resolved')),
          ('u', 'unmark', None, _('unmark files as resolved'))],
         ('hg resolve [OPTION] [FILES...]')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to listen on (default: 8000)')),
          ('a', 'address', '', _('address to listen on (default: all interfaces)')),
          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
          ('', 'certificate', '', _('SSL certificate file'))],
         _('hg serve [OPTION]...')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('hg showconfig [-u] [NAME]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('f', 'force', None, _('replace existing tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('m', 'message', '', _('use <text> as commit message')),
         ] + commitopts2,
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('p', 'patch', None, _('show patch')),
         ] + templateopts,
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE...')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision'))],
         _('hg update [-C] [-d DATE] [[-r] REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}

norepo = ("clone init version help debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall debugfsinfo")
optionalrepo = ("identify paths serve showconfig debugancestor")
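
The table above pairs each command name with a (function, option list, synopsis) tuple: aliases are separated by "|", a leading "^" marks the command for the short help listing, and each option is a (short flag, long name, default, help text) tuple. Below is a minimal, hypothetical sketch of the same entry shape as an extension would expose it through a cmdtable dict; the "hello" command and its --greeting option are invented here for illustration and are not part of this changeset.

# Hypothetical sketch only: illustrates the (function, options, synopsis)
# entry format used by the command table above, in extension form.
from mercurial.i18n import _

def hello(ui, repo, **opts):
    """print a greeting and the repository root"""
    # option values arrive in opts keyed by their long name
    greeting = opts.get('greeting') or 'hello'
    ui.write("%s: %s\n" % (greeting, repo.root))

cmdtable = {
    # a leading '^' in the key would list the command in short help
    "hello":
        (hello,
         [('g', 'greeting', '', _('text to use as the greeting'))],
         _('hg hello [-g TEXT]')),
}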