Remove deprecated old-style branch support
Matt Mackall
r3876:1e0b94cf default
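This commit deletes the deprecated "brinfo"/branchlookup code path: the brinfo parameter is dropped from changeset_printer and changeset_templater in cmdutil.py, and the two extensions below stop passing a placeholder None for it. A minimal caller-side sketch of the change, reconstructed from the bugzilla.py and notify.py hunks below (self.ui, self.repo and mapfile are taken verbatim from those hunks, not new names; this is not standalone code):

    # before this commit: callers passed None for the unused brinfo argument
    t = cmdutil.changeset_templater(self.ui, self.repo, False, None, mapfile, False)

    # after this commit: the argument is simply gone
    t = cmdutil.changeset_templater(self.ui, self.repo, False, mapfile, False)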
@@ -1,312 +1,312 @@
1 # bugzilla.py - bugzilla integration for mercurial
1 # bugzilla.py - bugzilla integration for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to update comments of bugzilla bugs when changesets
8 # hook extension to update comments of bugzilla bugs when changesets
9 # that refer to bugs by id are seen. this hook does not change bug
9 # that refer to bugs by id are seen. this hook does not change bug
10 # status, only comments.
10 # status, only comments.
11 #
11 #
12 # to configure, add items to '[bugzilla]' section of hgrc.
12 # to configure, add items to '[bugzilla]' section of hgrc.
13 #
13 #
14 # to use, configure bugzilla extension and enable like this:
14 # to use, configure bugzilla extension and enable like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.bugzilla =
17 # hgext.bugzilla =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # run bugzilla hook on every change pulled or pushed in here
20 # # run bugzilla hook on every change pulled or pushed in here
21 # incoming.bugzilla = python:hgext.bugzilla.hook
21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 #
22 #
23 # config items:
23 # config items:
24 #
24 #
25 # section name is 'bugzilla'.
25 # section name is 'bugzilla'.
26 # [bugzilla]
26 # [bugzilla]
27 #
27 #
28 # REQUIRED:
28 # REQUIRED:
29 # host = bugzilla # mysql server where bugzilla database lives
29 # host = bugzilla # mysql server where bugzilla database lives
30 # password = ** # user's password
30 # password = ** # user's password
31 # version = 2.16 # version of bugzilla installed
31 # version = 2.16 # version of bugzilla installed
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # bzuser = ... # fallback bugzilla user name to record comments with
34 # bzuser = ... # fallback bugzilla user name to record comments with
35 # db = bugs # database to connect to
35 # db = bugs # database to connect to
36 # notify = ... # command to run to get bugzilla to send mail
36 # notify = ... # command to run to get bugzilla to send mail
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 # strip = 0 # number of slashes to strip for url paths
38 # strip = 0 # number of slashes to strip for url paths
39 # style = ... # style file to use when formatting comments
39 # style = ... # style file to use when formatting comments
40 # template = ... # template to use when formatting comments
40 # template = ... # template to use when formatting comments
41 # timeout = 5 # database connection timeout (seconds)
41 # timeout = 5 # database connection timeout (seconds)
42 # user = bugs # user to connect to database as
42 # user = bugs # user to connect to database as
43 # [web]
43 # [web]
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 #
45 #
46 # if hg committer names are not same as bugzilla user names, use
46 # if hg committer names are not same as bugzilla user names, use
47 # "usermap" feature to map from committer email to bugzilla user name.
47 # "usermap" feature to map from committer email to bugzilla user name.
48 # usermap can be in hgrc or separate config file.
48 # usermap can be in hgrc or separate config file.
49 #
49 #
50 # [bugzilla]
50 # [bugzilla]
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 # [usermap]
52 # [usermap]
53 # committer_email = bugzilla_user_name
53 # committer_email = bugzilla_user_name
54
54
55 from mercurial.demandload import *
55 from mercurial.demandload import *
56 from mercurial.i18n import gettext as _
56 from mercurial.i18n import gettext as _
57 from mercurial.node import *
57 from mercurial.node import *
58 demandload(globals(), 'mercurial:cmdutil,templater,util os re time')
58 demandload(globals(), 'mercurial:cmdutil,templater,util os re time')
59
59
60 MySQLdb = None
60 MySQLdb = None
61
61
62 def buglist(ids):
62 def buglist(ids):
63 return '(' + ','.join(map(str, ids)) + ')'
63 return '(' + ','.join(map(str, ids)) + ')'
64
64
65 class bugzilla_2_16(object):
65 class bugzilla_2_16(object):
66 '''support for bugzilla version 2.16.'''
66 '''support for bugzilla version 2.16.'''
67
67
68 def __init__(self, ui):
68 def __init__(self, ui):
69 self.ui = ui
69 self.ui = ui
70 host = self.ui.config('bugzilla', 'host', 'localhost')
70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 passwd = self.ui.config('bugzilla', 'password')
72 passwd = self.ui.config('bugzilla', 'password')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 usermap = self.ui.config('bugzilla', 'usermap')
75 usermap = self.ui.config('bugzilla', 'usermap')
76 if usermap:
76 if usermap:
77 self.ui.readsections(usermap, 'usermap')
77 self.ui.readsections(usermap, 'usermap')
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 (host, db, user, '*' * len(passwd)))
79 (host, db, user, '*' * len(passwd)))
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 db=db, connect_timeout=timeout)
81 db=db, connect_timeout=timeout)
82 self.cursor = self.conn.cursor()
82 self.cursor = self.conn.cursor()
83 self.run('select fieldid from fielddefs where name = "longdesc"')
83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 ids = self.cursor.fetchall()
84 ids = self.cursor.fetchall()
85 if len(ids) != 1:
85 if len(ids) != 1:
86 raise util.Abort(_('unknown database schema'))
86 raise util.Abort(_('unknown database schema'))
87 self.longdesc_id = ids[0][0]
87 self.longdesc_id = ids[0][0]
88 self.user_ids = {}
88 self.user_ids = {}
89
89
90 def run(self, *args, **kwargs):
90 def run(self, *args, **kwargs):
91 '''run a query.'''
91 '''run a query.'''
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 try:
93 try:
94 self.cursor.execute(*args, **kwargs)
94 self.cursor.execute(*args, **kwargs)
95 except MySQLdb.MySQLError, err:
95 except MySQLdb.MySQLError, err:
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 raise
97 raise
98
98
99 def filter_real_bug_ids(self, ids):
99 def filter_real_bug_ids(self, ids):
100 '''filter not-existing bug ids from list.'''
100 '''filter not-existing bug ids from list.'''
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 ids = [c[0] for c in self.cursor.fetchall()]
102 ids = [c[0] for c in self.cursor.fetchall()]
103 ids.sort()
103 ids.sort()
104 return ids
104 return ids
105
105
106 def filter_unknown_bug_ids(self, node, ids):
106 def filter_unknown_bug_ids(self, node, ids):
107 '''filter bug ids from list that already refer to this changeset.'''
107 '''filter bug ids from list that already refer to this changeset.'''
108
108
109 self.run('''select bug_id from longdescs where
109 self.run('''select bug_id from longdescs where
110 bug_id in %s and thetext like "%%%s%%"''' %
110 bug_id in %s and thetext like "%%%s%%"''' %
111 (buglist(ids), short(node)))
111 (buglist(ids), short(node)))
112 unknown = dict.fromkeys(ids)
112 unknown = dict.fromkeys(ids)
113 for (id,) in self.cursor.fetchall():
113 for (id,) in self.cursor.fetchall():
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 (id, short(node)))
115 (id, short(node)))
116 unknown.pop(id, None)
116 unknown.pop(id, None)
117 ids = unknown.keys()
117 ids = unknown.keys()
118 ids.sort()
118 ids.sort()
119 return ids
119 return ids
120
120
121 def notify(self, ids):
121 def notify(self, ids):
122 '''tell bugzilla to send mail.'''
122 '''tell bugzilla to send mail.'''
123
123
124 self.ui.status(_('telling bugzilla to send mail:\n'))
124 self.ui.status(_('telling bugzilla to send mail:\n'))
125 for id in ids:
125 for id in ids:
126 self.ui.status(_(' bug %s\n') % id)
126 self.ui.status(_(' bug %s\n') % id)
127 cmd = self.ui.config('bugzilla', 'notify',
127 cmd = self.ui.config('bugzilla', 'notify',
128 'cd /var/www/html/bugzilla && '
128 'cd /var/www/html/bugzilla && '
129 './processmail %s nobody@nowhere.com') % id
129 './processmail %s nobody@nowhere.com') % id
130 fp = os.popen('(%s) 2>&1' % cmd)
130 fp = os.popen('(%s) 2>&1' % cmd)
131 out = fp.read()
131 out = fp.read()
132 ret = fp.close()
132 ret = fp.close()
133 if ret:
133 if ret:
134 self.ui.warn(out)
134 self.ui.warn(out)
135 raise util.Abort(_('bugzilla notify command %s') %
135 raise util.Abort(_('bugzilla notify command %s') %
136 util.explain_exit(ret)[0])
136 util.explain_exit(ret)[0])
137 self.ui.status(_('done\n'))
137 self.ui.status(_('done\n'))
138
138
139 def get_user_id(self, user):
139 def get_user_id(self, user):
140 '''look up numeric bugzilla user id.'''
140 '''look up numeric bugzilla user id.'''
141 try:
141 try:
142 return self.user_ids[user]
142 return self.user_ids[user]
143 except KeyError:
143 except KeyError:
144 try:
144 try:
145 userid = int(user)
145 userid = int(user)
146 except ValueError:
146 except ValueError:
147 self.ui.note(_('looking up user %s\n') % user)
147 self.ui.note(_('looking up user %s\n') % user)
148 self.run('''select userid from profiles
148 self.run('''select userid from profiles
149 where login_name like %s''', user)
149 where login_name like %s''', user)
150 all = self.cursor.fetchall()
150 all = self.cursor.fetchall()
151 if len(all) != 1:
151 if len(all) != 1:
152 raise KeyError(user)
152 raise KeyError(user)
153 userid = int(all[0][0])
153 userid = int(all[0][0])
154 self.user_ids[user] = userid
154 self.user_ids[user] = userid
155 return userid
155 return userid
156
156
157 def map_committer(self, user):
157 def map_committer(self, user):
158 '''map name of committer to bugzilla user name.'''
158 '''map name of committer to bugzilla user name.'''
159 for committer, bzuser in self.ui.configitems('usermap'):
159 for committer, bzuser in self.ui.configitems('usermap'):
160 if committer.lower() == user.lower():
160 if committer.lower() == user.lower():
161 return bzuser
161 return bzuser
162 return user
162 return user
163
163
164 def add_comment(self, bugid, text, committer):
164 def add_comment(self, bugid, text, committer):
165 '''add comment to bug. try adding comment as committer of
165 '''add comment to bug. try adding comment as committer of
166 changeset, otherwise as default bugzilla user.'''
166 changeset, otherwise as default bugzilla user.'''
167 user = self.map_committer(committer)
167 user = self.map_committer(committer)
168 try:
168 try:
169 userid = self.get_user_id(user)
169 userid = self.get_user_id(user)
170 except KeyError:
170 except KeyError:
171 try:
171 try:
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 if not defaultuser:
173 if not defaultuser:
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 user)
175 user)
176 userid = self.get_user_id(defaultuser)
176 userid = self.get_user_id(defaultuser)
177 except KeyError:
177 except KeyError:
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 (user, defaultuser))
179 (user, defaultuser))
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 self.run('''insert into longdescs
181 self.run('''insert into longdescs
182 (bug_id, who, bug_when, thetext)
182 (bug_id, who, bug_when, thetext)
183 values (%s, %s, %s, %s)''',
183 values (%s, %s, %s, %s)''',
184 (bugid, userid, now, text))
184 (bugid, userid, now, text))
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 values (%s, %s, %s, %s)''',
186 values (%s, %s, %s, %s)''',
187 (bugid, userid, now, self.longdesc_id))
187 (bugid, userid, now, self.longdesc_id))
188
188
189 class bugzilla(object):
189 class bugzilla(object):
190 # supported versions of bugzilla. different versions have
190 # supported versions of bugzilla. different versions have
191 # different schemas.
191 # different schemas.
192 _versions = {
192 _versions = {
193 '2.16': bugzilla_2_16,
193 '2.16': bugzilla_2_16,
194 }
194 }
195
195
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198
198
199 _bz = None
199 _bz = None
200
200
201 def __init__(self, ui, repo):
201 def __init__(self, ui, repo):
202 self.ui = ui
202 self.ui = ui
203 self.repo = repo
203 self.repo = repo
204
204
205 def bz(self):
205 def bz(self):
206 '''return object that knows how to talk to bugzilla version in
206 '''return object that knows how to talk to bugzilla version in
207 use.'''
207 use.'''
208
208
209 if bugzilla._bz is None:
209 if bugzilla._bz is None:
210 bzversion = self.ui.config('bugzilla', 'version')
210 bzversion = self.ui.config('bugzilla', 'version')
211 try:
211 try:
212 bzclass = bugzilla._versions[bzversion]
212 bzclass = bugzilla._versions[bzversion]
213 except KeyError:
213 except KeyError:
214 raise util.Abort(_('bugzilla version %s not supported') %
214 raise util.Abort(_('bugzilla version %s not supported') %
215 bzversion)
215 bzversion)
216 bugzilla._bz = bzclass(self.ui)
216 bugzilla._bz = bzclass(self.ui)
217 return bugzilla._bz
217 return bugzilla._bz
218
218
219 def __getattr__(self, key):
219 def __getattr__(self, key):
220 return getattr(self.bz(), key)
220 return getattr(self.bz(), key)
221
221
222 _bug_re = None
222 _bug_re = None
223 _split_re = None
223 _split_re = None
224
224
225 def find_bug_ids(self, node, desc):
225 def find_bug_ids(self, node, desc):
226 '''find valid bug ids that are referred to in changeset
226 '''find valid bug ids that are referred to in changeset
227 comments and that do not already have references to this
227 comments and that do not already have references to this
228 changeset.'''
228 changeset.'''
229
229
230 if bugzilla._bug_re is None:
230 if bugzilla._bug_re is None:
231 bugzilla._bug_re = re.compile(
231 bugzilla._bug_re = re.compile(
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 re.IGNORECASE)
233 re.IGNORECASE)
234 bugzilla._split_re = re.compile(r'\D+')
234 bugzilla._split_re = re.compile(r'\D+')
235 start = 0
235 start = 0
236 ids = {}
236 ids = {}
237 while True:
237 while True:
238 m = bugzilla._bug_re.search(desc, start)
238 m = bugzilla._bug_re.search(desc, start)
239 if not m:
239 if not m:
240 break
240 break
241 start = m.end()
241 start = m.end()
242 for id in bugzilla._split_re.split(m.group(1)):
242 for id in bugzilla._split_re.split(m.group(1)):
243 if not id: continue
243 if not id: continue
244 ids[int(id)] = 1
244 ids[int(id)] = 1
245 ids = ids.keys()
245 ids = ids.keys()
246 if ids:
246 if ids:
247 ids = self.filter_real_bug_ids(ids)
247 ids = self.filter_real_bug_ids(ids)
248 if ids:
248 if ids:
249 ids = self.filter_unknown_bug_ids(node, ids)
249 ids = self.filter_unknown_bug_ids(node, ids)
250 return ids
250 return ids
251
251
252 def update(self, bugid, node, changes):
252 def update(self, bugid, node, changes):
253 '''update bugzilla bug with reference to changeset.'''
253 '''update bugzilla bug with reference to changeset.'''
254
254
255 def webroot(root):
255 def webroot(root):
256 '''strip leading prefix of repo root and turn into
256 '''strip leading prefix of repo root and turn into
257 url-safe path.'''
257 url-safe path.'''
258 count = int(self.ui.config('bugzilla', 'strip', 0))
258 count = int(self.ui.config('bugzilla', 'strip', 0))
259 root = util.pconvert(root)
259 root = util.pconvert(root)
260 while count > 0:
260 while count > 0:
261 c = root.find('/')
261 c = root.find('/')
262 if c == -1:
262 if c == -1:
263 break
263 break
264 root = root[c+1:]
264 root = root[c+1:]
265 count -= 1
265 count -= 1
266 return root
266 return root
267
267
268 mapfile = self.ui.config('bugzilla', 'style')
268 mapfile = self.ui.config('bugzilla', 'style')
269 tmpl = self.ui.config('bugzilla', 'template')
269 tmpl = self.ui.config('bugzilla', 'template')
270 t = cmdutil.changeset_templater(self.ui, self.repo,
270 t = cmdutil.changeset_templater(self.ui, self.repo,
271 False, None, mapfile, False)
271 False, mapfile, False)
272 if not mapfile and not tmpl:
272 if not mapfile and not tmpl:
273 tmpl = _('changeset {node|short} in repo {root} refers '
273 tmpl = _('changeset {node|short} in repo {root} refers '
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 if tmpl:
275 if tmpl:
276 tmpl = templater.parsestring(tmpl, quoted=False)
276 tmpl = templater.parsestring(tmpl, quoted=False)
277 t.use_template(tmpl)
277 t.use_template(tmpl)
278 self.ui.pushbuffer()
278 self.ui.pushbuffer()
279 t.show(changenode=node, changes=changes,
279 t.show(changenode=node, changes=changes,
280 bug=str(bugid),
280 bug=str(bugid),
281 hgweb=self.ui.config('web', 'baseurl'),
281 hgweb=self.ui.config('web', 'baseurl'),
282 root=self.repo.root,
282 root=self.repo.root,
283 webroot=webroot(self.repo.root))
283 webroot=webroot(self.repo.root))
284 data = self.ui.popbuffer()
284 data = self.ui.popbuffer()
285 self.add_comment(bugid, data, templater.email(changes[1]))
285 self.add_comment(bugid, data, templater.email(changes[1]))
286
286
287 def hook(ui, repo, hooktype, node=None, **kwargs):
287 def hook(ui, repo, hooktype, node=None, **kwargs):
288 '''add comment to bugzilla for each changeset that refers to a
288 '''add comment to bugzilla for each changeset that refers to a
289 bugzilla bug id. only add a comment once per bug, so same change
289 bugzilla bug id. only add a comment once per bug, so same change
290 seen multiple times does not fill bug with duplicate data.'''
290 seen multiple times does not fill bug with duplicate data.'''
291 try:
291 try:
292 import MySQLdb as mysql
292 import MySQLdb as mysql
293 global MySQLdb
293 global MySQLdb
294 MySQLdb = mysql
294 MySQLdb = mysql
295 except ImportError, err:
295 except ImportError, err:
296 raise util.Abort(_('python mysql support not available: %s') % err)
296 raise util.Abort(_('python mysql support not available: %s') % err)
297
297
298 if node is None:
298 if node is None:
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 hooktype)
300 hooktype)
301 try:
301 try:
302 bz = bugzilla(ui, repo)
302 bz = bugzilla(ui, repo)
303 bin_node = bin(node)
303 bin_node = bin(node)
304 changes = repo.changelog.read(bin_node)
304 changes = repo.changelog.read(bin_node)
305 ids = bz.find_bug_ids(bin_node, changes[4])
305 ids = bz.find_bug_ids(bin_node, changes[4])
306 if ids:
306 if ids:
307 for id in ids:
307 for id in ids:
308 bz.update(id, bin_node, changes)
308 bz.update(id, bin_node, changes)
309 bz.notify(ids)
309 bz.notify(ids)
310 except MySQLdb.MySQLError, err:
310 except MySQLdb.MySQLError, err:
311 raise util.Abort(_('database error: %s') % err[1])
311 raise util.Abort(_('database error: %s') % err[1])
312
312
@@ -1,280 +1,280 @@
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to email notifications to people when changesets are
8 # hook extension to email notifications to people when changesets are
9 # committed to a repo they subscribe to.
9 # committed to a repo they subscribe to.
10 #
10 #
11 # default mode is to print messages to stdout, for testing and
11 # default mode is to print messages to stdout, for testing and
12 # configuring.
12 # configuring.
13 #
13 #
14 # to use, configure notify extension and enable in hgrc like this:
14 # to use, configure notify extension and enable in hgrc like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.notify =
17 # hgext.notify =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # one email for each incoming changeset
20 # # one email for each incoming changeset
21 # incoming.notify = python:hgext.notify.hook
21 # incoming.notify = python:hgext.notify.hook
22 # # batch emails when many changesets incoming at one time
22 # # batch emails when many changesets incoming at one time
23 # changegroup.notify = python:hgext.notify.hook
23 # changegroup.notify = python:hgext.notify.hook
24 #
24 #
25 # [notify]
25 # [notify]
26 # # config items go in here
26 # # config items go in here
27 #
27 #
28 # config items:
28 # config items:
29 #
29 #
30 # REQUIRED:
30 # REQUIRED:
31 # config = /path/to/file # file containing subscriptions
31 # config = /path/to/file # file containing subscriptions
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # test = True # print messages to stdout for testing
34 # test = True # print messages to stdout for testing
35 # strip = 3 # number of slashes to strip for url paths
35 # strip = 3 # number of slashes to strip for url paths
36 # domain = example.com # domain to use if committer missing domain
36 # domain = example.com # domain to use if committer missing domain
37 # style = ... # style file to use when formatting email
37 # style = ... # style file to use when formatting email
38 # template = ... # template to use when formatting email
38 # template = ... # template to use when formatting email
39 # incoming = ... # template to use when run as incoming hook
39 # incoming = ... # template to use when run as incoming hook
40 # changegroup = ... # template when run as changegroup hook
40 # changegroup = ... # template when run as changegroup hook
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 # maxsubject = 67 # truncate subject line longer than this
42 # maxsubject = 67 # truncate subject line longer than this
43 # diffstat = True # add a diffstat before the diff content
43 # diffstat = True # add a diffstat before the diff content
44 # sources = serve # notify if source of incoming changes in this list
44 # sources = serve # notify if source of incoming changes in this list
45 # # (serve == ssh or http, push, pull, bundle)
45 # # (serve == ssh or http, push, pull, bundle)
46 # [email]
46 # [email]
47 # from = user@host.com # email address to send as if none given
47 # from = user@host.com # email address to send as if none given
48 # [web]
48 # [web]
49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
50 #
50 #
51 # notify config file has same format as regular hgrc. it has two
51 # notify config file has same format as regular hgrc. it has two
52 # sections so you can express subscriptions in whatever way is handier
52 # sections so you can express subscriptions in whatever way is handier
53 # for you.
53 # for you.
54 #
54 #
55 # [usersubs]
55 # [usersubs]
56 # # key is subscriber email, value is ","-separated list of glob patterns
56 # # key is subscriber email, value is ","-separated list of glob patterns
57 # user@host = pattern
57 # user@host = pattern
58 #
58 #
59 # [reposubs]
59 # [reposubs]
60 # # key is glob pattern, value is ","-separated list of subscriber emails
60 # # key is glob pattern, value is ","-separated list of subscriber emails
61 # pattern = user@host
61 # pattern = user@host
62 #
62 #
63 # glob patterns are matched against path to repo root.
63 # glob patterns are matched against path to repo root.
64 #
64 #
65 # if you like, you can put notify config file in repo that users can
65 # if you like, you can put notify config file in repo that users can
66 # push changes to, they can manage their own subscriptions.
66 # push changes to, they can manage their own subscriptions.
67
67
68 from mercurial.demandload import *
68 from mercurial.demandload import *
69 from mercurial.i18n import gettext as _
69 from mercurial.i18n import gettext as _
70 from mercurial.node import *
70 from mercurial.node import *
71 demandload(globals(), 'mercurial:patch,cmdutil,templater,util,mail')
71 demandload(globals(), 'mercurial:patch,cmdutil,templater,util,mail')
72 demandload(globals(), 'email.Parser fnmatch socket time')
72 demandload(globals(), 'email.Parser fnmatch socket time')
73
73
74 # template for single changeset can include email headers.
74 # template for single changeset can include email headers.
75 single_template = '''
75 single_template = '''
76 Subject: changeset in {webroot}: {desc|firstline|strip}
76 Subject: changeset in {webroot}: {desc|firstline|strip}
77 From: {author}
77 From: {author}
78
78
79 changeset {node|short} in {root}
79 changeset {node|short} in {root}
80 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
81 description:
81 description:
82 \t{desc|tabindent|strip}
82 \t{desc|tabindent|strip}
83 '''.lstrip()
83 '''.lstrip()
84
84
85 # template for multiple changesets should not contain email headers,
85 # template for multiple changesets should not contain email headers,
86 # because only first set of headers will be used and result will look
86 # because only first set of headers will be used and result will look
87 # strange.
87 # strange.
88 multiple_template = '''
88 multiple_template = '''
89 changeset {node|short} in {root}
89 changeset {node|short} in {root}
90 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
91 summary: {desc|firstline}
91 summary: {desc|firstline}
92 '''
92 '''
93
93
94 deftemplates = {
94 deftemplates = {
95 'changegroup': multiple_template,
95 'changegroup': multiple_template,
96 }
96 }
97
97
98 class notifier(object):
98 class notifier(object):
99 '''email notification class.'''
99 '''email notification class.'''
100
100
101 def __init__(self, ui, repo, hooktype):
101 def __init__(self, ui, repo, hooktype):
102 self.ui = ui
102 self.ui = ui
103 cfg = self.ui.config('notify', 'config')
103 cfg = self.ui.config('notify', 'config')
104 if cfg:
104 if cfg:
105 self.ui.readsections(cfg, 'usersubs', 'reposubs')
105 self.ui.readsections(cfg, 'usersubs', 'reposubs')
106 self.repo = repo
106 self.repo = repo
107 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 self.stripcount = int(self.ui.config('notify', 'strip', 0))
108 self.root = self.strip(self.repo.root)
108 self.root = self.strip(self.repo.root)
109 self.domain = self.ui.config('notify', 'domain')
109 self.domain = self.ui.config('notify', 'domain')
110 self.subs = self.subscribers()
110 self.subs = self.subscribers()
111
111
112 mapfile = self.ui.config('notify', 'style')
112 mapfile = self.ui.config('notify', 'style')
113 template = (self.ui.config('notify', hooktype) or
113 template = (self.ui.config('notify', hooktype) or
114 self.ui.config('notify', 'template'))
114 self.ui.config('notify', 'template'))
115 self.t = cmdutil.changeset_templater(self.ui, self.repo,
115 self.t = cmdutil.changeset_templater(self.ui, self.repo,
116 False, None, mapfile, False)
116 False, mapfile, False)
117 if not mapfile and not template:
117 if not mapfile and not template:
118 template = deftemplates.get(hooktype) or single_template
118 template = deftemplates.get(hooktype) or single_template
119 if template:
119 if template:
120 template = templater.parsestring(template, quoted=False)
120 template = templater.parsestring(template, quoted=False)
121 self.t.use_template(template)
121 self.t.use_template(template)
122
122
123 def strip(self, path):
123 def strip(self, path):
124 '''strip leading slashes from local path, turn into web-safe path.'''
124 '''strip leading slashes from local path, turn into web-safe path.'''
125
125
126 path = util.pconvert(path)
126 path = util.pconvert(path)
127 count = self.stripcount
127 count = self.stripcount
128 while count > 0:
128 while count > 0:
129 c = path.find('/')
129 c = path.find('/')
130 if c == -1:
130 if c == -1:
131 break
131 break
132 path = path[c+1:]
132 path = path[c+1:]
133 count -= 1
133 count -= 1
134 return path
134 return path
135
135
136 def fixmail(self, addr):
136 def fixmail(self, addr):
137 '''try to clean up email addresses.'''
137 '''try to clean up email addresses.'''
138
138
139 addr = templater.email(addr.strip())
139 addr = templater.email(addr.strip())
140 a = addr.find('@localhost')
140 a = addr.find('@localhost')
141 if a != -1:
141 if a != -1:
142 addr = addr[:a]
142 addr = addr[:a]
143 if '@' not in addr:
143 if '@' not in addr:
144 return addr + '@' + self.domain
144 return addr + '@' + self.domain
145 return addr
145 return addr
146
146
147 def subscribers(self):
147 def subscribers(self):
148 '''return list of email addresses of subscribers to this repo.'''
148 '''return list of email addresses of subscribers to this repo.'''
149
149
150 subs = {}
150 subs = {}
151 for user, pats in self.ui.configitems('usersubs'):
151 for user, pats in self.ui.configitems('usersubs'):
152 for pat in pats.split(','):
152 for pat in pats.split(','):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 subs[self.fixmail(user)] = 1
154 subs[self.fixmail(user)] = 1
155 for pat, users in self.ui.configitems('reposubs'):
155 for pat, users in self.ui.configitems('reposubs'):
156 if fnmatch.fnmatch(self.repo.root, pat):
156 if fnmatch.fnmatch(self.repo.root, pat):
157 for user in users.split(','):
157 for user in users.split(','):
158 subs[self.fixmail(user)] = 1
158 subs[self.fixmail(user)] = 1
159 subs = subs.keys()
159 subs = subs.keys()
160 subs.sort()
160 subs.sort()
161 return subs
161 return subs
162
162
163 def url(self, path=None):
163 def url(self, path=None):
164 return self.ui.config('web', 'baseurl') + (path or self.root)
164 return self.ui.config('web', 'baseurl') + (path or self.root)
165
165
166 def node(self, node):
166 def node(self, node):
167 '''format one changeset.'''
167 '''format one changeset.'''
168
168
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 baseurl=self.ui.config('web', 'baseurl'),
170 baseurl=self.ui.config('web', 'baseurl'),
171 root=self.repo.root,
171 root=self.repo.root,
172 webroot=self.root)
172 webroot=self.root)
173
173
174 def skipsource(self, source):
174 def skipsource(self, source):
175 '''true if incoming changes from this source should be skipped.'''
175 '''true if incoming changes from this source should be skipped.'''
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 return source not in ok_sources
177 return source not in ok_sources
178
178
179 def send(self, node, count, data):
179 def send(self, node, count, data):
180 '''send message.'''
180 '''send message.'''
181
181
182 p = email.Parser.Parser()
182 p = email.Parser.Parser()
183 msg = p.parsestr(data)
183 msg = p.parsestr(data)
184
184
185 def fix_subject():
185 def fix_subject():
186 '''try to make subject line exist and be useful.'''
186 '''try to make subject line exist and be useful.'''
187
187
188 subject = msg['Subject']
188 subject = msg['Subject']
189 if not subject:
189 if not subject:
190 if count > 1:
190 if count > 1:
191 subject = _('%s: %d new changesets') % (self.root, count)
191 subject = _('%s: %d new changesets') % (self.root, count)
192 else:
192 else:
193 changes = self.repo.changelog.read(node)
193 changes = self.repo.changelog.read(node)
194 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
194 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 subject = '%s: %s' % (self.root, s)
195 subject = '%s: %s' % (self.root, s)
196 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
196 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 if maxsubject and len(subject) > maxsubject:
197 if maxsubject and len(subject) > maxsubject:
198 subject = subject[:maxsubject-3] + '...'
198 subject = subject[:maxsubject-3] + '...'
199 del msg['Subject']
199 del msg['Subject']
200 msg['Subject'] = subject
200 msg['Subject'] = subject
201
201
202 def fix_sender():
202 def fix_sender():
203 '''try to make message have proper sender.'''
203 '''try to make message have proper sender.'''
204
204
205 sender = msg['From']
205 sender = msg['From']
206 if not sender:
206 if not sender:
207 sender = self.ui.config('email', 'from') or self.ui.username()
207 sender = self.ui.config('email', 'from') or self.ui.username()
208 if '@' not in sender or '@localhost' in sender:
208 if '@' not in sender or '@localhost' in sender:
209 sender = self.fixmail(sender)
209 sender = self.fixmail(sender)
210 del msg['From']
210 del msg['From']
211 msg['From'] = sender
211 msg['From'] = sender
212
212
213 fix_subject()
213 fix_subject()
214 fix_sender()
214 fix_sender()
215
215
216 msg['X-Hg-Notification'] = 'changeset ' + short(node)
216 msg['X-Hg-Notification'] = 'changeset ' + short(node)
217 if not msg['Message-Id']:
217 if not msg['Message-Id']:
218 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
218 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
219 (short(node), int(time.time()),
219 (short(node), int(time.time()),
220 hash(self.repo.root), socket.getfqdn()))
220 hash(self.repo.root), socket.getfqdn()))
221 msg['To'] = ', '.join(self.subs)
221 msg['To'] = ', '.join(self.subs)
222
222
223 msgtext = msg.as_string(0)
223 msgtext = msg.as_string(0)
224 if self.ui.configbool('notify', 'test', True):
224 if self.ui.configbool('notify', 'test', True):
225 self.ui.write(msgtext)
225 self.ui.write(msgtext)
226 if not msgtext.endswith('\n'):
226 if not msgtext.endswith('\n'):
227 self.ui.write('\n')
227 self.ui.write('\n')
228 else:
228 else:
229 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
229 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
230 (len(self.subs), count))
230 (len(self.subs), count))
231 mail.sendmail(self.ui, templater.email(msg['From']),
231 mail.sendmail(self.ui, templater.email(msg['From']),
232 self.subs, msgtext)
232 self.subs, msgtext)
233
233
234 def diff(self, node, ref):
234 def diff(self, node, ref):
235 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
235 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
236 if maxdiff == 0:
236 if maxdiff == 0:
237 return
237 return
238 prev = self.repo.changelog.parents(node)[0]
238 prev = self.repo.changelog.parents(node)[0]
239 self.ui.pushbuffer()
239 self.ui.pushbuffer()
240 patch.diff(self.repo, prev, ref)
240 patch.diff(self.repo, prev, ref)
241 difflines = self.ui.popbuffer().splitlines(1)
241 difflines = self.ui.popbuffer().splitlines(1)
242 if self.ui.configbool('notify', 'diffstat', True):
242 if self.ui.configbool('notify', 'diffstat', True):
243 s = patch.diffstat(difflines)
243 s = patch.diffstat(difflines)
244 self.ui.write('\ndiffstat:\n\n' + s)
244 self.ui.write('\ndiffstat:\n\n' + s)
245 if maxdiff > 0 and len(difflines) > maxdiff:
245 if maxdiff > 0 and len(difflines) > maxdiff:
246 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
246 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
247 (len(difflines), maxdiff))
247 (len(difflines), maxdiff))
248 difflines = difflines[:maxdiff]
248 difflines = difflines[:maxdiff]
249 elif difflines:
249 elif difflines:
250 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
250 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
251 self.ui.write(*difflines)
251 self.ui.write(*difflines)
252
252
253 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
253 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
254 '''send email notifications to interested subscribers.
254 '''send email notifications to interested subscribers.
255
255
256 if used as changegroup hook, send one email for all changesets in
256 if used as changegroup hook, send one email for all changesets in
257 changegroup. else send one email per changeset.'''
257 changegroup. else send one email per changeset.'''
258 n = notifier(ui, repo, hooktype)
258 n = notifier(ui, repo, hooktype)
259 if not n.subs:
259 if not n.subs:
260 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
260 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
261 return
261 return
262 if n.skipsource(source):
262 if n.skipsource(source):
263 ui.debug(_('notify: changes have source "%s" - skipping\n') %
263 ui.debug(_('notify: changes have source "%s" - skipping\n') %
264 source)
264 source)
265 return
265 return
266 node = bin(node)
266 node = bin(node)
267 ui.pushbuffer()
267 ui.pushbuffer()
268 if hooktype == 'changegroup':
268 if hooktype == 'changegroup':
269 start = repo.changelog.rev(node)
269 start = repo.changelog.rev(node)
270 end = repo.changelog.count()
270 end = repo.changelog.count()
271 count = end - start
271 count = end - start
272 for rev in xrange(start, end):
272 for rev in xrange(start, end):
273 n.node(repo.changelog.node(rev))
273 n.node(repo.changelog.node(rev))
274 n.diff(node, repo.changelog.tip())
274 n.diff(node, repo.changelog.tip())
275 else:
275 else:
276 count = 1
276 count = 1
277 n.node(node)
277 n.node(node)
278 n.diff(node, node)
278 n.diff(node, node)
279 data = ui.popbuffer()
279 data = ui.popbuffer()
280 n.send(node, count, data)
280 n.send(node, count, data)
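Both hooks shown above reduce the repository root to a web path by discarding a configured number of leading components (notifier.strip() here, webroot() in bugzilla.py), using the same find('/')-and-slice loop. A standalone sketch of that loop; the sample path is invented, and the count of 3 mirrors the "strip = 3" example in the notify.py comments:

    def strip(path, count):
        # drop `count` leading path components, as notifier.strip() does
        while count > 0:
            c = path.find('/')
            if c == -1:
                break
            path = path[c + 1:]
            count -= 1
        return path

    print(strip('/var/hg/repos/project', 3))   # -> 'repos/project'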
@@ -1,773 +1,756 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), 'os sys')
11 demandload(globals(), 'os sys')
12 demandload(globals(), 'mdiff util templater patch')
12 demandload(globals(), 'mdiff util templater patch')
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def revpair(repo, revs):
16 def revpair(repo, revs):
17 '''return pair of nodes, given list of revisions. second item can
17 '''return pair of nodes, given list of revisions. second item can
18 be None, meaning use working dir.'''
18 be None, meaning use working dir.'''
19
19
20 def revfix(repo, val, defval):
20 def revfix(repo, val, defval):
21 if not val and val != 0 and defval is not None:
21 if not val and val != 0 and defval is not None:
22 val = defval
22 val = defval
23 return repo.lookup(val)
23 return repo.lookup(val)
24
24
25 if not revs:
25 if not revs:
26 return repo.dirstate.parents()[0], None
26 return repo.dirstate.parents()[0], None
27 end = None
27 end = None
28 if len(revs) == 1:
28 if len(revs) == 1:
29 if revrangesep in revs[0]:
29 if revrangesep in revs[0]:
30 start, end = revs[0].split(revrangesep, 1)
30 start, end = revs[0].split(revrangesep, 1)
31 start = revfix(repo, start, 0)
31 start = revfix(repo, start, 0)
32 end = revfix(repo, end, repo.changelog.count() - 1)
32 end = revfix(repo, end, repo.changelog.count() - 1)
33 else:
33 else:
34 start = revfix(repo, revs[0], None)
34 start = revfix(repo, revs[0], None)
35 elif len(revs) == 2:
35 elif len(revs) == 2:
36 if revrangesep in revs[0] or revrangesep in revs[1]:
36 if revrangesep in revs[0] or revrangesep in revs[1]:
37 raise util.Abort(_('too many revisions specified'))
37 raise util.Abort(_('too many revisions specified'))
38 start = revfix(repo, revs[0], None)
38 start = revfix(repo, revs[0], None)
39 end = revfix(repo, revs[1], None)
39 end = revfix(repo, revs[1], None)
40 else:
40 else:
41 raise util.Abort(_('too many revisions specified'))
41 raise util.Abort(_('too many revisions specified'))
42 return start, end
42 return start, end
43
43
44 def revrange(repo, revs):
44 def revrange(repo, revs):
45 """Yield revision as strings from a list of revision specifications."""
45 """Yield revision as strings from a list of revision specifications."""
46
46
47 def revfix(repo, val, defval):
47 def revfix(repo, val, defval):
48 if not val and val != 0 and defval is not None:
48 if not val and val != 0 and defval is not None:
49 return defval
49 return defval
50 return repo.changelog.rev(repo.lookup(val))
50 return repo.changelog.rev(repo.lookup(val))
51
51
52 seen, l = {}, []
52 seen, l = {}, []
53 for spec in revs:
53 for spec in revs:
54 if revrangesep in spec:
54 if revrangesep in spec:
55 start, end = spec.split(revrangesep, 1)
55 start, end = spec.split(revrangesep, 1)
56 start = revfix(repo, start, 0)
56 start = revfix(repo, start, 0)
57 end = revfix(repo, end, repo.changelog.count() - 1)
57 end = revfix(repo, end, repo.changelog.count() - 1)
58 step = start > end and -1 or 1
58 step = start > end and -1 or 1
59 for rev in xrange(start, end+step, step):
59 for rev in xrange(start, end+step, step):
60 if rev in seen:
60 if rev in seen:
61 continue
61 continue
62 seen[rev] = 1
62 seen[rev] = 1
63 l.append(rev)
63 l.append(rev)
64 else:
64 else:
65 rev = revfix(repo, spec, None)
65 rev = revfix(repo, spec, None)
66 if rev in seen:
66 if rev in seen:
67 continue
67 continue
68 seen[rev] = 1
68 seen[rev] = 1
69 l.append(rev)
69 l.append(rev)
70
70
71 return l
71 return l
72
72
73 def make_filename(repo, pat, node,
73 def make_filename(repo, pat, node,
74 total=None, seqno=None, revwidth=None, pathname=None):
74 total=None, seqno=None, revwidth=None, pathname=None):
75 node_expander = {
75 node_expander = {
76 'H': lambda: hex(node),
76 'H': lambda: hex(node),
77 'R': lambda: str(repo.changelog.rev(node)),
77 'R': lambda: str(repo.changelog.rev(node)),
78 'h': lambda: short(node),
78 'h': lambda: short(node),
79 }
79 }
80 expander = {
80 expander = {
81 '%': lambda: '%',
81 '%': lambda: '%',
82 'b': lambda: os.path.basename(repo.root),
82 'b': lambda: os.path.basename(repo.root),
83 }
83 }
84
84
85 try:
85 try:
86 if node:
86 if node:
87 expander.update(node_expander)
87 expander.update(node_expander)
88 if node and revwidth is not None:
88 if node and revwidth is not None:
89 expander['r'] = (lambda:
89 expander['r'] = (lambda:
90 str(repo.changelog.rev(node)).zfill(revwidth))
90 str(repo.changelog.rev(node)).zfill(revwidth))
91 if total is not None:
91 if total is not None:
92 expander['N'] = lambda: str(total)
92 expander['N'] = lambda: str(total)
93 if seqno is not None:
93 if seqno is not None:
94 expander['n'] = lambda: str(seqno)
94 expander['n'] = lambda: str(seqno)
95 if total is not None and seqno is not None:
95 if total is not None and seqno is not None:
96 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
96 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
97 if pathname is not None:
97 if pathname is not None:
98 expander['s'] = lambda: os.path.basename(pathname)
98 expander['s'] = lambda: os.path.basename(pathname)
99 expander['d'] = lambda: os.path.dirname(pathname) or '.'
99 expander['d'] = lambda: os.path.dirname(pathname) or '.'
100 expander['p'] = lambda: pathname
100 expander['p'] = lambda: pathname
101
101
102 newname = []
102 newname = []
103 patlen = len(pat)
103 patlen = len(pat)
104 i = 0
104 i = 0
105 while i < patlen:
105 while i < patlen:
106 c = pat[i]
106 c = pat[i]
107 if c == '%':
107 if c == '%':
108 i += 1
108 i += 1
109 c = pat[i]
109 c = pat[i]
110 c = expander[c]()
110 c = expander[c]()
111 newname.append(c)
111 newname.append(c)
112 i += 1
112 i += 1
113 return ''.join(newname)
113 return ''.join(newname)
114 except KeyError, inst:
114 except KeyError, inst:
115 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
115 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
116 inst.args[0])
116 inst.args[0])
117
117
118 def make_file(repo, pat, node=None,
118 def make_file(repo, pat, node=None,
119 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
119 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
120 if not pat or pat == '-':
120 if not pat or pat == '-':
121 return 'w' in mode and sys.stdout or sys.stdin
121 return 'w' in mode and sys.stdout or sys.stdin
122 if hasattr(pat, 'write') and 'w' in mode:
122 if hasattr(pat, 'write') and 'w' in mode:
123 return pat
123 return pat
124 if hasattr(pat, 'read') and 'r' in mode:
124 if hasattr(pat, 'read') and 'r' in mode:
125 return pat
125 return pat
126 return open(make_filename(repo, pat, node, total, seqno, revwidth,
126 return open(make_filename(repo, pat, node, total, seqno, revwidth,
127 pathname),
127 pathname),
128 mode)
128 mode)
129
129
130 def matchpats(repo, pats=[], opts={}, head=''):
130 def matchpats(repo, pats=[], opts={}, head=''):
131 cwd = repo.getcwd()
131 cwd = repo.getcwd()
132 if not pats and cwd:
132 if not pats and cwd:
133 opts['include'] = [os.path.join(cwd, i)
133 opts['include'] = [os.path.join(cwd, i)
134 for i in opts.get('include', [])]
134 for i in opts.get('include', [])]
135 opts['exclude'] = [os.path.join(cwd, x)
135 opts['exclude'] = [os.path.join(cwd, x)
136 for x in opts.get('exclude', [])]
136 for x in opts.get('exclude', [])]
137 cwd = ''
137 cwd = ''
138 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
138 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
139 opts.get('exclude'), head)
139 opts.get('exclude'), head)
140
140
141 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
141 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
142 files, matchfn, anypats = matchpats(repo, pats, opts, head)
142 files, matchfn, anypats = matchpats(repo, pats, opts, head)
143 exact = dict.fromkeys(files)
143 exact = dict.fromkeys(files)
144 for src, fn in repo.walk(node=node, files=files, match=matchfn,
144 for src, fn in repo.walk(node=node, files=files, match=matchfn,
145 badmatch=badmatch):
145 badmatch=badmatch):
146 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
146 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
147
147
148 def findrenames(repo, added=None, removed=None, threshold=0.5):
148 def findrenames(repo, added=None, removed=None, threshold=0.5):
149 if added is None or removed is None:
149 if added is None or removed is None:
150 added, removed = repo.status()[1:3]
150 added, removed = repo.status()[1:3]
151 changes = repo.changelog.read(repo.dirstate.parents()[0])
151 changes = repo.changelog.read(repo.dirstate.parents()[0])
152 mf = repo.manifest.read(changes[0])
152 mf = repo.manifest.read(changes[0])
153 for a in added:
153 for a in added:
154 aa = repo.wread(a)
154 aa = repo.wread(a)
155 bestscore, bestname = None, None
155 bestscore, bestname = None, None
156 for r in removed:
156 for r in removed:
157 rr = repo.file(r).read(mf[r])
157 rr = repo.file(r).read(mf[r])
158 delta = mdiff.textdiff(aa, rr)
158 delta = mdiff.textdiff(aa, rr)
159 if len(delta) < len(aa):
159 if len(delta) < len(aa):
160 myscore = 1.0 - (float(len(delta)) / len(aa))
160 myscore = 1.0 - (float(len(delta)) / len(aa))
161 if bestscore is None or myscore > bestscore:
161 if bestscore is None or myscore > bestscore:
162 bestscore, bestname = myscore, r
162 bestscore, bestname = myscore, r
163 if bestname and bestscore >= threshold:
163 if bestname and bestscore >= threshold:
164 yield bestname, a, bestscore
164 yield bestname, a, bestscore
165
165
166 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
166 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
167 similarity=None):
167 similarity=None):
168 if dry_run is None:
168 if dry_run is None:
169 dry_run = opts.get('dry_run')
169 dry_run = opts.get('dry_run')
170 if similarity is None:
170 if similarity is None:
171 similarity = float(opts.get('similarity') or 0)
171 similarity = float(opts.get('similarity') or 0)
172 add, remove = [], []
172 add, remove = [], []
173 mapping = {}
173 mapping = {}
174 for src, abs, rel, exact in walk(repo, pats, opts):
174 for src, abs, rel, exact in walk(repo, pats, opts):
175 if src == 'f' and repo.dirstate.state(abs) == '?':
175 if src == 'f' and repo.dirstate.state(abs) == '?':
176 add.append(abs)
176 add.append(abs)
177 mapping[abs] = rel, exact
177 mapping[abs] = rel, exact
178 if repo.ui.verbose or not exact:
178 if repo.ui.verbose or not exact:
179 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
179 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
180 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
180 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
181 remove.append(abs)
181 remove.append(abs)
182 mapping[abs] = rel, exact
182 mapping[abs] = rel, exact
183 if repo.ui.verbose or not exact:
183 if repo.ui.verbose or not exact:
184 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
184 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
185 if not dry_run:
185 if not dry_run:
186 repo.add(add, wlock=wlock)
186 repo.add(add, wlock=wlock)
187 repo.remove(remove, wlock=wlock)
187 repo.remove(remove, wlock=wlock)
188 if similarity > 0:
188 if similarity > 0:
189 for old, new, score in findrenames(repo, add, remove, similarity):
189 for old, new, score in findrenames(repo, add, remove, similarity):
190 oldrel, oldexact = mapping[old]
190 oldrel, oldexact = mapping[old]
191 newrel, newexact = mapping[new]
191 newrel, newexact = mapping[new]
192 if repo.ui.verbose or not oldexact or not newexact:
192 if repo.ui.verbose or not oldexact or not newexact:
193 repo.ui.status(_('recording removal of %s as rename to %s '
193 repo.ui.status(_('recording removal of %s as rename to %s '
194 '(%d%% similar)\n') %
194 '(%d%% similar)\n') %
195 (oldrel, newrel, score * 100))
195 (oldrel, newrel, score * 100))
196 if not dry_run:
196 if not dry_run:
197 repo.copy(old, new, wlock=wlock)
197 repo.copy(old, new, wlock=wlock)
198
198
199 class changeset_printer(object):
199 class changeset_printer(object):
200 '''show changeset information when templating not requested.'''
200 '''show changeset information when templating not requested.'''
201
201
202 def __init__(self, ui, repo, patch, brinfo, buffered):
202 def __init__(self, ui, repo, patch, buffered):
203 self.ui = ui
203 self.ui = ui
204 self.repo = repo
204 self.repo = repo
205 self.buffered = buffered
205 self.buffered = buffered
206 self.patch = patch
206 self.patch = patch
207 self.brinfo = brinfo
208 self.header = {}
207 self.header = {}
209 self.hunk = {}
208 self.hunk = {}
210 self.lastheader = None
209 self.lastheader = None
211
210
212 def flush(self, rev):
211 def flush(self, rev):
213 if rev in self.header:
212 if rev in self.header:
214 h = self.header[rev]
213 h = self.header[rev]
215 if h != self.lastheader:
214 if h != self.lastheader:
216 self.lastheader = h
215 self.lastheader = h
217 self.ui.write(h)
216 self.ui.write(h)
218 del self.header[rev]
217 del self.header[rev]
219 if rev in self.hunk:
218 if rev in self.hunk:
220 self.ui.write(self.hunk[rev])
219 self.ui.write(self.hunk[rev])
221 del self.hunk[rev]
220 del self.hunk[rev]
222 return 1
221 return 1
223 return 0
222 return 0
224
223
225 def show(self, rev=0, changenode=None, copies=None, **props):
224 def show(self, rev=0, changenode=None, copies=None, **props):
226 if self.buffered:
225 if self.buffered:
227 self.ui.pushbuffer()
226 self.ui.pushbuffer()
228 self._show(rev, changenode, copies, props)
227 self._show(rev, changenode, copies, props)
229 self.hunk[rev] = self.ui.popbuffer()
228 self.hunk[rev] = self.ui.popbuffer()
230 else:
229 else:
231 self._show(rev, changenode, copies, props)
230 self._show(rev, changenode, copies, props)
232
231
233 def _show(self, rev, changenode, copies, props):
232 def _show(self, rev, changenode, copies, props):
234 '''show a single changeset or file revision'''
233 '''show a single changeset or file revision'''
235 log = self.repo.changelog
234 log = self.repo.changelog
236 if changenode is None:
235 if changenode is None:
237 changenode = log.node(rev)
236 changenode = log.node(rev)
238 elif not rev:
237 elif not rev:
239 rev = log.rev(changenode)
238 rev = log.rev(changenode)
240
239
241 if self.ui.quiet:
240 if self.ui.quiet:
242 self.ui.write("%d:%s\n" % (rev, short(changenode)))
241 self.ui.write("%d:%s\n" % (rev, short(changenode)))
243 return
242 return
244
243
245 changes = log.read(changenode)
244 changes = log.read(changenode)
246 date = util.datestr(changes[2])
245 date = util.datestr(changes[2])
247 extra = changes[5]
246 extra = changes[5]
248 branch = extra.get("branch")
247 branch = extra.get("branch")
249
248
250 hexfunc = self.ui.debugflag and hex or short
249 hexfunc = self.ui.debugflag and hex or short
251
250
252 parents = log.parentrevs(rev)
251 parents = log.parentrevs(rev)
253 if not self.ui.debugflag:
252 if not self.ui.debugflag:
254 if parents[1] == nullrev:
253 if parents[1] == nullrev:
255 if parents[0] >= rev - 1:
254 if parents[0] >= rev - 1:
256 parents = []
255 parents = []
257 else:
256 else:
258 parents = [parents[0]]
257 parents = [parents[0]]
259 parents = [(p, hexfunc(log.node(p))) for p in parents]
258 parents = [(p, hexfunc(log.node(p))) for p in parents]
260
259
261 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
260 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
262
261
263 if branch:
262 if branch:
264 branch = util.tolocal(branch)
263 branch = util.tolocal(branch)
265 self.ui.write(_("branch: %s\n") % branch)
264 self.ui.write(_("branch: %s\n") % branch)
266 for tag in self.repo.nodetags(changenode):
265 for tag in self.repo.nodetags(changenode):
267 self.ui.write(_("tag: %s\n") % tag)
266 self.ui.write(_("tag: %s\n") % tag)
268 for parent in parents:
267 for parent in parents:
269 self.ui.write(_("parent: %d:%s\n") % parent)
268 self.ui.write(_("parent: %d:%s\n") % parent)
270
269
271 if self.brinfo:
272 br = self.repo.branchlookup([changenode])
273 if br:
274 self.ui.write(_("branch: %s\n") % " ".join(br[changenode]))
275
276 if self.ui.debugflag:
270 if self.ui.debugflag:
277 self.ui.write(_("manifest: %d:%s\n") %
271 self.ui.write(_("manifest: %d:%s\n") %
278 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
272 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
279 self.ui.write(_("user: %s\n") % changes[1])
273 self.ui.write(_("user: %s\n") % changes[1])
280 self.ui.write(_("date: %s\n") % date)
274 self.ui.write(_("date: %s\n") % date)
281
275
282 if self.ui.debugflag:
276 if self.ui.debugflag:
283 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
277 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
284 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
278 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
285 files):
279 files):
286 if value:
280 if value:
287 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
281 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
288 elif changes[3] and self.ui.verbose:
282 elif changes[3] and self.ui.verbose:
289 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
283 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
290 if copies and self.ui.verbose:
284 if copies and self.ui.verbose:
291 copies = ['%s (%s)' % c for c in copies]
285 copies = ['%s (%s)' % c for c in copies]
292 self.ui.write(_("copies: %s\n") % ' '.join(copies))
286 self.ui.write(_("copies: %s\n") % ' '.join(copies))
293
287
294 if extra and self.ui.debugflag:
288 if extra and self.ui.debugflag:
295 extraitems = extra.items()
289 extraitems = extra.items()
296 extraitems.sort()
290 extraitems.sort()
297 for key, value in extraitems:
291 for key, value in extraitems:
298 self.ui.write(_("extra: %s=%s\n")
292 self.ui.write(_("extra: %s=%s\n")
299 % (key, value.encode('string_escape')))
293 % (key, value.encode('string_escape')))
300
294
301 description = changes[4].strip()
295 description = changes[4].strip()
302 if description:
296 if description:
303 if self.ui.verbose:
297 if self.ui.verbose:
304 self.ui.write(_("description:\n"))
298 self.ui.write(_("description:\n"))
305 self.ui.write(description)
299 self.ui.write(description)
306 self.ui.write("\n\n")
300 self.ui.write("\n\n")
307 else:
301 else:
308 self.ui.write(_("summary: %s\n") %
302 self.ui.write(_("summary: %s\n") %
309 description.splitlines()[0])
303 description.splitlines()[0])
310 self.ui.write("\n")
304 self.ui.write("\n")
311
305
312 self.showpatch(changenode)
306 self.showpatch(changenode)
313
307
314 def showpatch(self, node):
308 def showpatch(self, node):
315 if self.patch:
309 if self.patch:
316 prev = self.repo.changelog.parents(node)[0]
310 prev = self.repo.changelog.parents(node)[0]
317 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
311 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
318 self.ui.write("\n")
312 self.ui.write("\n")
319
313
320 class changeset_templater(changeset_printer):
314 class changeset_templater(changeset_printer):
321 '''format changeset information.'''
315 '''format changeset information.'''
322
316
323 def __init__(self, ui, repo, patch, brinfo, mapfile, buffered):
317 def __init__(self, ui, repo, patch, mapfile, buffered):
324 changeset_printer.__init__(self, ui, repo, patch, brinfo, buffered)
318 changeset_printer.__init__(self, ui, repo, patch, buffered)
325 self.t = templater.templater(mapfile, templater.common_filters,
319 self.t = templater.templater(mapfile, templater.common_filters,
326 cache={'parent': '{rev}:{node|short} ',
320 cache={'parent': '{rev}:{node|short} ',
327 'manifest': '{rev}:{node|short}',
321 'manifest': '{rev}:{node|short}',
328 'filecopy': '{name} ({source})'})
322 'filecopy': '{name} ({source})'})
329
323
330 def use_template(self, t):
324 def use_template(self, t):
331 '''set template string to use'''
325 '''set template string to use'''
332 self.t.cache['changeset'] = t
326 self.t.cache['changeset'] = t
333
327
334 def _show(self, rev, changenode, copies, props):
328 def _show(self, rev, changenode, copies, props):
335 '''show a single changeset or file revision'''
329 '''show a single changeset or file revision'''
336 log = self.repo.changelog
330 log = self.repo.changelog
337 if changenode is None:
331 if changenode is None:
338 changenode = log.node(rev)
332 changenode = log.node(rev)
339 elif not rev:
333 elif not rev:
340 rev = log.rev(changenode)
334 rev = log.rev(changenode)
341
335
342 changes = log.read(changenode)
336 changes = log.read(changenode)
343
337
344 def showlist(name, values, plural=None, **args):
338 def showlist(name, values, plural=None, **args):
345 '''expand set of values.
339 '''expand set of values.
346 name is name of key in template map.
340 name is name of key in template map.
347 values is list of strings or dicts.
341 values is list of strings or dicts.
348 plural is plural of name, if not simply name + 's'.
342 plural is plural of name, if not simply name + 's'.
349
343
350 expansion works like this, given name 'foo'.
344 expansion works like this, given name 'foo'.
351
345
352 if values is empty, expand 'no_foos'.
346 if values is empty, expand 'no_foos'.
353
347
354 if 'foo' not in template map, return values as a string,
348 if 'foo' not in template map, return values as a string,
355 joined by space.
349 joined by space.
356
350
357 expand 'start_foos'.
351 expand 'start_foos'.
358
352
359 for each value, expand 'foo'. if 'last_foo' in template
353 for each value, expand 'foo'. if 'last_foo' in template
360 map, expand it instead of 'foo' for last key.
354 map, expand it instead of 'foo' for last key.
361
355
362 expand 'end_foos'.
356 expand 'end_foos'.
363 '''
357 '''
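# Illustrative expansion, assuming a hypothetical template map (not part
# of this changeset): for name 'tag' with values ['stable', 'tip'] and a
# map that defines 'start_tags', 'tag' and 'last_tag', showlist yields
#   t('start_tags'), t('tag', tag='stable'), t('last_tag', tag='tip')
# and would yield t('no_tags') instead if the value list were empty.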
364 if plural: names = plural
358 if plural: names = plural
365 else: names = name + 's'
359 else: names = name + 's'
366 if not values:
360 if not values:
367 noname = 'no_' + names
361 noname = 'no_' + names
368 if noname in self.t:
362 if noname in self.t:
369 yield self.t(noname, **args)
363 yield self.t(noname, **args)
370 return
364 return
371 if name not in self.t:
365 if name not in self.t:
372 if isinstance(values[0], str):
366 if isinstance(values[0], str):
373 yield ' '.join(values)
367 yield ' '.join(values)
374 else:
368 else:
375 for v in values:
369 for v in values:
376 yield dict(v, **args)
370 yield dict(v, **args)
377 return
371 return
378 startname = 'start_' + names
372 startname = 'start_' + names
379 if startname in self.t:
373 if startname in self.t:
380 yield self.t(startname, **args)
374 yield self.t(startname, **args)
381 vargs = args.copy()
375 vargs = args.copy()
382 def one(v, tag=name):
376 def one(v, tag=name):
383 try:
377 try:
384 vargs.update(v)
378 vargs.update(v)
385 except (AttributeError, ValueError):
379 except (AttributeError, ValueError):
386 try:
380 try:
387 for a, b in v:
381 for a, b in v:
388 vargs[a] = b
382 vargs[a] = b
389 except ValueError:
383 except ValueError:
390 vargs[name] = v
384 vargs[name] = v
391 return self.t(tag, **vargs)
385 return self.t(tag, **vargs)
392 lastname = 'last_' + name
386 lastname = 'last_' + name
393 if lastname in self.t:
387 if lastname in self.t:
394 last = values.pop()
388 last = values.pop()
395 else:
389 else:
396 last = None
390 last = None
397 for v in values:
391 for v in values:
398 yield one(v)
392 yield one(v)
399 if last is not None:
393 if last is not None:
400 yield one(last, tag=lastname)
394 yield one(last, tag=lastname)
401 endname = 'end_' + names
395 endname = 'end_' + names
402 if endname in self.t:
396 if endname in self.t:
403 yield self.t(endname, **args)
397 yield self.t(endname, **args)
404
398
405 def showbranches(**args):
399 def showbranches(**args):
406 branch = changes[5].get("branch")
400 branch = changes[5].get("branch")
407 if branch:
401 if branch:
408 branch = util.tolocal(branch)
402 branch = util.tolocal(branch)
409 return showlist('branch', [branch], plural='branches', **args)
403 return showlist('branch', [branch], plural='branches', **args)
410 # add old style branches if requested
411 if self.brinfo:
412 br = self.repo.branchlookup([changenode])
413 if changenode in br:
414 return showlist('branch', br[changenode],
415 plural='branches', **args)
416
404
417 def showparents(**args):
405 def showparents(**args):
418 parents = [[('rev', log.rev(p)), ('node', hex(p))]
406 parents = [[('rev', log.rev(p)), ('node', hex(p))]
419 for p in log.parents(changenode)
407 for p in log.parents(changenode)
420 if self.ui.debugflag or p != nullid]
408 if self.ui.debugflag or p != nullid]
421 if (not self.ui.debugflag and len(parents) == 1 and
409 if (not self.ui.debugflag and len(parents) == 1 and
422 parents[0][0][1] == rev - 1):
410 parents[0][0][1] == rev - 1):
423 return
411 return
424 return showlist('parent', parents, **args)
412 return showlist('parent', parents, **args)
425
413
426 def showtags(**args):
414 def showtags(**args):
427 return showlist('tag', self.repo.nodetags(changenode), **args)
415 return showlist('tag', self.repo.nodetags(changenode), **args)
428
416
429 def showextras(**args):
417 def showextras(**args):
430 extras = changes[5].items()
418 extras = changes[5].items()
431 extras.sort()
419 extras.sort()
432 for key, value in extras:
420 for key, value in extras:
433 args = args.copy()
421 args = args.copy()
434 args.update(dict(key=key, value=value))
422 args.update(dict(key=key, value=value))
435 yield self.t('extra', **args)
423 yield self.t('extra', **args)
436
424
437 def showcopies(**args):
425 def showcopies(**args):
438 c = [{'name': x[0], 'source': x[1]} for x in copies]
426 c = [{'name': x[0], 'source': x[1]} for x in copies]
439 return showlist('file_copy', c, plural='file_copies', **args)
427 return showlist('file_copy', c, plural='file_copies', **args)
440
428
441 if self.ui.debugflag:
429 if self.ui.debugflag:
442 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
430 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
443 def showfiles(**args):
431 def showfiles(**args):
444 return showlist('file', files[0], **args)
432 return showlist('file', files[0], **args)
445 def showadds(**args):
433 def showadds(**args):
446 return showlist('file_add', files[1], **args)
434 return showlist('file_add', files[1], **args)
447 def showdels(**args):
435 def showdels(**args):
448 return showlist('file_del', files[2], **args)
436 return showlist('file_del', files[2], **args)
449 def showmanifest(**args):
437 def showmanifest(**args):
450 args = args.copy()
438 args = args.copy()
451 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
439 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
452 node=hex(changes[0])))
440 node=hex(changes[0])))
453 return self.t('manifest', **args)
441 return self.t('manifest', **args)
454 else:
442 else:
455 def showfiles(**args):
443 def showfiles(**args):
456 return showlist('file', changes[3], **args)
444 return showlist('file', changes[3], **args)
457 showadds = ''
445 showadds = ''
458 showdels = ''
446 showdels = ''
459 showmanifest = ''
447 showmanifest = ''
460
448
461 defprops = {
449 defprops = {
462 'author': changes[1],
450 'author': changes[1],
463 'branches': showbranches,
451 'branches': showbranches,
464 'date': changes[2],
452 'date': changes[2],
465 'desc': changes[4],
453 'desc': changes[4],
466 'file_adds': showadds,
454 'file_adds': showadds,
467 'file_dels': showdels,
455 'file_dels': showdels,
468 'files': showfiles,
456 'files': showfiles,
469 'file_copies': showcopies,
457 'file_copies': showcopies,
470 'manifest': showmanifest,
458 'manifest': showmanifest,
471 'node': hex(changenode),
459 'node': hex(changenode),
472 'parents': showparents,
460 'parents': showparents,
473 'rev': rev,
461 'rev': rev,
474 'tags': showtags,
462 'tags': showtags,
475 'extras': showextras,
463 'extras': showextras,
476 }
464 }
477 props = props.copy()
465 props = props.copy()
478 props.update(defprops)
466 props.update(defprops)
479
467
480 try:
468 try:
481 if self.ui.debugflag and 'header_debug' in self.t:
469 if self.ui.debugflag and 'header_debug' in self.t:
482 key = 'header_debug'
470 key = 'header_debug'
483 elif self.ui.quiet and 'header_quiet' in self.t:
471 elif self.ui.quiet and 'header_quiet' in self.t:
484 key = 'header_quiet'
472 key = 'header_quiet'
485 elif self.ui.verbose and 'header_verbose' in self.t:
473 elif self.ui.verbose and 'header_verbose' in self.t:
486 key = 'header_verbose'
474 key = 'header_verbose'
487 elif 'header' in self.t:
475 elif 'header' in self.t:
488 key = 'header'
476 key = 'header'
489 else:
477 else:
490 key = ''
478 key = ''
491 if key:
479 if key:
492 h = templater.stringify(self.t(key, **props))
480 h = templater.stringify(self.t(key, **props))
493 if self.buffered:
481 if self.buffered:
494 self.header[rev] = h
482 self.header[rev] = h
495 else:
483 else:
496 self.ui.write(h)
484 self.ui.write(h)
497 if self.ui.debugflag and 'changeset_debug' in self.t:
485 if self.ui.debugflag and 'changeset_debug' in self.t:
498 key = 'changeset_debug'
486 key = 'changeset_debug'
499 elif self.ui.quiet and 'changeset_quiet' in self.t:
487 elif self.ui.quiet and 'changeset_quiet' in self.t:
500 key = 'changeset_quiet'
488 key = 'changeset_quiet'
501 elif self.ui.verbose and 'changeset_verbose' in self.t:
489 elif self.ui.verbose and 'changeset_verbose' in self.t:
502 key = 'changeset_verbose'
490 key = 'changeset_verbose'
503 else:
491 else:
504 key = 'changeset'
492 key = 'changeset'
505 self.ui.write(templater.stringify(self.t(key, **props)))
493 self.ui.write(templater.stringify(self.t(key, **props)))
506 self.showpatch(changenode)
494 self.showpatch(changenode)
507 except KeyError, inst:
495 except KeyError, inst:
508 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
496 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
509 inst.args[0]))
497 inst.args[0]))
510 except SyntaxError, inst:
498 except SyntaxError, inst:
511 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
499 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
512
500
513 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
501 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
514 """show one changeset using template or regular display.
502 """show one changeset using template or regular display.
515
503
516 Display format will be the first non-empty hit of:
504 Display format will be the first non-empty hit of:
517 1. option 'template'
505 1. option 'template'
518 2. option 'style'
506 2. option 'style'
519 3. [ui] setting 'logtemplate'
507 3. [ui] setting 'logtemplate'
520 4. [ui] setting 'style'
508 4. [ui] setting 'style'
521 If all of these values are either unset or the empty string,
509 If all of these values are either unset or the empty string,
522 regular display via changeset_printer() is done.
510 regular display via changeset_printer() is done.
523 """
511 """
524 # options
512 # options
525 patch = False
513 patch = False
526 if opts.get('patch'):
514 if opts.get('patch'):
527 patch = matchfn or util.always
515 patch = matchfn or util.always
528
516
529 br = None
530 if opts.get('branches'):
531 ui.warn(_("the --branches option is deprecated, "
532 "please use 'hg branches' instead\n"))
533 br = True
534 tmpl = opts.get('template')
517 tmpl = opts.get('template')
535 mapfile = None
518 mapfile = None
536 if tmpl:
519 if tmpl:
537 tmpl = templater.parsestring(tmpl, quoted=False)
520 tmpl = templater.parsestring(tmpl, quoted=False)
538 else:
521 else:
539 mapfile = opts.get('style')
522 mapfile = opts.get('style')
540 # ui settings
523 # ui settings
541 if not mapfile:
524 if not mapfile:
542 tmpl = ui.config('ui', 'logtemplate')
525 tmpl = ui.config('ui', 'logtemplate')
543 if tmpl:
526 if tmpl:
544 tmpl = templater.parsestring(tmpl)
527 tmpl = templater.parsestring(tmpl)
545 else:
528 else:
546 mapfile = ui.config('ui', 'style')
529 mapfile = ui.config('ui', 'style')
547
530
548 if tmpl or mapfile:
531 if tmpl or mapfile:
549 if mapfile:
532 if mapfile:
550 if not os.path.split(mapfile)[0]:
533 if not os.path.split(mapfile)[0]:
551 mapname = (templater.templatepath('map-cmdline.' + mapfile)
534 mapname = (templater.templatepath('map-cmdline.' + mapfile)
552 or templater.templatepath(mapfile))
535 or templater.templatepath(mapfile))
553 if mapname: mapfile = mapname
536 if mapname: mapfile = mapname
554 try:
537 try:
555 t = changeset_templater(ui, repo, patch, br, mapfile, buffered)
538 t = changeset_templater(ui, repo, patch, mapfile, buffered)
556 except SyntaxError, inst:
539 except SyntaxError, inst:
557 raise util.Abort(inst.args[0])
540 raise util.Abort(inst.args[0])
558 if tmpl: t.use_template(tmpl)
541 if tmpl: t.use_template(tmpl)
559 return t
542 return t
560 return changeset_printer(ui, repo, patch, br, buffered)
543 return changeset_printer(ui, repo, patch, buffered)
561
544
562 def finddate(ui, repo, date):
545 def finddate(ui, repo, date):
563 """Find the tipmost changeset that matches the given date spec"""
546 """Find the tipmost changeset that matches the given date spec"""
564 df = util.matchdate(date + " to " + date)
547 df = util.matchdate(date + " to " + date)
565 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
548 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
566 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
549 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
567 results = {}
550 results = {}
568 for st, rev, fns in changeiter:
551 for st, rev, fns in changeiter:
569 if st == 'add':
552 if st == 'add':
570 d = get(rev)[2]
553 d = get(rev)[2]
571 if df(d[0]):
554 if df(d[0]):
572 results[rev] = d
555 results[rev] = d
573 elif st == 'iter':
556 elif st == 'iter':
574 if rev in results:
557 if rev in results:
575 ui.status("Found revision %s from %s\n" %
558 ui.status("Found revision %s from %s\n" %
576 (rev, util.datestr(results[rev])))
559 (rev, util.datestr(results[rev])))
577 return str(rev)
560 return str(rev)
578
561
579 raise util.Abort(_("revision matching date not found"))
562 raise util.Abort(_("revision matching date not found"))
580
563
581 def walkchangerevs(ui, repo, pats, change, opts):
564 def walkchangerevs(ui, repo, pats, change, opts):
582 '''Iterate over files and the revs they changed in.
565 '''Iterate over files and the revs they changed in.
583
566
584 Callers most commonly need to iterate backwards over the history
567 Callers most commonly need to iterate backwards over the history
585 they are interested in. Doing so has awful (quadratic-looking)
568 they are interested in. Doing so has awful (quadratic-looking)
586 performance, so we use iterators in a "windowed" way.
569 performance, so we use iterators in a "windowed" way.
587
570
588 We walk a window of revisions in the desired order. Within the
571 We walk a window of revisions in the desired order. Within the
589 window, we first walk forwards to gather data, then in the desired
572 window, we first walk forwards to gather data, then in the desired
590 order (usually backwards) to display it.
573 order (usually backwards) to display it.
591
574
592 This function returns an (iterator, matchfn) tuple. The iterator
575 This function returns an (iterator, matchfn) tuple. The iterator
593 yields 3-tuples. They will be of one of the following forms:
576 yields 3-tuples. They will be of one of the following forms:
594
577
595 "window", incrementing, lastrev: stepping through a window,
578 "window", incrementing, lastrev: stepping through a window,
596 positive if walking forwards through revs, last rev in the
579 positive if walking forwards through revs, last rev in the
597 sequence iterated over - use to reset state for the current window
580 sequence iterated over - use to reset state for the current window
598
581
599 "add", rev, fns: out-of-order traversal of the given file names
582 "add", rev, fns: out-of-order traversal of the given file names
600 fns, which changed during revision rev - use to gather data for
583 fns, which changed during revision rev - use to gather data for
601 possible display
584 possible display
602
585
603 "iter", rev, None: in-order traversal of the revs earlier iterated
586 "iter", rev, None: in-order traversal of the revs earlier iterated
604 over with "add" - use to display data'''
587 over with "add" - use to display data'''
605
588
606 def increasing_windows(start, end, windowsize=8, sizelimit=512):
589 def increasing_windows(start, end, windowsize=8, sizelimit=512):
607 if start < end:
590 if start < end:
608 while start < end:
591 while start < end:
609 yield start, min(windowsize, end-start)
592 yield start, min(windowsize, end-start)
610 start += windowsize
593 start += windowsize
611 if windowsize < sizelimit:
594 if windowsize < sizelimit:
612 windowsize *= 2
595 windowsize *= 2
613 else:
596 else:
614 while start > end:
597 while start > end:
615 yield start, min(windowsize, start-end-1)
598 yield start, min(windowsize, start-end-1)
616 start -= windowsize
599 start -= windowsize
617 if windowsize < sizelimit:
600 if windowsize < sizelimit:
618 windowsize *= 2
601 windowsize *= 2
619
602
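# Worked example (illustration only): increasing_windows(0, 20) yields
# (0, 8) then (8, 12), doubling the window size up to the 512 limit;
# walking backwards, increasing_windows(10, nullrev) yields (10, 8)
# then (2, 2).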
620 files, matchfn, anypats = matchpats(repo, pats, opts)
603 files, matchfn, anypats = matchpats(repo, pats, opts)
621 follow = opts.get('follow') or opts.get('follow_first')
604 follow = opts.get('follow') or opts.get('follow_first')
622
605
623 if repo.changelog.count() == 0:
606 if repo.changelog.count() == 0:
624 return [], matchfn
607 return [], matchfn
625
608
626 if follow:
609 if follow:
627 defrange = '%s:0' % repo.changectx().rev()
610 defrange = '%s:0' % repo.changectx().rev()
628 else:
611 else:
629 defrange = 'tip:0'
612 defrange = 'tip:0'
630 revs = revrange(repo, opts['rev'] or [defrange])
613 revs = revrange(repo, opts['rev'] or [defrange])
631 wanted = {}
614 wanted = {}
632 slowpath = anypats or opts.get('removed')
615 slowpath = anypats or opts.get('removed')
633 fncache = {}
616 fncache = {}
634
617
635 if not slowpath and not files:
618 if not slowpath and not files:
636 # No files, no patterns. Display all revs.
619 # No files, no patterns. Display all revs.
637 wanted = dict.fromkeys(revs)
620 wanted = dict.fromkeys(revs)
638 copies = []
621 copies = []
639 if not slowpath:
622 if not slowpath:
640 # Only files, no patterns. Check the history of each file.
623 # Only files, no patterns. Check the history of each file.
641 def filerevgen(filelog, node):
624 def filerevgen(filelog, node):
642 cl_count = repo.changelog.count()
625 cl_count = repo.changelog.count()
643 if node is None:
626 if node is None:
644 last = filelog.count() - 1
627 last = filelog.count() - 1
645 else:
628 else:
646 last = filelog.rev(node)
629 last = filelog.rev(node)
647 for i, window in increasing_windows(last, nullrev):
630 for i, window in increasing_windows(last, nullrev):
648 revs = []
631 revs = []
649 for j in xrange(i - window, i + 1):
632 for j in xrange(i - window, i + 1):
650 n = filelog.node(j)
633 n = filelog.node(j)
651 revs.append((filelog.linkrev(n),
634 revs.append((filelog.linkrev(n),
652 follow and filelog.renamed(n)))
635 follow and filelog.renamed(n)))
653 revs.reverse()
636 revs.reverse()
654 for rev in revs:
637 for rev in revs:
655 # only yield revs for which we have the changelog; this can
638 # only yield revs for which we have the changelog; this can
656 # happen while doing "hg log" during a pull or commit
639 # happen while doing "hg log" during a pull or commit
657 if rev[0] < cl_count:
640 if rev[0] < cl_count:
658 yield rev
641 yield rev
659 def iterfiles():
642 def iterfiles():
660 for filename in files:
643 for filename in files:
661 yield filename, None
644 yield filename, None
662 for filename_node in copies:
645 for filename_node in copies:
663 yield filename_node
646 yield filename_node
664 minrev, maxrev = min(revs), max(revs)
647 minrev, maxrev = min(revs), max(revs)
665 for file_, node in iterfiles():
648 for file_, node in iterfiles():
666 filelog = repo.file(file_)
649 filelog = repo.file(file_)
667 # A zero count may be a directory or deleted file, so
650 # A zero count may be a directory or deleted file, so
668 # try to find matching entries on the slow path.
651 # try to find matching entries on the slow path.
669 if filelog.count() == 0:
652 if filelog.count() == 0:
670 slowpath = True
653 slowpath = True
671 break
654 break
672 for rev, copied in filerevgen(filelog, node):
655 for rev, copied in filerevgen(filelog, node):
673 if rev <= maxrev:
656 if rev <= maxrev:
674 if rev < minrev:
657 if rev < minrev:
675 break
658 break
676 fncache.setdefault(rev, [])
659 fncache.setdefault(rev, [])
677 fncache[rev].append(file_)
660 fncache[rev].append(file_)
678 wanted[rev] = 1
661 wanted[rev] = 1
679 if follow and copied:
662 if follow and copied:
680 copies.append(copied)
663 copies.append(copied)
681 if slowpath:
664 if slowpath:
682 if follow:
665 if follow:
683 raise util.Abort(_('can only follow copies/renames for explicit '
666 raise util.Abort(_('can only follow copies/renames for explicit '
684 'file names'))
667 'file names'))
685
668
686 # The slow path checks files modified in every changeset.
669 # The slow path checks files modified in every changeset.
687 def changerevgen():
670 def changerevgen():
688 for i, window in increasing_windows(repo.changelog.count()-1,
671 for i, window in increasing_windows(repo.changelog.count()-1,
689 nullrev):
672 nullrev):
690 for j in xrange(i - window, i + 1):
673 for j in xrange(i - window, i + 1):
691 yield j, change(j)[3]
674 yield j, change(j)[3]
692
675
693 for rev, changefiles in changerevgen():
676 for rev, changefiles in changerevgen():
694 matches = filter(matchfn, changefiles)
677 matches = filter(matchfn, changefiles)
695 if matches:
678 if matches:
696 fncache[rev] = matches
679 fncache[rev] = matches
697 wanted[rev] = 1
680 wanted[rev] = 1
698
681
699 class followfilter:
682 class followfilter:
700 def __init__(self, onlyfirst=False):
683 def __init__(self, onlyfirst=False):
701 self.startrev = nullrev
684 self.startrev = nullrev
702 self.roots = []
685 self.roots = []
703 self.onlyfirst = onlyfirst
686 self.onlyfirst = onlyfirst
704
687
705 def match(self, rev):
688 def match(self, rev):
706 def realparents(rev):
689 def realparents(rev):
707 if self.onlyfirst:
690 if self.onlyfirst:
708 return repo.changelog.parentrevs(rev)[0:1]
691 return repo.changelog.parentrevs(rev)[0:1]
709 else:
692 else:
710 return filter(lambda x: x != nullrev,
693 return filter(lambda x: x != nullrev,
711 repo.changelog.parentrevs(rev))
694 repo.changelog.parentrevs(rev))
712
695
713 if self.startrev == nullrev:
696 if self.startrev == nullrev:
714 self.startrev = rev
697 self.startrev = rev
715 return True
698 return True
716
699
717 if rev > self.startrev:
700 if rev > self.startrev:
718 # forward: all descendants
701 # forward: all descendants
719 if not self.roots:
702 if not self.roots:
720 self.roots.append(self.startrev)
703 self.roots.append(self.startrev)
721 for parent in realparents(rev):
704 for parent in realparents(rev):
722 if parent in self.roots:
705 if parent in self.roots:
723 self.roots.append(rev)
706 self.roots.append(rev)
724 return True
707 return True
725 else:
708 else:
726 # backwards: all parents
709 # backwards: all parents
727 if not self.roots:
710 if not self.roots:
728 self.roots.extend(realparents(self.startrev))
711 self.roots.extend(realparents(self.startrev))
729 if rev in self.roots:
712 if rev in self.roots:
730 self.roots.remove(rev)
713 self.roots.remove(rev)
731 self.roots.extend(realparents(rev))
714 self.roots.extend(realparents(rev))
732 return True
715 return True
733
716
734 return False
717 return False
735
718
736 # it might be worthwhile to do this in the iterator if the rev range
719 # it might be worthwhile to do this in the iterator if the rev range
737 # is descending and the prune args are all within that range
720 # is descending and the prune args are all within that range
738 for rev in opts.get('prune', ()):
721 for rev in opts.get('prune', ()):
739 rev = repo.changelog.rev(repo.lookup(rev))
722 rev = repo.changelog.rev(repo.lookup(rev))
740 ff = followfilter()
723 ff = followfilter()
741 stop = min(revs[0], revs[-1])
724 stop = min(revs[0], revs[-1])
742 for x in xrange(rev, stop-1, -1):
725 for x in xrange(rev, stop-1, -1):
743 if ff.match(x) and x in wanted:
726 if ff.match(x) and x in wanted:
744 del wanted[x]
727 del wanted[x]
745
728
746 def iterate():
729 def iterate():
747 if follow and not files:
730 if follow and not files:
748 ff = followfilter(onlyfirst=opts.get('follow_first'))
731 ff = followfilter(onlyfirst=opts.get('follow_first'))
749 def want(rev):
732 def want(rev):
750 if ff.match(rev) and rev in wanted:
733 if ff.match(rev) and rev in wanted:
751 return True
734 return True
752 return False
735 return False
753 else:
736 else:
754 def want(rev):
737 def want(rev):
755 return rev in wanted
738 return rev in wanted
756
739
757 for i, window in increasing_windows(0, len(revs)):
740 for i, window in increasing_windows(0, len(revs)):
758 yield 'window', revs[0] < revs[-1], revs[-1]
741 yield 'window', revs[0] < revs[-1], revs[-1]
759 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
742 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
760 srevs = list(nrevs)
743 srevs = list(nrevs)
761 srevs.sort()
744 srevs.sort()
762 for rev in srevs:
745 for rev in srevs:
763 fns = fncache.get(rev)
746 fns = fncache.get(rev)
764 if not fns:
747 if not fns:
765 def fns_generator():
748 def fns_generator():
766 for f in change(rev)[3]:
749 for f in change(rev)[3]:
767 if matchfn(f):
750 if matchfn(f):
768 yield f
751 yield f
769 fns = fns_generator()
752 fns = fns_generator()
770 yield 'add', rev, fns
753 yield 'add', rev, fns
771 for rev in nrevs:
754 for rev in nrevs:
772 yield 'iter', rev, None
755 yield 'iter', rev, None
773 return iterate(), matchfn
756 return iterate(), matchfn
@@ -1,3314 +1,3278 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
11 demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
12 demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
13 demandload(globals(), "difflib patch time help mdiff tempfile")
13 demandload(globals(), "difflib patch time help mdiff tempfile")
14 demandload(globals(), "traceback errno version atexit")
14 demandload(globals(), "traceback errno version atexit")
15 demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
15 demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
16
16
17 class UnknownCommand(Exception):
17 class UnknownCommand(Exception):
18 """Exception raised if command is not in the command table."""
18 """Exception raised if command is not in the command table."""
19 class AmbiguousCommand(Exception):
19 class AmbiguousCommand(Exception):
20 """Exception raised if command shortcut matches more than one command."""
20 """Exception raised if command shortcut matches more than one command."""
21
21
22 def bail_if_changed(repo):
22 def bail_if_changed(repo):
23 modified, added, removed, deleted = repo.status()[:4]
23 modified, added, removed, deleted = repo.status()[:4]
24 if modified or added or removed or deleted:
24 if modified or added or removed or deleted:
25 raise util.Abort(_("outstanding uncommitted changes"))
25 raise util.Abort(_("outstanding uncommitted changes"))
26
26
27 def logmessage(opts):
27 def logmessage(opts):
28 """ get the log message according to -m and -l option """
28 """ get the log message according to -m and -l option """
29 message = opts['message']
29 message = opts['message']
30 logfile = opts['logfile']
30 logfile = opts['logfile']
31
31
32 if message and logfile:
32 if message and logfile:
33 raise util.Abort(_('options --message and --logfile are mutually '
33 raise util.Abort(_('options --message and --logfile are mutually '
34 'exclusive'))
34 'exclusive'))
35 if not message and logfile:
35 if not message and logfile:
36 try:
36 try:
37 if logfile == '-':
37 if logfile == '-':
38 message = sys.stdin.read()
38 message = sys.stdin.read()
39 else:
39 else:
40 message = open(logfile).read()
40 message = open(logfile).read()
41 except IOError, inst:
41 except IOError, inst:
42 raise util.Abort(_("can't read commit message '%s': %s") %
42 raise util.Abort(_("can't read commit message '%s': %s") %
43 (logfile, inst.strerror))
43 (logfile, inst.strerror))
44 return message
44 return message
45
45
46 def setremoteconfig(ui, opts):
46 def setremoteconfig(ui, opts):
47 "copy remote options to ui tree"
47 "copy remote options to ui tree"
48 if opts.get('ssh'):
48 if opts.get('ssh'):
49 ui.setconfig("ui", "ssh", opts['ssh'])
49 ui.setconfig("ui", "ssh", opts['ssh'])
50 if opts.get('remotecmd'):
50 if opts.get('remotecmd'):
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52
52
53 # Commands start here, listed alphabetically
53 # Commands start here, listed alphabetically
54
54
55 def add(ui, repo, *pats, **opts):
55 def add(ui, repo, *pats, **opts):
56 """add the specified files on the next commit
56 """add the specified files on the next commit
57
57
58 Schedule files to be version controlled and added to the repository.
58 Schedule files to be version controlled and added to the repository.
59
59
60 The files will be added to the repository at the next commit. To
60 The files will be added to the repository at the next commit. To
61 undo an add before that, see hg revert.
61 undo an add before that, see hg revert.
62
62
63 If no names are given, add all files in the repository.
63 If no names are given, add all files in the repository.
64 """
64 """
65
65
66 names = []
66 names = []
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 if exact:
68 if exact:
69 if ui.verbose:
69 if ui.verbose:
70 ui.status(_('adding %s\n') % rel)
70 ui.status(_('adding %s\n') % rel)
71 names.append(abs)
71 names.append(abs)
72 elif repo.dirstate.state(abs) == '?':
72 elif repo.dirstate.state(abs) == '?':
73 ui.status(_('adding %s\n') % rel)
73 ui.status(_('adding %s\n') % rel)
74 names.append(abs)
74 names.append(abs)
75 if not opts.get('dry_run'):
75 if not opts.get('dry_run'):
76 repo.add(names)
76 repo.add(names)
77
77
78 def addremove(ui, repo, *pats, **opts):
78 def addremove(ui, repo, *pats, **opts):
79 """add all new files, delete all missing files
79 """add all new files, delete all missing files
80
80
81 Add all new files and remove all missing files from the repository.
81 Add all new files and remove all missing files from the repository.
82
82
83 New files are ignored if they match any of the patterns in .hgignore. As
83 New files are ignored if they match any of the patterns in .hgignore. As
84 with add, these changes take effect at the next commit.
84 with add, these changes take effect at the next commit.
85
85
86 Use the -s option to detect renamed files. With a parameter > 0,
86 Use the -s option to detect renamed files. With a parameter > 0,
87 this compares every removed file with every added file and records
87 this compares every removed file with every added file and records
88 those similar enough as renames. This option takes a percentage
88 those similar enough as renames. This option takes a percentage
89 between 0 (disabled) and 100 (files must be identical) as its
89 between 0 (disabled) and 100 (files must be identical) as its
90 parameter. Detecting renamed files this way can be expensive.
90 parameter. Detecting renamed files this way can be expensive.
91 """
91 """
92 sim = float(opts.get('similarity') or 0)
92 sim = float(opts.get('similarity') or 0)
93 if sim < 0 or sim > 100:
93 if sim < 0 or sim > 100:
94 raise util.Abort(_('similarity must be between 0 and 100'))
94 raise util.Abort(_('similarity must be between 0 and 100'))
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96
96
97 def annotate(ui, repo, *pats, **opts):
97 def annotate(ui, repo, *pats, **opts):
98 """show changeset information per file line
98 """show changeset information per file line
99
99
100 List changes in files, showing the revision id responsible for each line
100 List changes in files, showing the revision id responsible for each line
101
101
102 This command is useful for discovering who made a change or when a change took
102 This command is useful for discovering who made a change or when a change took
103 place.
103 place.
104
104
105 Without the -a option, annotate will avoid processing files it
105 Without the -a option, annotate will avoid processing files it
106 detects as binary. With -a, annotate will generate an annotation
106 detects as binary. With -a, annotate will generate an annotation
107 anyway, probably with undesirable results.
107 anyway, probably with undesirable results.
108 """
108 """
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110
110
111 if not pats:
111 if not pats:
112 raise util.Abort(_('at least one file name or pattern required'))
112 raise util.Abort(_('at least one file name or pattern required'))
113
113
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 ['number', lambda x: str(x.rev())],
115 ['number', lambda x: str(x.rev())],
116 ['changeset', lambda x: short(x.node())],
116 ['changeset', lambda x: short(x.node())],
117 ['date', getdate], ['follow', lambda x: x.path()]]
117 ['date', getdate], ['follow', lambda x: x.path()]]
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 and not opts['follow']):
119 and not opts['follow']):
120 opts['number'] = 1
120 opts['number'] = 1
121
121
122 ctx = repo.changectx(opts['rev'])
122 ctx = repo.changectx(opts['rev'])
123
123
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 node=ctx.node()):
125 node=ctx.node()):
126 fctx = ctx.filectx(abs)
126 fctx = ctx.filectx(abs)
127 if not opts['text'] and util.binary(fctx.data()):
127 if not opts['text'] and util.binary(fctx.data()):
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 continue
129 continue
130
130
131 lines = fctx.annotate(follow=opts.get('follow'))
131 lines = fctx.annotate(follow=opts.get('follow'))
132 pieces = []
132 pieces = []
133
133
134 for o, f in opmap:
134 for o, f in opmap:
135 if opts[o]:
135 if opts[o]:
136 l = [f(n) for n, dummy in lines]
136 l = [f(n) for n, dummy in lines]
137 if l:
137 if l:
138 m = max(map(len, l))
138 m = max(map(len, l))
139 pieces.append(["%*s" % (m, x) for x in l])
139 pieces.append(["%*s" % (m, x) for x in l])
140
140
141 if pieces:
141 if pieces:
142 for p, l in zip(zip(*pieces), lines):
142 for p, l in zip(zip(*pieces), lines):
143 ui.write("%s: %s" % (" ".join(p), l[1]))
143 ui.write("%s: %s" % (" ".join(p), l[1]))
144
144
145 def archive(ui, repo, dest, **opts):
145 def archive(ui, repo, dest, **opts):
146 '''create unversioned archive of a repository revision
146 '''create unversioned archive of a repository revision
147
147
148 By default, the revision used is the parent of the working
148 By default, the revision used is the parent of the working
149 directory; use "-r" to specify a different revision.
149 directory; use "-r" to specify a different revision.
150
150
151 To specify the type of archive to create, use "-t". Valid
151 To specify the type of archive to create, use "-t". Valid
152 types are:
152 types are:
153
153
154 "files" (default): a directory full of files
154 "files" (default): a directory full of files
155 "tar": tar archive, uncompressed
155 "tar": tar archive, uncompressed
156 "tbz2": tar archive, compressed using bzip2
156 "tbz2": tar archive, compressed using bzip2
157 "tgz": tar archive, compressed using gzip
157 "tgz": tar archive, compressed using gzip
158 "uzip": zip archive, uncompressed
158 "uzip": zip archive, uncompressed
159 "zip": zip archive, compressed using deflate
159 "zip": zip archive, compressed using deflate
160
160
161 The exact name of the destination archive or directory is given
161 The exact name of the destination archive or directory is given
162 using a format string; see "hg help export" for details.
162 using a format string; see "hg help export" for details.
163
163
164 Each member added to an archive file has a directory prefix
164 Each member added to an archive file has a directory prefix
165 prepended. Use "-p" to specify a format string for the prefix.
165 prepended. Use "-p" to specify a format string for the prefix.
166 The default is the basename of the archive, with suffixes removed.
166 The default is the basename of the archive, with suffixes removed.
167 '''
167 '''
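# Illustrative usage (hypothetical names): "hg archive -t tgz -r 1.0
# -p project-%h ../snapshot.tar.gz" writes a gzipped tarball of the
# revision tagged 1.0, with every member prefixed by "project-" plus the
# short changeset hash ("%h" expands as described in "hg help export").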
168
168
169 node = repo.changectx(opts['rev']).node()
169 node = repo.changectx(opts['rev']).node()
170 dest = cmdutil.make_filename(repo, dest, node)
170 dest = cmdutil.make_filename(repo, dest, node)
171 if os.path.realpath(dest) == repo.root:
171 if os.path.realpath(dest) == repo.root:
172 raise util.Abort(_('repository root cannot be destination'))
172 raise util.Abort(_('repository root cannot be destination'))
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 kind = opts.get('type') or 'files'
174 kind = opts.get('type') or 'files'
175 prefix = opts['prefix']
175 prefix = opts['prefix']
176 if dest == '-':
176 if dest == '-':
177 if kind == 'files':
177 if kind == 'files':
178 raise util.Abort(_('cannot archive plain files to stdout'))
178 raise util.Abort(_('cannot archive plain files to stdout'))
179 dest = sys.stdout
179 dest = sys.stdout
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 prefix = cmdutil.make_filename(repo, prefix, node)
181 prefix = cmdutil.make_filename(repo, prefix, node)
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 matchfn, prefix)
183 matchfn, prefix)
184
184
185 def backout(ui, repo, rev, **opts):
185 def backout(ui, repo, rev, **opts):
186 '''reverse effect of earlier changeset
186 '''reverse effect of earlier changeset
187
187
188 Commit the backed out changes as a new changeset. The new
188 Commit the backed out changes as a new changeset. The new
189 changeset is a child of the backed out changeset.
189 changeset is a child of the backed out changeset.
190
190
191 If you back out a changeset other than the tip, a new head is
191 If you back out a changeset other than the tip, a new head is
192 created. This head is the parent of the working directory. If
192 created. This head is the parent of the working directory. If
193 you back out an old changeset, your working directory will appear
193 you back out an old changeset, your working directory will appear
194 old after the backout. You should merge the backout changeset
194 old after the backout. You should merge the backout changeset
195 with another head.
195 with another head.
196
196
197 The --merge option remembers the parent of the working directory
197 The --merge option remembers the parent of the working directory
198 before starting the backout, then merges the new head with that
198 before starting the backout, then merges the new head with that
199 changeset afterwards. This saves you from doing the merge by
199 changeset afterwards. This saves you from doing the merge by
200 hand. The result of this merge is not committed, as for a normal
200 hand. The result of this merge is not committed, as for a normal
201 merge.'''
201 merge.'''
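# Illustrative flow (assumed revision number): "hg backout --merge 1234"
# commits a changeset that reverses 1234 as a child of 1234, then merges
# that new head back with the previous working directory parent (op1
# below), leaving the merge itself uncommitted.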
202
202
203 bail_if_changed(repo)
203 bail_if_changed(repo)
204 op1, op2 = repo.dirstate.parents()
204 op1, op2 = repo.dirstate.parents()
205 if op2 != nullid:
205 if op2 != nullid:
206 raise util.Abort(_('outstanding uncommitted merge'))
206 raise util.Abort(_('outstanding uncommitted merge'))
207 node = repo.lookup(rev)
207 node = repo.lookup(rev)
208 p1, p2 = repo.changelog.parents(node)
208 p1, p2 = repo.changelog.parents(node)
209 if p1 == nullid:
209 if p1 == nullid:
210 raise util.Abort(_('cannot back out a change with no parents'))
210 raise util.Abort(_('cannot back out a change with no parents'))
211 if p2 != nullid:
211 if p2 != nullid:
212 if not opts['parent']:
212 if not opts['parent']:
213 raise util.Abort(_('cannot back out a merge changeset without '
213 raise util.Abort(_('cannot back out a merge changeset without '
214 '--parent'))
214 '--parent'))
215 p = repo.lookup(opts['parent'])
215 p = repo.lookup(opts['parent'])
216 if p not in (p1, p2):
216 if p not in (p1, p2):
217 raise util.Abort(_('%s is not a parent of %s') %
217 raise util.Abort(_('%s is not a parent of %s') %
218 (short(p), short(node)))
218 (short(p), short(node)))
219 parent = p
219 parent = p
220 else:
220 else:
221 if opts['parent']:
221 if opts['parent']:
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
223 parent = p1
223 parent = p1
224 hg.clean(repo, node, show_stats=False)
224 hg.clean(repo, node, show_stats=False)
225 revert_opts = opts.copy()
225 revert_opts = opts.copy()
226 revert_opts['date'] = None
226 revert_opts['date'] = None
227 revert_opts['all'] = True
227 revert_opts['all'] = True
228 revert_opts['rev'] = hex(parent)
228 revert_opts['rev'] = hex(parent)
229 revert(ui, repo, **revert_opts)
229 revert(ui, repo, **revert_opts)
230 commit_opts = opts.copy()
230 commit_opts = opts.copy()
231 commit_opts['addremove'] = False
231 commit_opts['addremove'] = False
232 if not commit_opts['message'] and not commit_opts['logfile']:
232 if not commit_opts['message'] and not commit_opts['logfile']:
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234 commit_opts['force_editor'] = True
234 commit_opts['force_editor'] = True
235 commit(ui, repo, **commit_opts)
235 commit(ui, repo, **commit_opts)
236 def nice(node):
236 def nice(node):
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
238 ui.status(_('changeset %s backs out changeset %s\n') %
238 ui.status(_('changeset %s backs out changeset %s\n') %
239 (nice(repo.changelog.tip()), nice(node)))
239 (nice(repo.changelog.tip()), nice(node)))
240 if op1 != node:
240 if op1 != node:
241 if opts['merge']:
241 if opts['merge']:
242 ui.status(_('merging with changeset %s\n') % nice(op1))
242 ui.status(_('merging with changeset %s\n') % nice(op1))
243 n = _lookup(repo, hex(op1))
243 hg.merge(repo, hex(op1))
244 hg.merge(repo, n)
245 else:
244 else:
246 ui.status(_('the backout changeset is a new head - '
245 ui.status(_('the backout changeset is a new head - '
247 'do not forget to merge\n'))
246 'do not forget to merge\n'))
248 ui.status(_('(use "backout --merge" '
247 ui.status(_('(use "backout --merge" '
249 'if you want to auto-merge)\n'))
248 'if you want to auto-merge)\n'))
250
249
251 def branch(ui, repo, label=None):
250 def branch(ui, repo, label=None):
252 """set or show the current branch name
251 """set or show the current branch name
253
252
254 With <name>, set the current branch name. Otherwise, show the
253 With <name>, set the current branch name. Otherwise, show the
255 current branch name.
254 current branch name.
256 """
255 """
257
256
258 if label is not None:
257 if label is not None:
259 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
258 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
260 else:
259 else:
261 b = util.tolocal(repo.workingctx().branch())
260 b = util.tolocal(repo.workingctx().branch())
262 if b:
261 if b:
263 ui.write("%s\n" % b)
262 ui.write("%s\n" % b)
264
263
265 def branches(ui, repo):
264 def branches(ui, repo):
266 """list repository named branches
265 """list repository named branches
267
266
268 List the repository's named branches.
267 List the repository's named branches.
269 """
268 """
270 b = repo.branchtags()
269 b = repo.branchtags()
271 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
270 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
272 l.sort()
271 l.sort()
273 for r, n, t in l:
272 for r, n, t in l:
274 hexfunc = ui.debugflag and hex or short
273 hexfunc = ui.debugflag and hex or short
275 if ui.quiet:
274 if ui.quiet:
276 ui.write("%s\n" % t)
275 ui.write("%s\n" % t)
277 else:
276 else:
278 t = util.localsub(t, 30)
277 t = util.localsub(t, 30)
279 t += " " * (30 - util.locallen(t))
278 t += " " * (30 - util.locallen(t))
280 ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
279 ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
281
280
282 def bundle(ui, repo, fname, dest=None, **opts):
281 def bundle(ui, repo, fname, dest=None, **opts):
283 """create a changegroup file
282 """create a changegroup file
284
283
285 Generate a compressed changegroup file collecting changesets not
284 Generate a compressed changegroup file collecting changesets not
286 found in the other repository.
285 found in the other repository.
287
286
288 If no destination repository is specified, the destination is assumed
287 If no destination repository is specified, the destination is assumed
289 to have all the nodes specified by one or more --base parameters.
288 to have all the nodes specified by one or more --base parameters.
290
289
291 The bundle file can then be transferred using conventional means and
290 The bundle file can then be transferred using conventional means and
292 applied to another repository with the unbundle or pull command.
291 applied to another repository with the unbundle or pull command.
293 This is useful when direct push and pull are not available or when
292 This is useful when direct push and pull are not available or when
294 exporting an entire repository is undesirable.
293 exporting an entire repository is undesirable.
295
294
296 Applying bundles preserves all changeset contents including
295 Applying bundles preserves all changeset contents including
297 permissions, copy/rename information, and revision history.
296 permissions, copy/rename information, and revision history.
298 """
297 """
299 revs = opts.get('rev') or None
298 revs = opts.get('rev') or None
300 if revs:
299 if revs:
301 revs = [repo.lookup(rev) for rev in revs]
300 revs = [repo.lookup(rev) for rev in revs]
302 base = opts.get('base')
301 base = opts.get('base')
303 if base:
302 if base:
304 if dest:
303 if dest:
305 raise util.Abort(_("--base is incompatible with specifiying "
304 raise util.Abort(_("--base is incompatible with specifiying "
306 "a destination"))
305 "a destination"))
307 base = [repo.lookup(rev) for rev in base]
306 base = [repo.lookup(rev) for rev in base]
308 # create the right base
307 # create the right base
309 # XXX: nodesbetween / changegroup* should be "fixed" instead
308 # XXX: nodesbetween / changegroup* should be "fixed" instead
310 o = []
309 o = []
311 has = {nullid: None}
310 has = {nullid: None}
312 for n in base:
311 for n in base:
313 has.update(repo.changelog.reachable(n))
312 has.update(repo.changelog.reachable(n))
314 if revs:
313 if revs:
315 visit = list(revs)
314 visit = list(revs)
316 else:
315 else:
317 visit = repo.changelog.heads()
316 visit = repo.changelog.heads()
318 seen = {}
317 seen = {}
319 while visit:
318 while visit:
320 n = visit.pop(0)
319 n = visit.pop(0)
321 parents = [p for p in repo.changelog.parents(n) if p not in has]
320 parents = [p for p in repo.changelog.parents(n) if p not in has]
322 if len(parents) == 0:
321 if len(parents) == 0:
323 o.insert(0, n)
322 o.insert(0, n)
324 else:
323 else:
325 for p in parents:
324 for p in parents:
326 if p not in seen:
325 if p not in seen:
327 seen[p] = 1
326 seen[p] = 1
328 visit.append(p)
327 visit.append(p)
329 else:
328 else:
330 setremoteconfig(ui, opts)
329 setremoteconfig(ui, opts)
331 dest = ui.expandpath(dest or 'default-push', dest or 'default')
330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
332 other = hg.repository(ui, dest)
331 other = hg.repository(ui, dest)
333 o = repo.findoutgoing(other, force=opts['force'])
332 o = repo.findoutgoing(other, force=opts['force'])
334
333
335 if revs:
334 if revs:
336 cg = repo.changegroupsubset(o, revs, 'bundle')
335 cg = repo.changegroupsubset(o, revs, 'bundle')
337 else:
336 else:
338 cg = repo.changegroup(o, 'bundle')
337 cg = repo.changegroup(o, 'bundle')
339 changegroup.writebundle(cg, fname, "HG10BZ")
338 changegroup.writebundle(cg, fname, "HG10BZ")
340
339
341 def cat(ui, repo, file1, *pats, **opts):
340 def cat(ui, repo, file1, *pats, **opts):
342 """output the latest or given revisions of files
341 """output the latest or given revisions of files
343
342
344 Print the specified files as they were at the given revision.
343 Print the specified files as they were at the given revision.
345 If no revision is given, the working directory's parent is used, or tip
344 If no revision is given, the working directory's parent is used, or tip
346 if no revision is checked out.
345 if no revision is checked out.
347
346
348 Output may be to a file, in which case the name of the file is
347 Output may be to a file, in which case the name of the file is
349 given using a format string. The formatting rules are the same as
348 given using a format string. The formatting rules are the same as
350 for the export command, with the following additions:
349 for the export command, with the following additions:
351
350
352 %s basename of file being printed
351 %s basename of file being printed
353 %d dirname of file being printed, or '.' if in repo root
352 %d dirname of file being printed, or '.' if in repo root
354 %p root-relative path name of file being printed
353 %p root-relative path name of file being printed
355 """
354 """
356 ctx = repo.changectx(opts['rev'])
355 ctx = repo.changectx(opts['rev'])
357 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
358 ctx.node()):
357 ctx.node()):
359 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
360 fp.write(ctx.filectx(abs).data())
359 fp.write(ctx.filectx(abs).data())
361
360
362 def clone(ui, source, dest=None, **opts):
361 def clone(ui, source, dest=None, **opts):
363 """make a copy of an existing repository
362 """make a copy of an existing repository
364
363
365 Create a copy of an existing repository in a new directory.
364 Create a copy of an existing repository in a new directory.
366
365
367 If no destination directory name is specified, it defaults to the
366 If no destination directory name is specified, it defaults to the
368 basename of the source.
367 basename of the source.
369
368
370 The location of the source is added to the new repository's
369 The location of the source is added to the new repository's
371 .hg/hgrc file, as the default to be used for future pulls.
370 .hg/hgrc file, as the default to be used for future pulls.
372
371
373 For efficiency, hardlinks are used for cloning whenever the source
372 For efficiency, hardlinks are used for cloning whenever the source
374 and destination are on the same filesystem (note this applies only
373 and destination are on the same filesystem (note this applies only
375 to the repository data, not to the checked out files). Some
374 to the repository data, not to the checked out files). Some
376 filesystems, such as AFS, implement hardlinking incorrectly, but
375 filesystems, such as AFS, implement hardlinking incorrectly, but
377 do not report errors. In these cases, use the --pull option to
376 do not report errors. In these cases, use the --pull option to
378 avoid hardlinking.
377 avoid hardlinking.
379
378
380 You can safely clone repositories and checked out files using full
379 You can safely clone repositories and checked out files using full
381 hardlinks with
380 hardlinks with
382
381
383 $ cp -al REPO REPOCLONE
382 $ cp -al REPO REPOCLONE
384
383
385 which is the fastest way to clone. However, the operation is not
384 which is the fastest way to clone. However, the operation is not
386 atomic (making sure REPO is not modified during the operation is
385 atomic (making sure REPO is not modified during the operation is
387 up to you) and you have to make sure your editor breaks hardlinks
386 up to you) and you have to make sure your editor breaks hardlinks
388 (Emacs and most Linux Kernel tools do so).
387 (Emacs and most Linux Kernel tools do so).
389
388
390 If you use the -r option to clone up to a specific revision, no
389 If you use the -r option to clone up to a specific revision, no
391 subsequent revisions will be present in the cloned repository.
390 subsequent revisions will be present in the cloned repository.
392 This option implies --pull, even on local repositories.
391 This option implies --pull, even on local repositories.
393
392
394 See pull for valid source format details.
393 See pull for valid source format details.
395
394
396 It is possible to specify an ssh:// URL as the destination, but no
395 It is possible to specify an ssh:// URL as the destination, but no
397 .hg/hgrc file or working directory will be created on the remote side.
396 .hg/hgrc file or working directory will be created on the remote side.
398 Look at the help text for the pull command for important details
397 Look at the help text for the pull command for important details
399 about ssh:// URLs.
398 about ssh:// URLs.
400 """
399 """
401 setremoteconfig(ui, opts)
400 setremoteconfig(ui, opts)
402 hg.clone(ui, ui.expandpath(source), dest,
401 hg.clone(ui, ui.expandpath(source), dest,
403 pull=opts['pull'],
402 pull=opts['pull'],
404 stream=opts['uncompressed'],
403 stream=opts['uncompressed'],
405 rev=opts['rev'],
404 rev=opts['rev'],
406 update=not opts['noupdate'])
405 update=not opts['noupdate'])
407
406
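# A quick sanity check (a sketch, not part of hg) for the hardlink caveat in
# the clone docstring above: after "cp -al REPO REPOCLONE", saving a file
# from your editor must break the link.  Two paths still share an inode when
# their (st_dev, st_ino) pairs match.
import os

def still_hardlinked(path_a, path_b):
    sa, sb = os.stat(path_a), os.stat(path_b)
    return (sa.st_dev, sa.st_ino) == (sb.st_dev, sb.st_ino)

# e.g. still_hardlinked('REPO/.hg/00changelog.i',       # hypothetical paths
#                       'REPOCLONE/.hg/00changelog.i')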
408 def commit(ui, repo, *pats, **opts):
407 def commit(ui, repo, *pats, **opts):
409 """commit the specified files or all outstanding changes
408 """commit the specified files or all outstanding changes
410
409
411 Commit changes to the given files into the repository.
410 Commit changes to the given files into the repository.
412
411
413 If a list of files is omitted, all changes reported by "hg status"
412 If a list of files is omitted, all changes reported by "hg status"
414 will be committed.
413 will be committed.
415
414
416 If no commit message is specified, the editor configured in your hgrc
415 If no commit message is specified, the editor configured in your hgrc
417 or in the EDITOR environment variable is started to enter a message.
416 or in the EDITOR environment variable is started to enter a message.
418 """
417 """
419 message = logmessage(opts)
418 message = logmessage(opts)
420
419
421 if opts['addremove']:
420 if opts['addremove']:
422 cmdutil.addremove(repo, pats, opts)
421 cmdutil.addremove(repo, pats, opts)
423 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
424 if pats:
423 if pats:
425 status = repo.status(files=fns, match=match)
424 status = repo.status(files=fns, match=match)
426 modified, added, removed, deleted, unknown = status[:5]
425 modified, added, removed, deleted, unknown = status[:5]
427 files = modified + added + removed
426 files = modified + added + removed
428 slist = None
427 slist = None
429 for f in fns:
428 for f in fns:
430 if f not in files:
429 if f not in files:
431 rf = repo.wjoin(f)
430 rf = repo.wjoin(f)
432 if f in unknown:
431 if f in unknown:
433 raise util.Abort(_("file %s not tracked!") % rf)
432 raise util.Abort(_("file %s not tracked!") % rf)
434 try:
433 try:
435 mode = os.lstat(rf)[stat.ST_MODE]
434 mode = os.lstat(rf)[stat.ST_MODE]
436 except OSError:
435 except OSError:
437 raise util.Abort(_("file %s not found!") % rf)
436 raise util.Abort(_("file %s not found!") % rf)
438 if stat.S_ISDIR(mode):
437 if stat.S_ISDIR(mode):
439 name = f + '/'
438 name = f + '/'
440 if slist is None:
439 if slist is None:
441 slist = list(files)
440 slist = list(files)
442 slist.sort()
441 slist.sort()
443 i = bisect.bisect(slist, name)
442 i = bisect.bisect(slist, name)
444 if i >= len(slist) or not slist[i].startswith(name):
443 if i >= len(slist) or not slist[i].startswith(name):
445 raise util.Abort(_("no match under directory %s!")
444 raise util.Abort(_("no match under directory %s!")
446 % rf)
445 % rf)
447 elif not stat.S_ISREG(mode):
446 elif not stat.S_ISREG(mode):
448 raise util.Abort(_("can't commit %s: "
447 raise util.Abort(_("can't commit %s: "
449 "unsupported file type!") % rf)
448 "unsupported file type!") % rf)
450 else:
449 else:
451 files = []
450 files = []
452 try:
451 try:
453 repo.commit(files, message, opts['user'], opts['date'], match,
452 repo.commit(files, message, opts['user'], opts['date'], match,
454 force_editor=opts.get('force_editor'))
453 force_editor=opts.get('force_editor'))
455 except ValueError, inst:
454 except ValueError, inst:
456 raise util.Abort(str(inst))
455 raise util.Abort(str(inst))
457
456
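# Standalone illustration (a sketch) of the bisect trick used in commit()
# above: with the changed files sorted, any file under "dir/" must sort at
# or immediately after the string "dir/", so a single bisect says whether a
# directory argument matches any pending change.
import bisect

def directory_has_changes(changed_files, directory):
    name = directory.rstrip('/') + '/'
    slist = sorted(changed_files)
    i = bisect.bisect(slist, name)
    return i < len(slist) and slist[i].startswith(name)

# directory_has_changes(['a/x.py', 'b/y.py'], 'a')  -> True
# directory_has_changes(['a/x.py', 'b/y.py'], 'c')  -> False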
458 def docopy(ui, repo, pats, opts, wlock):
457 def docopy(ui, repo, pats, opts, wlock):
459 # called with the repo lock held
458 # called with the repo lock held
460 #
459 #
461 # hgsep => pathname that uses "/" to separate directories
460 # hgsep => pathname that uses "/" to separate directories
462 # ossep => pathname that uses os.sep to separate directories
461 # ossep => pathname that uses os.sep to separate directories
463 cwd = repo.getcwd()
462 cwd = repo.getcwd()
464 errors = 0
463 errors = 0
465 copied = []
464 copied = []
466 targets = {}
465 targets = {}
467
466
468 # abs: hgsep
467 # abs: hgsep
469 # rel: ossep
468 # rel: ossep
470 # return: hgsep
469 # return: hgsep
471 def okaytocopy(abs, rel, exact):
470 def okaytocopy(abs, rel, exact):
472 reasons = {'?': _('is not managed'),
471 reasons = {'?': _('is not managed'),
473 'a': _('has been marked for add'),
472 'a': _('has been marked for add'),
474 'r': _('has been marked for remove')}
473 'r': _('has been marked for remove')}
475 state = repo.dirstate.state(abs)
474 state = repo.dirstate.state(abs)
476 reason = reasons.get(state)
475 reason = reasons.get(state)
477 if reason:
476 if reason:
478 if state == 'a':
477 if state == 'a':
479 origsrc = repo.dirstate.copied(abs)
478 origsrc = repo.dirstate.copied(abs)
480 if origsrc is not None:
479 if origsrc is not None:
481 return origsrc
480 return origsrc
482 if exact:
481 if exact:
483 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
484 else:
483 else:
485 return abs
484 return abs
486
485
487 # origsrc: hgsep
486 # origsrc: hgsep
488 # abssrc: hgsep
487 # abssrc: hgsep
489 # relsrc: ossep
488 # relsrc: ossep
490 # target: ossep
489 # target: ossep
491 def copy(origsrc, abssrc, relsrc, target, exact):
490 def copy(origsrc, abssrc, relsrc, target, exact):
492 abstarget = util.canonpath(repo.root, cwd, target)
491 abstarget = util.canonpath(repo.root, cwd, target)
493 reltarget = util.pathto(cwd, abstarget)
492 reltarget = util.pathto(cwd, abstarget)
494 prevsrc = targets.get(abstarget)
493 prevsrc = targets.get(abstarget)
495 if prevsrc is not None:
494 if prevsrc is not None:
496 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
497 (reltarget, util.localpath(abssrc),
496 (reltarget, util.localpath(abssrc),
498 util.localpath(prevsrc)))
497 util.localpath(prevsrc)))
499 return
498 return
500 if (not opts['after'] and os.path.exists(reltarget) or
499 if (not opts['after'] and os.path.exists(reltarget) or
501 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
502 if not opts['force']:
501 if not opts['force']:
503 ui.warn(_('%s: not overwriting - file exists\n') %
502 ui.warn(_('%s: not overwriting - file exists\n') %
504 reltarget)
503 reltarget)
505 return
504 return
506 if not opts['after'] and not opts.get('dry_run'):
505 if not opts['after'] and not opts.get('dry_run'):
507 os.unlink(reltarget)
506 os.unlink(reltarget)
508 if opts['after']:
507 if opts['after']:
509 if not os.path.exists(reltarget):
508 if not os.path.exists(reltarget):
510 return
509 return
511 else:
510 else:
512 targetdir = os.path.dirname(reltarget) or '.'
511 targetdir = os.path.dirname(reltarget) or '.'
513 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
514 os.makedirs(targetdir)
513 os.makedirs(targetdir)
515 try:
514 try:
516 restore = repo.dirstate.state(abstarget) == 'r'
515 restore = repo.dirstate.state(abstarget) == 'r'
517 if restore and not opts.get('dry_run'):
516 if restore and not opts.get('dry_run'):
518 repo.undelete([abstarget], wlock)
517 repo.undelete([abstarget], wlock)
519 try:
518 try:
520 if not opts.get('dry_run'):
519 if not opts.get('dry_run'):
521 util.copyfile(relsrc, reltarget)
520 util.copyfile(relsrc, reltarget)
522 restore = False
521 restore = False
523 finally:
522 finally:
524 if restore:
523 if restore:
525 repo.remove([abstarget], wlock)
524 repo.remove([abstarget], wlock)
526 except IOError, inst:
525 except IOError, inst:
527 if inst.errno == errno.ENOENT:
526 if inst.errno == errno.ENOENT:
528 ui.warn(_('%s: deleted in working copy\n') % relsrc)
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
529 else:
528 else:
530 ui.warn(_('%s: cannot copy - %s\n') %
529 ui.warn(_('%s: cannot copy - %s\n') %
531 (relsrc, inst.strerror))
530 (relsrc, inst.strerror))
532 errors += 1
531 errors += 1
533 return
532 return
534 if ui.verbose or not exact:
533 if ui.verbose or not exact:
535 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
536 targets[abstarget] = abssrc
535 targets[abstarget] = abssrc
537 if abstarget != origsrc and not opts.get('dry_run'):
536 if abstarget != origsrc and not opts.get('dry_run'):
538 repo.copy(origsrc, abstarget, wlock)
537 repo.copy(origsrc, abstarget, wlock)
539 copied.append((abssrc, relsrc, exact))
538 copied.append((abssrc, relsrc, exact))
540
539
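# Sketch of the collision check in the nested copy() above: a dict mapping
# each destination to the first source that claimed it lets a later source
# be reported instead of silently overwriting.  Paths are hypothetical.
targets = {}

def claim(src, dst):
    prev = targets.get(dst)
    if prev is not None:
        print('%s: not overwriting - %s collides with %s' % (dst, src, prev))
        return False
    targets[dst] = src
    return True

claim('a/f.txt', 'dst/f.txt')   # True
claim('b/f.txt', 'dst/f.txt')   # warns and returns False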
541 # pat: ossep
540 # pat: ossep
542 # dest ossep
541 # dest ossep
543 # srcs: list of (hgsep, hgsep, ossep, bool)
542 # srcs: list of (hgsep, hgsep, ossep, bool)
544 # return: function that takes hgsep and returns ossep
543 # return: function that takes hgsep and returns ossep
545 def targetpathfn(pat, dest, srcs):
544 def targetpathfn(pat, dest, srcs):
546 if os.path.isdir(pat):
545 if os.path.isdir(pat):
547 abspfx = util.canonpath(repo.root, cwd, pat)
546 abspfx = util.canonpath(repo.root, cwd, pat)
548 abspfx = util.localpath(abspfx)
547 abspfx = util.localpath(abspfx)
549 if destdirexists:
548 if destdirexists:
550 striplen = len(os.path.split(abspfx)[0])
549 striplen = len(os.path.split(abspfx)[0])
551 else:
550 else:
552 striplen = len(abspfx)
551 striplen = len(abspfx)
553 if striplen:
552 if striplen:
554 striplen += len(os.sep)
553 striplen += len(os.sep)
555 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
556 elif destdirexists:
555 elif destdirexists:
557 res = lambda p: os.path.join(dest,
556 res = lambda p: os.path.join(dest,
558 os.path.basename(util.localpath(p)))
557 os.path.basename(util.localpath(p)))
559 else:
558 else:
560 res = lambda p: dest
559 res = lambda p: dest
561 return res
560 return res
562
561
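# A standalone sketch of the striplen arithmetic in targetpathfn above: when
# the pattern is a directory and the destination directory already exists,
# only the pattern's parent prefix is stripped (keeping the last component);
# otherwise the whole pattern prefix is replaced by the destination.
import os

def target_for(source_path, pattern_dir, dest, dest_dir_exists):
    if dest_dir_exists:
        striplen = len(os.path.split(pattern_dir)[0])
    else:
        striplen = len(pattern_dir)
    if striplen:
        striplen += len(os.sep)
    return os.path.join(dest, source_path[striplen:])

# target_for('src/sub/f.txt', 'src/sub', 'dst', True)  -> 'dst/sub/f.txt'
# target_for('src/sub/f.txt', 'src/sub', 'dst', False) -> 'dst/f.txt'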
563 # pat: ossep
562 # pat: ossep
564 # dest ossep
563 # dest ossep
565 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # srcs: list of (hgsep, hgsep, ossep, bool)
566 # return: function that takes hgsep and returns ossep
565 # return: function that takes hgsep and returns ossep
567 def targetpathafterfn(pat, dest, srcs):
566 def targetpathafterfn(pat, dest, srcs):
568 if util.patkind(pat, None)[0]:
567 if util.patkind(pat, None)[0]:
569 # a mercurial pattern
568 # a mercurial pattern
570 res = lambda p: os.path.join(dest,
569 res = lambda p: os.path.join(dest,
571 os.path.basename(util.localpath(p)))
570 os.path.basename(util.localpath(p)))
572 else:
571 else:
573 abspfx = util.canonpath(repo.root, cwd, pat)
572 abspfx = util.canonpath(repo.root, cwd, pat)
574 if len(abspfx) < len(srcs[0][0]):
573 if len(abspfx) < len(srcs[0][0]):
575 # A directory. Either the target path contains the last
574 # A directory. Either the target path contains the last
576 # component of the source path or it does not.
575 # component of the source path or it does not.
577 def evalpath(striplen):
576 def evalpath(striplen):
578 score = 0
577 score = 0
579 for s in srcs:
578 for s in srcs:
580 t = os.path.join(dest, util.localpath(s[0])[striplen:])
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
581 if os.path.exists(t):
580 if os.path.exists(t):
582 score += 1
581 score += 1
583 return score
582 return score
584
583
585 abspfx = util.localpath(abspfx)
584 abspfx = util.localpath(abspfx)
586 striplen = len(abspfx)
585 striplen = len(abspfx)
587 if striplen:
586 if striplen:
588 striplen += len(os.sep)
587 striplen += len(os.sep)
589 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
590 score = evalpath(striplen)
589 score = evalpath(striplen)
591 striplen1 = len(os.path.split(abspfx)[0])
590 striplen1 = len(os.path.split(abspfx)[0])
592 if striplen1:
591 if striplen1:
593 striplen1 += len(os.sep)
592 striplen1 += len(os.sep)
594 if evalpath(striplen1) > score:
593 if evalpath(striplen1) > score:
595 striplen = striplen1
594 striplen = striplen1
596 res = lambda p: os.path.join(dest,
595 res = lambda p: os.path.join(dest,
597 util.localpath(p)[striplen:])
596 util.localpath(p)[striplen:])
598 else:
597 else:
599 # a file
598 # a file
600 if destdirexists:
599 if destdirexists:
601 res = lambda p: os.path.join(dest,
600 res = lambda p: os.path.join(dest,
602 os.path.basename(util.localpath(p)))
601 os.path.basename(util.localpath(p)))
603 else:
602 else:
604 res = lambda p: dest
603 res = lambda p: dest
605 return res
604 return res
606
605
607
606
608 pats = list(pats)
607 pats = list(pats)
609 if not pats:
608 if not pats:
610 raise util.Abort(_('no source or destination specified'))
609 raise util.Abort(_('no source or destination specified'))
611 if len(pats) == 1:
610 if len(pats) == 1:
612 raise util.Abort(_('no destination specified'))
611 raise util.Abort(_('no destination specified'))
613 dest = pats.pop()
612 dest = pats.pop()
614 destdirexists = os.path.isdir(dest)
613 destdirexists = os.path.isdir(dest)
615 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
616 raise util.Abort(_('with multiple sources, destination must be an '
615 raise util.Abort(_('with multiple sources, destination must be an '
617 'existing directory'))
616 'existing directory'))
618 if opts['after']:
617 if opts['after']:
619 tfn = targetpathafterfn
618 tfn = targetpathafterfn
620 else:
619 else:
621 tfn = targetpathfn
620 tfn = targetpathfn
622 copylist = []
621 copylist = []
623 for pat in pats:
622 for pat in pats:
624 srcs = []
623 srcs = []
625 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
626 origsrc = okaytocopy(abssrc, relsrc, exact)
625 origsrc = okaytocopy(abssrc, relsrc, exact)
627 if origsrc:
626 if origsrc:
628 srcs.append((origsrc, abssrc, relsrc, exact))
627 srcs.append((origsrc, abssrc, relsrc, exact))
629 if not srcs:
628 if not srcs:
630 continue
629 continue
631 copylist.append((tfn(pat, dest, srcs), srcs))
630 copylist.append((tfn(pat, dest, srcs), srcs))
632 if not copylist:
631 if not copylist:
633 raise util.Abort(_('no files to copy'))
632 raise util.Abort(_('no files to copy'))
634
633
635 for targetpath, srcs in copylist:
634 for targetpath, srcs in copylist:
636 for origsrc, abssrc, relsrc, exact in srcs:
635 for origsrc, abssrc, relsrc, exact in srcs:
637 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
636 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
638
637
639 if errors:
638 if errors:
640 ui.warn(_('(consider using --after)\n'))
639 ui.warn(_('(consider using --after)\n'))
641 return errors, copied
640 return errors, copied
642
641
643 def copy(ui, repo, *pats, **opts):
642 def copy(ui, repo, *pats, **opts):
644 """mark files as copied for the next commit
643 """mark files as copied for the next commit
645
644
646 Mark dest as having copies of source files. If dest is a
645 Mark dest as having copies of source files. If dest is a
647 directory, copies are put in that directory. If dest is a file,
646 directory, copies are put in that directory. If dest is a file,
648 there can only be one source.
647 there can only be one source.
649
648
650 By default, this command copies the contents of files as they
649 By default, this command copies the contents of files as they
651 stand in the working directory. If invoked with --after, the
650 stand in the working directory. If invoked with --after, the
652 operation is recorded, but no copying is performed.
651 operation is recorded, but no copying is performed.
653
652
654 This command takes effect in the next commit. To undo a copy
653 This command takes effect in the next commit. To undo a copy
655 before that, see hg revert.
654 before that, see hg revert.
656 """
655 """
657 wlock = repo.wlock(0)
656 wlock = repo.wlock(0)
658 errs, copied = docopy(ui, repo, pats, opts, wlock)
657 errs, copied = docopy(ui, repo, pats, opts, wlock)
659 return errs
658 return errs
660
659
661 def debugancestor(ui, index, rev1, rev2):
660 def debugancestor(ui, index, rev1, rev2):
662 """find the ancestor revision of two revisions in a given index"""
661 """find the ancestor revision of two revisions in a given index"""
663 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
662 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
664 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
663 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
665 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
664 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
666
665
667 def debugcomplete(ui, cmd='', **opts):
666 def debugcomplete(ui, cmd='', **opts):
668 """returns the completion list associated with the given command"""
667 """returns the completion list associated with the given command"""
669
668
670 if opts['options']:
669 if opts['options']:
671 options = []
670 options = []
672 otables = [globalopts]
671 otables = [globalopts]
673 if cmd:
672 if cmd:
674 aliases, entry = findcmd(ui, cmd)
673 aliases, entry = findcmd(ui, cmd)
675 otables.append(entry[1])
674 otables.append(entry[1])
676 for t in otables:
675 for t in otables:
677 for o in t:
676 for o in t:
678 if o[0]:
677 if o[0]:
679 options.append('-%s' % o[0])
678 options.append('-%s' % o[0])
680 options.append('--%s' % o[1])
679 options.append('--%s' % o[1])
681 ui.write("%s\n" % "\n".join(options))
680 ui.write("%s\n" % "\n".join(options))
682 return
681 return
683
682
684 clist = findpossible(ui, cmd).keys()
683 clist = findpossible(ui, cmd).keys()
685 clist.sort()
684 clist.sort()
686 ui.write("%s\n" % "\n".join(clist))
685 ui.write("%s\n" % "\n".join(clist))
687
686
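# Sketch of the option-table walk in debugcomplete above: each entry is a
# (shortopt, longopt, default, help) tuple, and the completion output lists
# "-x" only when a short form exists, plus the "--long" form.  The table
# below is made up for illustration.
table = [
    ('r', 'rev', '', 'revision'),
    ('',  'all', None, 'print all revisions'),
]
options = []
for shortopt, longopt, default, helptext in table:
    if shortopt:
        options.append('-%s' % shortopt)
    options.append('--%s' % longopt)
print("\n".join(options))   # -r, --rev, --all (one per line)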
688 def debugrebuildstate(ui, repo, rev=None):
687 def debugrebuildstate(ui, repo, rev=None):
689 """rebuild the dirstate as it would look like for the given revision"""
688 """rebuild the dirstate as it would look like for the given revision"""
690 if not rev:
689 if not rev:
691 rev = repo.changelog.tip()
690 rev = repo.changelog.tip()
692 else:
691 else:
693 rev = repo.lookup(rev)
692 rev = repo.lookup(rev)
694 change = repo.changelog.read(rev)
693 change = repo.changelog.read(rev)
695 n = change[0]
694 n = change[0]
696 files = repo.manifest.read(n)
695 files = repo.manifest.read(n)
697 wlock = repo.wlock()
696 wlock = repo.wlock()
698 repo.dirstate.rebuild(rev, files)
697 repo.dirstate.rebuild(rev, files)
699
698
700 def debugcheckstate(ui, repo):
699 def debugcheckstate(ui, repo):
701 """validate the correctness of the current dirstate"""
700 """validate the correctness of the current dirstate"""
702 parent1, parent2 = repo.dirstate.parents()
701 parent1, parent2 = repo.dirstate.parents()
703 repo.dirstate.read()
702 repo.dirstate.read()
704 dc = repo.dirstate.map
703 dc = repo.dirstate.map
705 keys = dc.keys()
704 keys = dc.keys()
706 keys.sort()
705 keys.sort()
707 m1n = repo.changelog.read(parent1)[0]
706 m1n = repo.changelog.read(parent1)[0]
708 m2n = repo.changelog.read(parent2)[0]
707 m2n = repo.changelog.read(parent2)[0]
709 m1 = repo.manifest.read(m1n)
708 m1 = repo.manifest.read(m1n)
710 m2 = repo.manifest.read(m2n)
709 m2 = repo.manifest.read(m2n)
711 errors = 0
710 errors = 0
712 for f in dc:
711 for f in dc:
713 state = repo.dirstate.state(f)
712 state = repo.dirstate.state(f)
714 if state in "nr" and f not in m1:
713 if state in "nr" and f not in m1:
715 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
714 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
716 errors += 1
715 errors += 1
717 if state in "a" and f in m1:
716 if state in "a" and f in m1:
718 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
717 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
719 errors += 1
718 errors += 1
720 if state in "m" and f not in m1 and f not in m2:
719 if state in "m" and f not in m1 and f not in m2:
721 ui.warn(_("%s in state %s, but not in either manifest\n") %
720 ui.warn(_("%s in state %s, but not in either manifest\n") %
722 (f, state))
721 (f, state))
723 errors += 1
722 errors += 1
724 for f in m1:
723 for f in m1:
725 state = repo.dirstate.state(f)
724 state = repo.dirstate.state(f)
726 if state not in "nrm":
725 if state not in "nrm":
727 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
726 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
728 errors += 1
727 errors += 1
729 if errors:
728 if errors:
730 error = _(".hg/dirstate inconsistent with current parent's manifest")
729 error = _(".hg/dirstate inconsistent with current parent's manifest")
731 raise util.Abort(error)
730 raise util.Abort(error)
732
731
733 def showconfig(ui, repo, *values, **opts):
732 def showconfig(ui, repo, *values, **opts):
734 """show combined config settings from all hgrc files
733 """show combined config settings from all hgrc files
735
734
736 With no args, print names and values of all config items.
735 With no args, print names and values of all config items.
737
736
738 With one arg of the form section.name, print just the value of
737 With one arg of the form section.name, print just the value of
739 that config item.
738 that config item.
740
739
741 With multiple args, print names and values of all config items
740 With multiple args, print names and values of all config items
742 with matching section names."""
741 with matching section names."""
743
742
744 untrusted = bool(opts.get('untrusted'))
743 untrusted = bool(opts.get('untrusted'))
745 if values:
744 if values:
746 if len([v for v in values if '.' in v]) > 1:
745 if len([v for v in values if '.' in v]) > 1:
747 raise util.Abort(_('only one config item permitted'))
746 raise util.Abort(_('only one config item permitted'))
748 for section, name, value in ui.walkconfig(untrusted=untrusted):
747 for section, name, value in ui.walkconfig(untrusted=untrusted):
749 sectname = section + '.' + name
748 sectname = section + '.' + name
750 if values:
749 if values:
751 for v in values:
750 for v in values:
752 if v == section:
751 if v == section:
753 ui.write('%s=%s\n' % (sectname, value))
752 ui.write('%s=%s\n' % (sectname, value))
754 elif v == sectname:
753 elif v == sectname:
755 ui.write(value, '\n')
754 ui.write(value, '\n')
756 else:
755 else:
757 ui.write('%s=%s\n' % (sectname, value))
756 ui.write('%s=%s\n' % (sectname, value))
758
757
759 def debugsetparents(ui, repo, rev1, rev2=None):
758 def debugsetparents(ui, repo, rev1, rev2=None):
760 """manually set the parents of the current working directory
759 """manually set the parents of the current working directory
761
760
762 This is useful for writing repository conversion tools, but should
761 This is useful for writing repository conversion tools, but should
763 be used with care.
762 be used with care.
764 """
763 """
765
764
766 if not rev2:
765 if not rev2:
767 rev2 = hex(nullid)
766 rev2 = hex(nullid)
768
767
769 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
768 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
770
769
771 def debugstate(ui, repo):
770 def debugstate(ui, repo):
772 """show the contents of the current dirstate"""
771 """show the contents of the current dirstate"""
773 repo.dirstate.read()
772 repo.dirstate.read()
774 dc = repo.dirstate.map
773 dc = repo.dirstate.map
775 keys = dc.keys()
774 keys = dc.keys()
776 keys.sort()
775 keys.sort()
777 for file_ in keys:
776 for file_ in keys:
778 ui.write("%c %3o %10d %s %s\n"
777 ui.write("%c %3o %10d %s %s\n"
779 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
778 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
780 time.strftime("%x %X",
779 time.strftime("%x %X",
781 time.localtime(dc[file_][3])), file_))
780 time.localtime(dc[file_][3])), file_))
782 for f in repo.dirstate.copies():
781 for f in repo.dirstate.copies():
783 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
782 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
784
783
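# Sketch of the per-file line emitted by debugstate above: state character,
# mode, size and mtime, then the filename.  The entry values here are made
# up for illustration; real entries come from repo.dirstate.map.
import time

state, mode, size, mtime = 'n', 0o644, 1024, 946684800   # hypothetical entry
print("%c %3o %10d %s %s" % (state, mode & 0o777, size,
                             time.strftime("%x %X", time.localtime(mtime)),
                             "README"))
# e.g. "n 644       1024 12/31/99 16:00:00 README" (mtime shown in local time)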
785 def debugdata(ui, file_, rev):
784 def debugdata(ui, file_, rev):
786 """dump the contents of an data file revision"""
785 """dump the contents of an data file revision"""
787 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
786 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
788 file_[:-2] + ".i", file_, 0)
787 file_[:-2] + ".i", file_, 0)
789 try:
788 try:
790 ui.write(r.revision(r.lookup(rev)))
789 ui.write(r.revision(r.lookup(rev)))
791 except KeyError:
790 except KeyError:
792 raise util.Abort(_('invalid revision identifier %s') % rev)
791 raise util.Abort(_('invalid revision identifier %s') % rev)
793
792
794 def debugdate(ui, date, range=None, **opts):
793 def debugdate(ui, date, range=None, **opts):
795 """parse and display a date"""
794 """parse and display a date"""
796 if opts["extended"]:
795 if opts["extended"]:
797 d = util.parsedate(date, util.extendeddateformats)
796 d = util.parsedate(date, util.extendeddateformats)
798 else:
797 else:
799 d = util.parsedate(date)
798 d = util.parsedate(date)
800 ui.write("internal: %s %s\n" % d)
799 ui.write("internal: %s %s\n" % d)
801 ui.write("standard: %s\n" % util.datestr(d))
800 ui.write("standard: %s\n" % util.datestr(d))
802 if range:
801 if range:
803 m = util.matchdate(range)
802 m = util.matchdate(range)
804 ui.write("match: %s\n" % m(d[0]))
803 ui.write("match: %s\n" % m(d[0]))
805
804
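# Sketch (standard library only) of the internal date form printed by
# debugdate above: Mercurial stores a date as (unixtime, tzoffset), with the
# offset in seconds west of UTC, and util.datestr renders it roughly as
# below.  The timestamp is hypothetical.
import time

def render(internal):
    when, offset = internal
    print("internal: %s %s" % internal)
    print("standard: %s" % time.strftime("%a %b %d %H:%M:%S %Y",
                                         time.gmtime(when - offset)))

render((1167609600, 0))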
806 def debugindex(ui, file_):
805 def debugindex(ui, file_):
807 """dump the contents of an index file"""
806 """dump the contents of an index file"""
808 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
807 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
809 ui.write(" rev offset length base linkrev" +
808 ui.write(" rev offset length base linkrev" +
810 " nodeid p1 p2\n")
809 " nodeid p1 p2\n")
811 for i in xrange(r.count()):
810 for i in xrange(r.count()):
812 node = r.node(i)
811 node = r.node(i)
813 pp = r.parents(node)
812 pp = r.parents(node)
814 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
813 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
815 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
814 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
816 short(node), short(pp[0]), short(pp[1])))
815 short(node), short(pp[0]), short(pp[1])))
817
816
818 def debugindexdot(ui, file_):
817 def debugindexdot(ui, file_):
819 """dump an index DAG as a .dot file"""
818 """dump an index DAG as a .dot file"""
820 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
819 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
821 ui.write("digraph G {\n")
820 ui.write("digraph G {\n")
822 for i in xrange(r.count()):
821 for i in xrange(r.count()):
823 node = r.node(i)
822 node = r.node(i)
824 pp = r.parents(node)
823 pp = r.parents(node)
825 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
824 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
826 if pp[1] != nullid:
825 if pp[1] != nullid:
827 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
826 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
828 ui.write("}\n")
827 ui.write("}\n")
829
828
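# Sketch of the graph debugindexdot above emits, for a hypothetical
# three-revision history in which rev 2 merges revs 1 and 0; the root's
# null parent appears as -1.  The real command reads a revlog index, e.g.
#   hg debugindexdot .hg/00changelog.i | dot -Tpng -o dag.png
# (the index path depends on the repository format).
parents = {0: (-1, -1), 1: (0, -1), 2: (1, 0)}   # rev -> (p1, p2), made up
lines = ["digraph G {"]
for rev in sorted(parents):
    p1, p2 = parents[rev]
    lines.append("\t%d -> %d" % (p1, rev))
    if p2 != -1:
        lines.append("\t%d -> %d" % (p2, rev))
lines.append("}")
print("\n".join(lines))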
830 def debuginstall(ui):
829 def debuginstall(ui):
831 '''test Mercurial installation'''
830 '''test Mercurial installation'''
832
831
833 def writetemp(contents):
832 def writetemp(contents):
834 (fd, name) = tempfile.mkstemp()
833 (fd, name) = tempfile.mkstemp()
835 f = os.fdopen(fd, "wb")
834 f = os.fdopen(fd, "wb")
836 f.write(contents)
835 f.write(contents)
837 f.close()
836 f.close()
838 return name
837 return name
839
838
840 problems = 0
839 problems = 0
841
840
842 # encoding
841 # encoding
843 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
842 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
844 try:
843 try:
845 util.fromlocal("test")
844 util.fromlocal("test")
846 except util.Abort, inst:
845 except util.Abort, inst:
847 ui.write(" %s\n" % inst)
846 ui.write(" %s\n" % inst)
848 ui.write(_(" (check that your locale is properly set)\n"))
847 ui.write(_(" (check that your locale is properly set)\n"))
849 problems += 1
848 problems += 1
850
849
851 # compiled modules
850 # compiled modules
852 ui.status(_("Checking extensions...\n"))
851 ui.status(_("Checking extensions...\n"))
853 try:
852 try:
854 import bdiff, mpatch, base85
853 import bdiff, mpatch, base85
855 except Exception, inst:
854 except Exception, inst:
856 ui.write(" %s\n" % inst)
855 ui.write(" %s\n" % inst)
857 ui.write(_(" One or more extensions could not be found"))
856 ui.write(_(" One or more extensions could not be found"))
858 ui.write(_(" (check that you compiled the extensions)\n"))
857 ui.write(_(" (check that you compiled the extensions)\n"))
859 problems += 1
858 problems += 1
860
859
861 # templates
860 # templates
862 ui.status(_("Checking templates...\n"))
861 ui.status(_("Checking templates...\n"))
863 try:
862 try:
864 import templater
863 import templater
865 t = templater.templater(templater.templatepath("map-cmdline.default"))
864 t = templater.templater(templater.templatepath("map-cmdline.default"))
866 except Exception, inst:
865 except Exception, inst:
867 ui.write(" %s\n" % inst)
866 ui.write(" %s\n" % inst)
868 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
867 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
869 problems += 1
868 problems += 1
870
869
871 # patch
870 # patch
872 ui.status(_("Checking patch...\n"))
871 ui.status(_("Checking patch...\n"))
873 path = os.environ.get('PATH', '')
872 path = os.environ.get('PATH', '')
874 patcher = util.find_in_path('gpatch', path,
873 patcher = util.find_in_path('gpatch', path,
875 util.find_in_path('patch', path, None))
874 util.find_in_path('patch', path, None))
876 if not patcher:
875 if not patcher:
877 ui.write(_(" Can't find patch or gpatch in PATH\n"))
876 ui.write(_(" Can't find patch or gpatch in PATH\n"))
878 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
877 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
879 problems += 1
878 problems += 1
880 else:
879 else:
881 # actually attempt a patch here
880 # actually attempt a patch here
882 a = "1\n2\n3\n4\n"
881 a = "1\n2\n3\n4\n"
883 b = "1\n2\n3\ninsert\n4\n"
882 b = "1\n2\n3\ninsert\n4\n"
884 d = mdiff.unidiff(a, None, b, None, "a")
883 d = mdiff.unidiff(a, None, b, None, "a")
885 fa = writetemp(a)
884 fa = writetemp(a)
886 fd = writetemp(d)
885 fd = writetemp(d)
887 fp = os.popen('%s %s %s' % (patcher, fa, fd))
886 fp = os.popen('%s %s %s' % (patcher, fa, fd))
888 files = []
887 files = []
889 output = ""
888 output = ""
890 for line in fp:
889 for line in fp:
891 output += line
890 output += line
892 if line.startswith('patching file '):
891 if line.startswith('patching file '):
893 pf = util.parse_patch_output(line.rstrip())
892 pf = util.parse_patch_output(line.rstrip())
894 files.append(pf)
893 files.append(pf)
895 if files != [fa]:
894 if files != [fa]:
896 ui.write(_(" unexpected patch output!"))
895 ui.write(_(" unexpected patch output!"))
897 ui.write(_(" (you may have an incompatible version of patch)\n"))
896 ui.write(_(" (you may have an incompatible version of patch)\n"))
898 ui.write(output)
897 ui.write(output)
899 problems += 1
898 problems += 1
900 a = file(fa).read()
899 a = file(fa).read()
901 if a != b:
900 if a != b:
902 ui.write(_(" patch test failed!"))
901 ui.write(_(" patch test failed!"))
903 ui.write(_(" (you may have an incompatible version of patch)\n"))
902 ui.write(_(" (you may have an incompatible version of patch)\n"))
904 problems += 1
903 problems += 1
905 os.unlink(fa)
904 os.unlink(fa)
906 os.unlink(fd)
905 os.unlink(fd)
907
906
908 # merge helper
907 # merge helper
909 ui.status(_("Checking merge helper...\n"))
908 ui.status(_("Checking merge helper...\n"))
910 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
909 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
911 or "hgmerge")
910 or "hgmerge")
912 cmdpath = util.find_in_path(cmd, path)
911 cmdpath = util.find_in_path(cmd, path)
913 if not cmdpath:
912 if not cmdpath:
914 cmdpath = util.find_in_path(cmd.split()[0], path)
913 cmdpath = util.find_in_path(cmd.split()[0], path)
915 if not cmdpath:
914 if not cmdpath:
916 if cmd == 'hgmerge':
915 if cmd == 'hgmerge':
917 ui.write(_(" No merge helper set and can't find default"
916 ui.write(_(" No merge helper set and can't find default"
918 " hgmerge script in PATH\n"))
917 " hgmerge script in PATH\n"))
919 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
918 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
920 else:
919 else:
921 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
920 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
922 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
921 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
923 problems += 1
922 problems += 1
924 else:
923 else:
925 # actually attempt a patch here
924 # actually attempt a patch here
926 fa = writetemp("1\n2\n3\n4\n")
925 fa = writetemp("1\n2\n3\n4\n")
927 fl = writetemp("1\n2\n3\ninsert\n4\n")
926 fl = writetemp("1\n2\n3\ninsert\n4\n")
928 fr = writetemp("begin\n1\n2\n3\n4\n")
927 fr = writetemp("begin\n1\n2\n3\n4\n")
929 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
928 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
930 if r:
929 if r:
931 ui.write(_(" got unexpected merge error %d!") % r)
930 ui.write(_(" got unexpected merge error %d!") % r)
932 problems += 1
931 problems += 1
933 m = file(fl).read()
932 m = file(fl).read()
934 if m != "begin\n1\n2\n3\ninsert\n4\n":
933 if m != "begin\n1\n2\n3\ninsert\n4\n":
935 ui.write(_(" got unexpected merge results!") % r)
934 ui.write(_(" got unexpected merge results!") % r)
936 ui.write(_(" (your merge helper may have the"
935 ui.write(_(" (your merge helper may have the"
937 " wrong argument order)\n"))
936 " wrong argument order)\n"))
938 ui.write(m)
937 ui.write(m)
939 os.unlink(fa)
938 os.unlink(fa)
940 os.unlink(fl)
939 os.unlink(fl)
941 os.unlink(fr)
940 os.unlink(fr)
942
941
943 # editor
942 # editor
944 ui.status(_("Checking commit editor...\n"))
943 ui.status(_("Checking commit editor...\n"))
945 editor = (os.environ.get("HGEDITOR") or
944 editor = (os.environ.get("HGEDITOR") or
946 ui.config("ui", "editor") or
945 ui.config("ui", "editor") or
947 os.environ.get("EDITOR", "vi"))
946 os.environ.get("EDITOR", "vi"))
948 cmdpath = util.find_in_path(editor, path)
947 cmdpath = util.find_in_path(editor, path)
949 if not cmdpath:
948 if not cmdpath:
950 cmdpath = util.find_in_path(editor.split()[0], path)
949 cmdpath = util.find_in_path(editor.split()[0], path)
951 if not cmdpath:
950 if not cmdpath:
952 if editor == 'vi':
951 if editor == 'vi':
953 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
952 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
954 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
953 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
955 else:
954 else:
956 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
955 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
957 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
956 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
958 problems += 1
957 problems += 1
959
958
960 # check username
959 # check username
961 ui.status(_("Checking username...\n"))
960 ui.status(_("Checking username...\n"))
962 user = os.environ.get("HGUSER")
961 user = os.environ.get("HGUSER")
963 if user is None:
962 if user is None:
964 user = ui.config("ui", "username")
963 user = ui.config("ui", "username")
965 if user is None:
964 if user is None:
966 user = os.environ.get("EMAIL")
965 user = os.environ.get("EMAIL")
967 if not user:
966 if not user:
968 ui.warn(" ")
967 ui.warn(" ")
969 ui.username()
968 ui.username()
970 ui.write(_(" (specify a username in your .hgrc file)\n"))
969 ui.write(_(" (specify a username in your .hgrc file)\n"))
971
970
972 if not problems:
971 if not problems:
973 ui.status(_("No problems detected\n"))
972 ui.status(_("No problems detected\n"))
974 else:
973 else:
975 ui.write(_("%s problems detected,"
974 ui.write(_("%s problems detected,"
976 " please check your install!\n") % problems)
975 " please check your install!\n") % problems)
977
976
978 return problems
977 return problems
979
978
980 def debugrename(ui, repo, file1, *pats, **opts):
979 def debugrename(ui, repo, file1, *pats, **opts):
981 """dump rename information"""
980 """dump rename information"""
982
981
983 ctx = repo.changectx(opts.get('rev', 'tip'))
982 ctx = repo.changectx(opts.get('rev', 'tip'))
984 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
983 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
985 ctx.node()):
984 ctx.node()):
986 m = ctx.filectx(abs).renamed()
985 m = ctx.filectx(abs).renamed()
987 if m:
986 if m:
988 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
987 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
989 else:
988 else:
990 ui.write(_("%s not renamed\n") % rel)
989 ui.write(_("%s not renamed\n") % rel)
991
990
992 def debugwalk(ui, repo, *pats, **opts):
991 def debugwalk(ui, repo, *pats, **opts):
993 """show how files match on given patterns"""
992 """show how files match on given patterns"""
994 items = list(cmdutil.walk(repo, pats, opts))
993 items = list(cmdutil.walk(repo, pats, opts))
995 if not items:
994 if not items:
996 return
995 return
997 fmt = '%%s %%-%ds %%-%ds %%s' % (
996 fmt = '%%s %%-%ds %%-%ds %%s' % (
998 max([len(abs) for (src, abs, rel, exact) in items]),
997 max([len(abs) for (src, abs, rel, exact) in items]),
999 max([len(rel) for (src, abs, rel, exact) in items]))
998 max([len(rel) for (src, abs, rel, exact) in items]))
1000 for src, abs, rel, exact in items:
999 for src, abs, rel, exact in items:
1001 line = fmt % (src, abs, rel, exact and 'exact' or '')
1000 line = fmt % (src, abs, rel, exact and 'exact' or '')
1002 ui.write("%s\n" % line.rstrip())
1001 ui.write("%s\n" % line.rstrip())
1003
1002
1004 def diff(ui, repo, *pats, **opts):
1003 def diff(ui, repo, *pats, **opts):
1005 """diff repository (or selected files)
1004 """diff repository (or selected files)
1006
1005
1007 Show differences between revisions for the specified files.
1006 Show differences between revisions for the specified files.
1008
1007
1009 Differences between files are shown using the unified diff format.
1008 Differences between files are shown using the unified diff format.
1010
1009
1011 NOTE: diff may generate unexpected results for merges, as it will
1010 NOTE: diff may generate unexpected results for merges, as it will
1012 default to comparing against the working directory's first parent
1011 default to comparing against the working directory's first parent
1013 changeset if no revisions are specified.
1012 changeset if no revisions are specified.
1014
1013
1015 When two revision arguments are given, then changes are shown
1014 When two revision arguments are given, then changes are shown
1016 between those revisions. If only one revision is specified then
1015 between those revisions. If only one revision is specified then
1017 that revision is compared to the working directory, and, when no
1016 that revision is compared to the working directory, and, when no
1018 revisions are specified, the working directory files are compared
1017 revisions are specified, the working directory files are compared
1019 to its parent.
1018 to its parent.
1020
1019
1021 Without the -a option, diff will avoid generating diffs of files
1020 Without the -a option, diff will avoid generating diffs of files
1022 it detects as binary. With -a, diff will generate a diff anyway,
1021 it detects as binary. With -a, diff will generate a diff anyway,
1023 probably with undesirable results.
1022 probably with undesirable results.
1024 """
1023 """
1025 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1024 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1026
1025
1027 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1026 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1028
1027
1029 patch.diff(repo, node1, node2, fns, match=matchfn,
1028 patch.diff(repo, node1, node2, fns, match=matchfn,
1030 opts=patch.diffopts(ui, opts))
1029 opts=patch.diffopts(ui, opts))
1031
1030
1032 def export(ui, repo, *changesets, **opts):
1031 def export(ui, repo, *changesets, **opts):
1033 """dump the header and diffs for one or more changesets
1032 """dump the header and diffs for one or more changesets
1034
1033
1035 Print the changeset header and diffs for one or more revisions.
1034 Print the changeset header and diffs for one or more revisions.
1036
1035
1037 The information shown in the changeset header is: author,
1036 The information shown in the changeset header is: author,
1038 changeset hash, parent(s) and commit comment.
1037 changeset hash, parent(s) and commit comment.
1039
1038
1040 NOTE: export may generate unexpected diff output for merge changesets,
1039 NOTE: export may generate unexpected diff output for merge changesets,
1041 as it will compare the merge changeset against its first parent only.
1040 as it will compare the merge changeset against its first parent only.
1042
1041
1043 Output may be to a file, in which case the name of the file is
1042 Output may be to a file, in which case the name of the file is
1044 given using a format string. The formatting rules are as follows:
1043 given using a format string. The formatting rules are as follows:
1045
1044
1046 %% literal "%" character
1045 %% literal "%" character
1047 %H changeset hash (40 bytes of hexadecimal)
1046 %H changeset hash (40 bytes of hexadecimal)
1048 %N number of patches being generated
1047 %N number of patches being generated
1049 %R changeset revision number
1048 %R changeset revision number
1050 %b basename of the exporting repository
1049 %b basename of the exporting repository
1051 %h short-form changeset hash (12 bytes of hexadecimal)
1050 %h short-form changeset hash (12 bytes of hexadecimal)
1052 %n zero-padded sequence number, starting at 1
1051 %n zero-padded sequence number, starting at 1
1053 %r zero-padded changeset revision number
1052 %r zero-padded changeset revision number
1054
1053
1055 Without the -a option, export will avoid generating diffs of files
1054 Without the -a option, export will avoid generating diffs of files
1056 it detects as binary. With -a, export will generate a diff anyway,
1055 it detects as binary. With -a, export will generate a diff anyway,
1057 probably with undesirable results.
1056 probably with undesirable results.
1058
1057
1059 With the --switch-parent option, the diff will be against the second
1058 With the --switch-parent option, the diff will be against the second
1060 parent. It can be useful to review a merge.
1059 parent. It can be useful to review a merge.
1061 """
1060 """
1062 if not changesets:
1061 if not changesets:
1063 raise util.Abort(_("export requires at least one changeset"))
1062 raise util.Abort(_("export requires at least one changeset"))
1064 revs = cmdutil.revrange(repo, changesets)
1063 revs = cmdutil.revrange(repo, changesets)
1065 if len(revs) > 1:
1064 if len(revs) > 1:
1066 ui.note(_('exporting patches:\n'))
1065 ui.note(_('exporting patches:\n'))
1067 else:
1066 else:
1068 ui.note(_('exporting patch:\n'))
1067 ui.note(_('exporting patch:\n'))
1069 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1068 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1070 switch_parent=opts['switch_parent'],
1069 switch_parent=opts['switch_parent'],
1071 opts=patch.diffopts(ui, opts))
1070 opts=patch.diffopts(ui, opts))
1072
1071
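# Sketch of how the -o/--output keys listed in the export docstring above
# might expand.  All values below are hypothetical; patch.export() performs
# the real substitution (and also handles %% and padding widths).
node = 'a1b2c3d4e5f6a7b8c9d0a1b2c3d4e5f6a7b8c9d0'   # made-up changeset hash
values = {
    'H': node,           # full hash
    'h': node[:12],      # short hash
    'R': '42',           # revision number (made up)
    'r': '0042',         # zero-padded revision number (width illustrative)
    'N': '3',            # number of patches being generated
    'n': '02',           # zero-padded sequence number
    'b': 'myrepo',       # basename of the exporting repository
}
template = '%b-%r-%h.patch'
name, i = '', 0
while i < len(template):
    if template[i] == '%' and i + 1 < len(template):
        name += values[template[i + 1]]
        i += 2
    else:
        name += template[i]
        i += 1
print(name)   # -> myrepo-0042-a1b2c3d4e5f6.patch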
1073 def grep(ui, repo, pattern, *pats, **opts):
1072 def grep(ui, repo, pattern, *pats, **opts):
1074 """search for a pattern in specified files and revisions
1073 """search for a pattern in specified files and revisions
1075
1074
1076 Search revisions of files for a regular expression.
1075 Search revisions of files for a regular expression.
1077
1076
1078 This command behaves differently than Unix grep. It only accepts
1077 This command behaves differently than Unix grep. It only accepts
1079 Python/Perl regexps. It searches repository history, not the
1078 Python/Perl regexps. It searches repository history, not the
1080 working directory. It always prints the revision number in which
1079 working directory. It always prints the revision number in which
1081 a match appears.
1080 a match appears.
1082
1081
1083 By default, grep only prints output for the first revision of a
1082 By default, grep only prints output for the first revision of a
1084 file in which it finds a match. To get it to print every revision
1083 file in which it finds a match. To get it to print every revision
1085 that contains a change in match status ("-" for a match that
1084 that contains a change in match status ("-" for a match that
1086 becomes a non-match, or "+" for a non-match that becomes a match),
1085 becomes a non-match, or "+" for a non-match that becomes a match),
1087 use the --all flag.
1086 use the --all flag.
1088 """
1087 """
1089 reflags = 0
1088 reflags = 0
1090 if opts['ignore_case']:
1089 if opts['ignore_case']:
1091 reflags |= re.I
1090 reflags |= re.I
1092 regexp = re.compile(pattern, reflags)
1091 regexp = re.compile(pattern, reflags)
1093 sep, eol = ':', '\n'
1092 sep, eol = ':', '\n'
1094 if opts['print0']:
1093 if opts['print0']:
1095 sep = eol = '\0'
1094 sep = eol = '\0'
1096
1095
1097 fcache = {}
1096 fcache = {}
1098 def getfile(fn):
1097 def getfile(fn):
1099 if fn not in fcache:
1098 if fn not in fcache:
1100 fcache[fn] = repo.file(fn)
1099 fcache[fn] = repo.file(fn)
1101 return fcache[fn]
1100 return fcache[fn]
1102
1101
1103 def matchlines(body):
1102 def matchlines(body):
1104 begin = 0
1103 begin = 0
1105 linenum = 0
1104 linenum = 0
1106 while True:
1105 while True:
1107 match = regexp.search(body, begin)
1106 match = regexp.search(body, begin)
1108 if not match:
1107 if not match:
1109 break
1108 break
1110 mstart, mend = match.span()
1109 mstart, mend = match.span()
1111 linenum += body.count('\n', begin, mstart) + 1
1110 linenum += body.count('\n', begin, mstart) + 1
1112 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1111 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1113 lend = body.find('\n', mend)
1112 lend = body.find('\n', mend)
1114 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1113 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1115 begin = lend + 1
1114 begin = lend + 1
1116
1115
1117 class linestate(object):
1116 class linestate(object):
1118 def __init__(self, line, linenum, colstart, colend):
1117 def __init__(self, line, linenum, colstart, colend):
1119 self.line = line
1118 self.line = line
1120 self.linenum = linenum
1119 self.linenum = linenum
1121 self.colstart = colstart
1120 self.colstart = colstart
1122 self.colend = colend
1121 self.colend = colend
1123
1122
1124 def __eq__(self, other):
1123 def __eq__(self, other):
1125 return self.line == other.line
1124 return self.line == other.line
1126
1125
1127 matches = {}
1126 matches = {}
1128 copies = {}
1127 copies = {}
1129 def grepbody(fn, rev, body):
1128 def grepbody(fn, rev, body):
1130 matches[rev].setdefault(fn, [])
1129 matches[rev].setdefault(fn, [])
1131 m = matches[rev][fn]
1130 m = matches[rev][fn]
1132 for lnum, cstart, cend, line in matchlines(body):
1131 for lnum, cstart, cend, line in matchlines(body):
1133 s = linestate(line, lnum, cstart, cend)
1132 s = linestate(line, lnum, cstart, cend)
1134 m.append(s)
1133 m.append(s)
1135
1134
1136 def difflinestates(a, b):
1135 def difflinestates(a, b):
1137 sm = difflib.SequenceMatcher(None, a, b)
1136 sm = difflib.SequenceMatcher(None, a, b)
1138 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1137 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1139 if tag == 'insert':
1138 if tag == 'insert':
1140 for i in xrange(blo, bhi):
1139 for i in xrange(blo, bhi):
1141 yield ('+', b[i])
1140 yield ('+', b[i])
1142 elif tag == 'delete':
1141 elif tag == 'delete':
1143 for i in xrange(alo, ahi):
1142 for i in xrange(alo, ahi):
1144 yield ('-', a[i])
1143 yield ('-', a[i])
1145 elif tag == 'replace':
1144 elif tag == 'replace':
1146 for i in xrange(alo, ahi):
1145 for i in xrange(alo, ahi):
1147 yield ('-', a[i])
1146 yield ('-', a[i])
1148 for i in xrange(blo, bhi):
1147 for i in xrange(blo, bhi):
1149 yield ('+', b[i])
1148 yield ('+', b[i])
1150
1149
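# Standalone demonstration (a sketch) of what difflinestates above yields:
# SequenceMatcher opcodes are reduced to '-' (a matching line that went
# away) and '+' (a matching line that appeared); 'equal' spans yield nothing.
import difflib

def diffstates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in a[alo:ahi]:
                yield '-', line
        if tag in ('insert', 'replace'):
            for line in b[blo:bhi]:
                yield '+', line

old = ['foo = 1', 'bar = 2']
new = ['foo = 1', 'bar = 3', 'baz = 4']
print(list(diffstates(old, new)))
# -> [('-', 'bar = 2'), ('+', 'bar = 3'), ('+', 'baz = 4')]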
1151 prev = {}
1150 prev = {}
1152 def display(fn, rev, states, prevstates):
1151 def display(fn, rev, states, prevstates):
1153 counts = {'-': 0, '+': 0}
1152 counts = {'-': 0, '+': 0}
1154 filerevmatches = {}
1153 filerevmatches = {}
1155 if incrementing or not opts['all']:
1154 if incrementing or not opts['all']:
1156 a, b, r = prevstates, states, rev
1155 a, b, r = prevstates, states, rev
1157 else:
1156 else:
1158 a, b, r = states, prevstates, prev.get(fn, -1)
1157 a, b, r = states, prevstates, prev.get(fn, -1)
1159 for change, l in difflinestates(a, b):
1158 for change, l in difflinestates(a, b):
1160 cols = [fn, str(r)]
1159 cols = [fn, str(r)]
1161 if opts['line_number']:
1160 if opts['line_number']:
1162 cols.append(str(l.linenum))
1161 cols.append(str(l.linenum))
1163 if opts['all']:
1162 if opts['all']:
1164 cols.append(change)
1163 cols.append(change)
1165 if opts['user']:
1164 if opts['user']:
1166 cols.append(ui.shortuser(get(r)[1]))
1165 cols.append(ui.shortuser(get(r)[1]))
1167 if opts['files_with_matches']:
1166 if opts['files_with_matches']:
1168 c = (fn, r)
1167 c = (fn, r)
1169 if c in filerevmatches:
1168 if c in filerevmatches:
1170 continue
1169 continue
1171 filerevmatches[c] = 1
1170 filerevmatches[c] = 1
1172 else:
1171 else:
1173 cols.append(l.line)
1172 cols.append(l.line)
1174 ui.write(sep.join(cols), eol)
1173 ui.write(sep.join(cols), eol)
1175 counts[change] += 1
1174 counts[change] += 1
1176 return counts['+'], counts['-']
1175 return counts['+'], counts['-']
1177
1176
1178 fstate = {}
1177 fstate = {}
1179 skip = {}
1178 skip = {}
1180 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1179 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1181 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1180 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1182 count = 0
1181 count = 0
1183 incrementing = False
1182 incrementing = False
1184 follow = opts.get('follow')
1183 follow = opts.get('follow')
1185 for st, rev, fns in changeiter:
1184 for st, rev, fns in changeiter:
1186 if st == 'window':
1185 if st == 'window':
1187 incrementing = rev
1186 incrementing = rev
1188 matches.clear()
1187 matches.clear()
1189 elif st == 'add':
1188 elif st == 'add':
1190 mf = repo.changectx(rev).manifest()
1189 mf = repo.changectx(rev).manifest()
1191 matches[rev] = {}
1190 matches[rev] = {}
1192 for fn in fns:
1191 for fn in fns:
1193 if fn in skip:
1192 if fn in skip:
1194 continue
1193 continue
1195 fstate.setdefault(fn, {})
1194 fstate.setdefault(fn, {})
1196 try:
1195 try:
1197 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1196 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1198 if follow:
1197 if follow:
1199 copied = getfile(fn).renamed(mf[fn])
1198 copied = getfile(fn).renamed(mf[fn])
1200 if copied:
1199 if copied:
1201 copies.setdefault(rev, {})[fn] = copied[0]
1200 copies.setdefault(rev, {})[fn] = copied[0]
1202 except KeyError:
1201 except KeyError:
1203 pass
1202 pass
1204 elif st == 'iter':
1203 elif st == 'iter':
1205 states = matches[rev].items()
1204 states = matches[rev].items()
1206 states.sort()
1205 states.sort()
1207 for fn, m in states:
1206 for fn, m in states:
1208 copy = copies.get(rev, {}).get(fn)
1207 copy = copies.get(rev, {}).get(fn)
1209 if fn in skip:
1208 if fn in skip:
1210 if copy:
1209 if copy:
1211 skip[copy] = True
1210 skip[copy] = True
1212 continue
1211 continue
1213 if incrementing or not opts['all'] or fstate[fn]:
1212 if incrementing or not opts['all'] or fstate[fn]:
1214 pos, neg = display(fn, rev, m, fstate[fn])
1213 pos, neg = display(fn, rev, m, fstate[fn])
1215 count += pos + neg
1214 count += pos + neg
1216 if pos and not opts['all']:
1215 if pos and not opts['all']:
1217 skip[fn] = True
1216 skip[fn] = True
1218 if copy:
1217 if copy:
1219 skip[copy] = True
1218 skip[copy] = True
1220 fstate[fn] = m
1219 fstate[fn] = m
1221 if copy:
1220 if copy:
1222 fstate[copy] = m
1221 fstate[copy] = m
1223 prev[fn] = rev
1222 prev[fn] = rev
1224
1223
1225 if not incrementing:
1224 if not incrementing:
1226 fstate = fstate.items()
1225 fstate = fstate.items()
1227 fstate.sort()
1226 fstate.sort()
1228 for fn, state in fstate:
1227 for fn, state in fstate:
1229 if fn in skip:
1228 if fn in skip:
1230 continue
1229 continue
1231 if fn not in copies.get(prev[fn], {}):
1230 if fn not in copies.get(prev[fn], {}):
1232 display(fn, rev, {}, state)
1231 display(fn, rev, {}, state)
1233 return (count == 0 and 1) or 0
1232 return (count == 0 and 1) or 0
1234
1233
1235 def heads(ui, repo, **opts):
1234 def heads(ui, repo, **opts):
1236 """show current repository heads
1235 """show current repository heads
1237
1236
1238 Show all repository head changesets.
1237 Show all repository head changesets.
1239
1238
1240 Repository "heads" are changesets that don't have children
1239 Repository "heads" are changesets that don't have children
1241 changesets. They are where development generally takes place and
1240 changesets. They are where development generally takes place and
1242 are the usual targets for update and merge operations.
1241 are the usual targets for update and merge operations.
1243 """
1242 """
1244 if opts['rev']:
1243 if opts['rev']:
1245 heads = repo.heads(repo.lookup(opts['rev']))
1244 heads = repo.heads(repo.lookup(opts['rev']))
1246 else:
1245 else:
1247 heads = repo.heads()
1246 heads = repo.heads()
1248 displayer = cmdutil.show_changeset(ui, repo, opts)
1247 displayer = cmdutil.show_changeset(ui, repo, opts)
1249 for n in heads:
1248 for n in heads:
1250 displayer.show(changenode=n)
1249 displayer.show(changenode=n)
1251
1250
1252 def help_(ui, name=None, with_version=False):
1251 def help_(ui, name=None, with_version=False):
1253 """show help for a command, extension, or list of commands
1252 """show help for a command, extension, or list of commands
1254
1253
1255 With no arguments, print a list of commands and short help.
1254 With no arguments, print a list of commands and short help.
1256
1255
1257 Given a command name, print help for that command.
1256 Given a command name, print help for that command.
1258
1257
1259 Given an extension name, print help for that extension, and the
1258 Given an extension name, print help for that extension, and the
1260 commands it provides."""
1259 commands it provides."""
1261 option_lists = []
1260 option_lists = []
1262
1261
1263 def helpcmd(name):
1262 def helpcmd(name):
1264 if with_version:
1263 if with_version:
1265 version_(ui)
1264 version_(ui)
1266 ui.write('\n')
1265 ui.write('\n')
1267 aliases, i = findcmd(ui, name)
1266 aliases, i = findcmd(ui, name)
1268 # synopsis
1267 # synopsis
1269 ui.write("%s\n\n" % i[2])
1268 ui.write("%s\n\n" % i[2])
1270
1269
1271 # description
1270 # description
1272 doc = i[0].__doc__
1271 doc = i[0].__doc__
1273 if not doc:
1272 if not doc:
1274 doc = _("(No help text available)")
1273 doc = _("(No help text available)")
1275 if ui.quiet:
1274 if ui.quiet:
1276 doc = doc.splitlines(0)[0]
1275 doc = doc.splitlines(0)[0]
1277 ui.write("%s\n" % doc.rstrip())
1276 ui.write("%s\n" % doc.rstrip())
1278
1277
1279 if not ui.quiet:
1278 if not ui.quiet:
1280 # aliases
1279 # aliases
1281 if len(aliases) > 1:
1280 if len(aliases) > 1:
1282 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1281 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1283
1282
1284 # options
1283 # options
1285 if i[1]:
1284 if i[1]:
1286 option_lists.append(("options", i[1]))
1285 option_lists.append(("options", i[1]))
1287
1286
1288 def helplist(select=None):
1287 def helplist(select=None):
1289 h = {}
1288 h = {}
1290 cmds = {}
1289 cmds = {}
1291 for c, e in table.items():
1290 for c, e in table.items():
1292 f = c.split("|", 1)[0]
1291 f = c.split("|", 1)[0]
1293 if select and not select(f):
1292 if select and not select(f):
1294 continue
1293 continue
1295 if name == "shortlist" and not f.startswith("^"):
1294 if name == "shortlist" and not f.startswith("^"):
1296 continue
1295 continue
1297 f = f.lstrip("^")
1296 f = f.lstrip("^")
1298 if not ui.debugflag and f.startswith("debug"):
1297 if not ui.debugflag and f.startswith("debug"):
1299 continue
1298 continue
1300 doc = e[0].__doc__
1299 doc = e[0].__doc__
1301 if not doc:
1300 if not doc:
1302 doc = _("(No help text available)")
1301 doc = _("(No help text available)")
1303 h[f] = doc.splitlines(0)[0].rstrip()
1302 h[f] = doc.splitlines(0)[0].rstrip()
1304 cmds[f] = c.lstrip("^")
1303 cmds[f] = c.lstrip("^")
1305
1304
1306 fns = h.keys()
1305 fns = h.keys()
1307 fns.sort()
1306 fns.sort()
1308 m = max(map(len, fns))
1307 m = max(map(len, fns))
1309 for f in fns:
1308 for f in fns:
1310 if ui.verbose:
1309 if ui.verbose:
1311 commands = cmds[f].replace("|",", ")
1310 commands = cmds[f].replace("|",", ")
1312 ui.write(" %s:\n %s\n"%(commands, h[f]))
1311 ui.write(" %s:\n %s\n"%(commands, h[f]))
1313 else:
1312 else:
1314 ui.write(' %-*s %s\n' % (m, f, h[f]))
1313 ui.write(' %-*s %s\n' % (m, f, h[f]))
1315
1314
1316 def helptopic(name):
1315 def helptopic(name):
1317 v = None
1316 v = None
1318 for i in help.helptable:
1317 for i in help.helptable:
1319 l = i.split('|')
1318 l = i.split('|')
1320 if name in l:
1319 if name in l:
1321 v = i
1320 v = i
1322 header = l[-1]
1321 header = l[-1]
1323 if not v:
1322 if not v:
1324 raise UnknownCommand(name)
1323 raise UnknownCommand(name)
1325
1324
1326 # description
1325 # description
1327 doc = help.helptable[v]
1326 doc = help.helptable[v]
1328 if not doc:
1327 if not doc:
1329 doc = _("(No help text available)")
1328 doc = _("(No help text available)")
1330 if callable(doc):
1329 if callable(doc):
1331 doc = doc()
1330 doc = doc()
1332
1331
1333 ui.write("%s\n" % header)
1332 ui.write("%s\n" % header)
1334 ui.write("%s\n" % doc.rstrip())
1333 ui.write("%s\n" % doc.rstrip())
1335
1334
1336 def helpext(name):
1335 def helpext(name):
1337 try:
1336 try:
1338 mod = findext(name)
1337 mod = findext(name)
1339 except KeyError:
1338 except KeyError:
1340 raise UnknownCommand(name)
1339 raise UnknownCommand(name)
1341
1340
1342 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1341 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1343 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1342 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1344 for d in doc[1:]:
1343 for d in doc[1:]:
1345 ui.write(d, '\n')
1344 ui.write(d, '\n')
1346
1345
1347 ui.status('\n')
1346 ui.status('\n')
1348 if ui.verbose:
1347 if ui.verbose:
1349 ui.status(_('list of commands:\n\n'))
1348 ui.status(_('list of commands:\n\n'))
1350 else:
1349 else:
1351 ui.status(_('list of commands (use "hg help -v %s" '
1350 ui.status(_('list of commands (use "hg help -v %s" '
1352 'to show aliases and global options):\n\n') % name)
1351 'to show aliases and global options):\n\n') % name)
1353
1352
1354 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1353 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1355 helplist(modcmds.has_key)
1354 helplist(modcmds.has_key)
1356
1355
1357 if name and name != 'shortlist':
1356 if name and name != 'shortlist':
1358 i = None
1357 i = None
1359 for f in (helpcmd, helptopic, helpext):
1358 for f in (helpcmd, helptopic, helpext):
1360 try:
1359 try:
1361 f(name)
1360 f(name)
1362 i = None
1361 i = None
1363 break
1362 break
1364 except UnknownCommand, inst:
1363 except UnknownCommand, inst:
1365 i = inst
1364 i = inst
1366 if i:
1365 if i:
1367 raise i
1366 raise i
1368
1367
1369 else:
1368 else:
1370 # program name
1369 # program name
1371 if ui.verbose or with_version:
1370 if ui.verbose or with_version:
1372 version_(ui)
1371 version_(ui)
1373 else:
1372 else:
1374 ui.status(_("Mercurial Distributed SCM\n"))
1373 ui.status(_("Mercurial Distributed SCM\n"))
1375 ui.status('\n')
1374 ui.status('\n')
1376
1375
1377 # list of commands
1376 # list of commands
1378 if name == "shortlist":
1377 if name == "shortlist":
1379 ui.status(_('basic commands (use "hg help" '
1378 ui.status(_('basic commands (use "hg help" '
1380 'for the full list or option "-v" for details):\n\n'))
1379 'for the full list or option "-v" for details):\n\n'))
1381 elif ui.verbose:
1380 elif ui.verbose:
1382 ui.status(_('list of commands:\n\n'))
1381 ui.status(_('list of commands:\n\n'))
1383 else:
1382 else:
1384 ui.status(_('list of commands (use "hg help -v" '
1383 ui.status(_('list of commands (use "hg help -v" '
1385 'to show aliases and global options):\n\n'))
1384 'to show aliases and global options):\n\n'))
1386
1385
1387 helplist()
1386 helplist()
1388
1387
1389 # global options
1388 # global options
1390 if ui.verbose:
1389 if ui.verbose:
1391 option_lists.append(("global options", globalopts))
1390 option_lists.append(("global options", globalopts))
1392
1391
1393 # list all option lists
1392 # list all option lists
1394 opt_output = []
1393 opt_output = []
1395 for title, options in option_lists:
1394 for title, options in option_lists:
1396 opt_output.append(("\n%s:\n" % title, None))
1395 opt_output.append(("\n%s:\n" % title, None))
1397 for shortopt, longopt, default, desc in options:
1396 for shortopt, longopt, default, desc in options:
1398 if "DEPRECATED" in desc and not ui.verbose: continue
1397 if "DEPRECATED" in desc and not ui.verbose: continue
1399 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1398 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1400 longopt and " --%s" % longopt),
1399 longopt and " --%s" % longopt),
1401 "%s%s" % (desc,
1400 "%s%s" % (desc,
1402 default
1401 default
1403 and _(" (default: %s)") % default
1402 and _(" (default: %s)") % default
1404 or "")))
1403 or "")))
1405
1404
1406 if opt_output:
1405 if opt_output:
1407 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1406 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1408 for first, second in opt_output:
1407 for first, second in opt_output:
1409 if second:
1408 if second:
1410 ui.write(" %-*s %s\n" % (opts_len, first, second))
1409 ui.write(" %-*s %s\n" % (opts_len, first, second))
1411 else:
1410 else:
1412 ui.write("%s\n" % first)
1411 ui.write("%s\n" % first)
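# Editor's illustrative sketch (not part of this changeset): the two-column
# option layout produced above -- the first column is padded to the widest
# "-s --long" string and header rows (with no second column) pass through
# unchanged. The sample rows are hypothetical.
def format_opts(opt_output):
    width = max(len(first) for first, second in opt_output if second)
    lines = []
    for first, second in opt_output:
        if second:
            lines.append(" %-*s %s" % (width, first, second))
        else:
            lines.append(first)
    return lines

rows = [("\noptions:\n", None),
        ("-v --verbose", "enable additional output"),
        ("-q --quiet", "suppress output")]
assert format_opts(rows)[1] == " -v --verbose enable additional output"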
1413
1412
1414 def identify(ui, repo):
1413 def identify(ui, repo):
1415 """print information about the working copy
1414 """print information about the working copy
1416
1415
1417 Print a short summary of the current state of the repo.
1416 Print a short summary of the current state of the repo.
1418
1417
1419 This summary identifies the repository state using one or two parent
1418 This summary identifies the repository state using one or two parent
1420 hash identifiers, followed by a "+" if there are uncommitted changes
1419 hash identifiers, followed by a "+" if there are uncommitted changes
1421 in the working directory, followed by a list of tags for this revision.
1420 in the working directory, followed by a list of tags for this revision.
1422 """
1421 """
1423 parents = [p for p in repo.dirstate.parents() if p != nullid]
1422 parents = [p for p in repo.dirstate.parents() if p != nullid]
1424 if not parents:
1423 if not parents:
1425 ui.write(_("unknown\n"))
1424 ui.write(_("unknown\n"))
1426 return
1425 return
1427
1426
1428 hexfunc = ui.debugflag and hex or short
1427 hexfunc = ui.debugflag and hex or short
1429 modified, added, removed, deleted = repo.status()[:4]
1428 modified, added, removed, deleted = repo.status()[:4]
1430 output = ["%s%s" %
1429 output = ["%s%s" %
1431 ('+'.join([hexfunc(parent) for parent in parents]),
1430 ('+'.join([hexfunc(parent) for parent in parents]),
1432 (modified or added or removed or deleted) and "+" or "")]
1431 (modified or added or removed or deleted) and "+" or "")]
1433
1432
1434 if not ui.quiet:
1433 if not ui.quiet:
1435
1434
1436 branch = util.tolocal(repo.workingctx().branch())
1435 branch = util.tolocal(repo.workingctx().branch())
1437 if branch:
1436 if branch:
1438 output.append("(%s)" % branch)
1437 output.append("(%s)" % branch)
1439
1438
1440 # multiple tags for a single parent separated by '/'
1439 # multiple tags for a single parent separated by '/'
1441 parenttags = ['/'.join(tags)
1440 parenttags = ['/'.join(tags)
1442 for tags in map(repo.nodetags, parents) if tags]
1441 for tags in map(repo.nodetags, parents) if tags]
1443 # tags for multiple parents separated by ' + '
1442 # tags for multiple parents separated by ' + '
1444 if parenttags:
1443 if parenttags:
1445 output.append(' + '.join(parenttags))
1444 output.append(' + '.join(parenttags))
1446
1445
1447 ui.write("%s\n" % ' '.join(output))
1446 ui.write("%s\n" % ' '.join(output))
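# Editor's illustrative sketch (not part of this changeset): composing the
# identify line from already-computed pieces, mirroring the logic above.
# The hash, branch and tag values are hypothetical.
def compose_identify(parent_hashes, dirty, branch=None, tags=None):
    out = ['+'.join(parent_hashes) + (dirty and "+" or "")]
    if branch:
        out.append("(%s)" % branch)
    if tags:
        out.append(' + '.join(tags))
    return ' '.join(out)

assert compose_identify(["672ad7568dc3"], True, "default", ["tip"]) == \
       "672ad7568dc3+ (default) tip"
assert compose_identify(["672ad7568dc3"], False) == "672ad7568dc3"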
1448
1447
1449 def import_(ui, repo, patch1, *patches, **opts):
1448 def import_(ui, repo, patch1, *patches, **opts):
1450 """import an ordered set of patches
1449 """import an ordered set of patches
1451
1450
1452 Import a list of patches and commit them individually.
1451 Import a list of patches and commit them individually.
1453
1452
1454 If there are outstanding changes in the working directory, import
1453 If there are outstanding changes in the working directory, import
1455 will abort unless given the -f flag.
1454 will abort unless given the -f flag.
1456
1455
1457 You can import a patch straight from a mail message. Even patches
1456 You can import a patch straight from a mail message. Even patches
1458 as attachments work (body part must be type text/plain or
1457 as attachments work (body part must be type text/plain or
1459 text/x-patch to be used). The From and Subject headers of the email
1458 text/x-patch to be used). The From and Subject headers of the email
1460 message are used as the default committer and commit message. All
1459 message are used as the default committer and commit message. All
1461 text/plain body parts before the first diff are added to the commit
1460 text/plain body parts before the first diff are added to the commit
1462 message.
1461 message.
1463
1462
1464 If the imported patch was generated by hg export, the user and
1463 If the imported patch was generated by hg export, the user and
1465 description from the patch override values from the message headers
1464 description from the patch override values from the message headers
1466 and body. Values given on the command line with -m and -u override these.
1465 and body. Values given on the command line with -m and -u override these.
1467
1466
1468 To read a patch from standard input, use patch name "-".
1467 To read a patch from standard input, use patch name "-".
1469 """
1468 """
1470 patches = (patch1,) + patches
1469 patches = (patch1,) + patches
1471
1470
1472 if not opts['force']:
1471 if not opts['force']:
1473 bail_if_changed(repo)
1472 bail_if_changed(repo)
1474
1473
1475 d = opts["base"]
1474 d = opts["base"]
1476 strip = opts["strip"]
1475 strip = opts["strip"]
1477
1476
1478 wlock = repo.wlock()
1477 wlock = repo.wlock()
1479 lock = repo.lock()
1478 lock = repo.lock()
1480
1479
1481 for p in patches:
1480 for p in patches:
1482 pf = os.path.join(d, p)
1481 pf = os.path.join(d, p)
1483
1482
1484 if pf == '-':
1483 if pf == '-':
1485 ui.status(_("applying patch from stdin\n"))
1484 ui.status(_("applying patch from stdin\n"))
1486 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1485 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1487 else:
1486 else:
1488 ui.status(_("applying %s\n") % p)
1487 ui.status(_("applying %s\n") % p)
1489 tmpname, message, user, date = patch.extract(ui, file(pf))
1488 tmpname, message, user, date = patch.extract(ui, file(pf))
1490
1489
1491 if tmpname is None:
1490 if tmpname is None:
1492 raise util.Abort(_('no diffs found'))
1491 raise util.Abort(_('no diffs found'))
1493
1492
1494 try:
1493 try:
1495 cmdline_message = logmessage(opts)
1494 cmdline_message = logmessage(opts)
1496 if cmdline_message:
1495 if cmdline_message:
1497 # pickup the cmdline msg
1496 # pickup the cmdline msg
1498 message = cmdline_message
1497 message = cmdline_message
1499 elif message:
1498 elif message:
1500 # pickup the patch msg
1499 # pickup the patch msg
1501 message = message.strip()
1500 message = message.strip()
1502 else:
1501 else:
1503 # launch the editor
1502 # launch the editor
1504 message = None
1503 message = None
1505 ui.debug(_('message:\n%s\n') % message)
1504 ui.debug(_('message:\n%s\n') % message)
1506
1505
1507 files = {}
1506 files = {}
1508 try:
1507 try:
1509 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1508 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1510 files=files)
1509 files=files)
1511 finally:
1510 finally:
1512 files = patch.updatedir(ui, repo, files, wlock=wlock)
1511 files = patch.updatedir(ui, repo, files, wlock=wlock)
1513 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1512 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1514 finally:
1513 finally:
1515 os.unlink(tmpname)
1514 os.unlink(tmpname)
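# Editor's illustrative sketch (not part of this changeset): the commit
# message precedence applied above -- an explicit -m/-l message wins, then
# the message extracted from the patch, and None means "launch the editor".
def choose_message(cmdline_message, patch_message):
    if cmdline_message:
        return cmdline_message
    if patch_message:
        return patch_message.strip()
    return None

assert choose_message("from -m", "from patch") == "from -m"
assert choose_message(None, "  from patch \n") == "from patch"
assert choose_message(None, None) is None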
1516
1515
1517 def incoming(ui, repo, source="default", **opts):
1516 def incoming(ui, repo, source="default", **opts):
1518 """show new changesets found in source
1517 """show new changesets found in source
1519
1518
1520 Show new changesets found in the specified path/URL or the default
1519 Show new changesets found in the specified path/URL or the default
1521 pull location. These are the changesets that would be pulled if a pull
1520 pull location. These are the changesets that would be pulled if a pull
1522 was requested.
1521 was requested.
1523
1522
1524 For a remote repository, using --bundle avoids downloading the changesets
1523 For a remote repository, using --bundle avoids downloading the changesets
1525 twice if the incoming is followed by a pull.
1524 twice if the incoming is followed by a pull.
1526
1525
1527 See pull for valid source format details.
1526 See pull for valid source format details.
1528 """
1527 """
1529 source = ui.expandpath(source)
1528 source = ui.expandpath(source)
1530 setremoteconfig(ui, opts)
1529 setremoteconfig(ui, opts)
1531
1530
1532 other = hg.repository(ui, source)
1531 other = hg.repository(ui, source)
1533 incoming = repo.findincoming(other, force=opts["force"])
1532 incoming = repo.findincoming(other, force=opts["force"])
1534 if not incoming:
1533 if not incoming:
1535 ui.status(_("no changes found\n"))
1534 ui.status(_("no changes found\n"))
1536 return
1535 return
1537
1536
1538 cleanup = None
1537 cleanup = None
1539 try:
1538 try:
1540 fname = opts["bundle"]
1539 fname = opts["bundle"]
1541 if fname or not other.local():
1540 if fname or not other.local():
1542 # create a bundle (uncompressed if other repo is not local)
1541 # create a bundle (uncompressed if other repo is not local)
1543 cg = other.changegroup(incoming, "incoming")
1542 cg = other.changegroup(incoming, "incoming")
1544 bundletype = other.local() and "HG10BZ" or "HG10UN"
1543 bundletype = other.local() and "HG10BZ" or "HG10UN"
1545 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1544 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1546 # keep written bundle?
1545 # keep written bundle?
1547 if opts["bundle"]:
1546 if opts["bundle"]:
1548 cleanup = None
1547 cleanup = None
1549 if not other.local():
1548 if not other.local():
1550 # use the created uncompressed bundlerepo
1549 # use the created uncompressed bundlerepo
1551 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1550 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1552
1551
1553 revs = None
1552 revs = None
1554 if opts['rev']:
1553 if opts['rev']:
1555 revs = [other.lookup(rev) for rev in opts['rev']]
1554 revs = [other.lookup(rev) for rev in opts['rev']]
1556 o = other.changelog.nodesbetween(incoming, revs)[0]
1555 o = other.changelog.nodesbetween(incoming, revs)[0]
1557 if opts['newest_first']:
1556 if opts['newest_first']:
1558 o.reverse()
1557 o.reverse()
1559 displayer = cmdutil.show_changeset(ui, other, opts)
1558 displayer = cmdutil.show_changeset(ui, other, opts)
1560 for n in o:
1559 for n in o:
1561 parents = [p for p in other.changelog.parents(n) if p != nullid]
1560 parents = [p for p in other.changelog.parents(n) if p != nullid]
1562 if opts['no_merges'] and len(parents) == 2:
1561 if opts['no_merges'] and len(parents) == 2:
1563 continue
1562 continue
1564 displayer.show(changenode=n)
1563 displayer.show(changenode=n)
1565 finally:
1564 finally:
1566 if hasattr(other, 'close'):
1565 if hasattr(other, 'close'):
1567 other.close()
1566 other.close()
1568 if cleanup:
1567 if cleanup:
1569 os.unlink(cleanup)
1568 os.unlink(cleanup)
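# Editor's illustrative sketch (not part of this changeset): when the
# temporary bundle written above needs to be deleted afterwards -- only if
# one was written at all and the user did not ask to keep it with --bundle.
def bundle_cleanup_needed(bundle_opt, other_is_local):
    wrote_bundle = bool(bundle_opt) or not other_is_local
    keep = bool(bundle_opt)
    return wrote_bundle and not keep

assert bundle_cleanup_needed("", False) is True        # temp bundle, remote repo
assert bundle_cleanup_needed("in.hg", False) is False  # user keeps the bundle
assert bundle_cleanup_needed("", True) is False        # nothing was written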
1570
1569
1571 def init(ui, dest=".", **opts):
1570 def init(ui, dest=".", **opts):
1572 """create a new repository in the given directory
1571 """create a new repository in the given directory
1573
1572
1574 Initialize a new repository in the given directory. If the given
1573 Initialize a new repository in the given directory. If the given
1575 directory does not exist, it is created.
1574 directory does not exist, it is created.
1576
1575
1577 If no directory is given, the current directory is used.
1576 If no directory is given, the current directory is used.
1578
1577
1579 It is possible to specify an ssh:// URL as the destination.
1578 It is possible to specify an ssh:// URL as the destination.
1580 Look at the help text for the pull command for important details
1579 Look at the help text for the pull command for important details
1581 about ssh:// URLs.
1580 about ssh:// URLs.
1582 """
1581 """
1583 setremoteconfig(ui, opts)
1582 setremoteconfig(ui, opts)
1584 hg.repository(ui, dest, create=1)
1583 hg.repository(ui, dest, create=1)
1585
1584
1586 def locate(ui, repo, *pats, **opts):
1585 def locate(ui, repo, *pats, **opts):
1587 """locate files matching specific patterns
1586 """locate files matching specific patterns
1588
1587
1589 Print all files under Mercurial control whose names match the
1588 Print all files under Mercurial control whose names match the
1590 given patterns.
1589 given patterns.
1591
1590
1592 This command searches the current directory and its
1591 This command searches the current directory and its
1593 subdirectories. To search an entire repository, move to the root
1592 subdirectories. To search an entire repository, move to the root
1594 of the repository.
1593 of the repository.
1595
1594
1596 If no patterns are given to match, this command prints all file
1595 If no patterns are given to match, this command prints all file
1597 names.
1596 names.
1598
1597
1599 If you want to feed the output of this command into the "xargs"
1598 If you want to feed the output of this command into the "xargs"
1600 command, use the "-0" option to both this command and "xargs".
1599 command, use the "-0" option to both this command and "xargs".
1601 This will avoid the problem of "xargs" treating single filenames
1600 This will avoid the problem of "xargs" treating single filenames
1602 that contain white space as multiple filenames.
1601 that contain white space as multiple filenames.
1603 """
1602 """
1604 end = opts['print0'] and '\0' or '\n'
1603 end = opts['print0'] and '\0' or '\n'
1605 rev = opts['rev']
1604 rev = opts['rev']
1606 if rev:
1605 if rev:
1607 node = repo.lookup(rev)
1606 node = repo.lookup(rev)
1608 else:
1607 else:
1609 node = None
1608 node = None
1610
1609
1611 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1610 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1612 head='(?:.*/|)'):
1611 head='(?:.*/|)'):
1613 if not node and repo.dirstate.state(abs) == '?':
1612 if not node and repo.dirstate.state(abs) == '?':
1614 continue
1613 continue
1615 if opts['fullpath']:
1614 if opts['fullpath']:
1616 ui.write(os.path.join(repo.root, abs), end)
1615 ui.write(os.path.join(repo.root, abs), end)
1617 else:
1616 else:
1618 ui.write(((pats and rel) or abs), end)
1617 ui.write(((pats and rel) or abs), end)
1619
1618
1620 def log(ui, repo, *pats, **opts):
1619 def log(ui, repo, *pats, **opts):
1621 """show revision history of entire repository or files
1620 """show revision history of entire repository or files
1622
1621
1623 Print the revision history of the specified files or the entire
1622 Print the revision history of the specified files or the entire
1624 project.
1623 project.
1625
1624
1626 File history is shown without following rename or copy history of
1625 File history is shown without following rename or copy history of
1627 files. Use -f/--follow with a file name to follow history across
1626 files. Use -f/--follow with a file name to follow history across
1628 renames and copies. --follow without a file name will only show
1627 renames and copies. --follow without a file name will only show
1629 ancestors or descendants of the starting revision. --follow-first
1628 ancestors or descendants of the starting revision. --follow-first
1630 only follows the first parent of merge revisions.
1629 only follows the first parent of merge revisions.
1631
1630
1632 If no revision range is specified, the default is tip:0 unless
1631 If no revision range is specified, the default is tip:0 unless
1633 --follow is set, in which case the working directory parent is
1632 --follow is set, in which case the working directory parent is
1634 used as the starting revision.
1633 used as the starting revision.
1635
1634
1636 By default this command outputs: changeset id and hash, tags,
1635 By default this command outputs: changeset id and hash, tags,
1637 non-trivial parents, user, date and time, and a summary for each
1636 non-trivial parents, user, date and time, and a summary for each
1638 commit. When the -v/--verbose switch is used, the list of changed
1637 commit. When the -v/--verbose switch is used, the list of changed
1639 files and full commit message is shown.
1638 files and full commit message is shown.
1640
1639
1641 NOTE: log -p may generate unexpected diff output for merge
1640 NOTE: log -p may generate unexpected diff output for merge
1642 changesets, as it will compare the merge changeset against its
1641 changesets, as it will compare the merge changeset against its
1643 first parent only. Also, the files: list will only reflect files
1642 first parent only. Also, the files: list will only reflect files
1644 that are different from BOTH parents.
1643 that are different from BOTH parents.
1645
1644
1646 """
1645 """
1647
1646
1648 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1647 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1649 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1648 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1650
1649
1651 if opts['limit']:
1650 if opts['limit']:
1652 try:
1651 try:
1653 limit = int(opts['limit'])
1652 limit = int(opts['limit'])
1654 except ValueError:
1653 except ValueError:
1655 raise util.Abort(_('limit must be a positive integer'))
1654 raise util.Abort(_('limit must be a positive integer'))
1656 if limit <= 0: raise util.Abort(_('limit must be positive'))
1655 if limit <= 0: raise util.Abort(_('limit must be positive'))
1657 else:
1656 else:
1658 limit = sys.maxint
1657 limit = sys.maxint
1659 count = 0
1658 count = 0
1660
1659
1661 if opts['copies'] and opts['rev']:
1660 if opts['copies'] and opts['rev']:
1662 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1661 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1663 else:
1662 else:
1664 endrev = repo.changelog.count()
1663 endrev = repo.changelog.count()
1665 rcache = {}
1664 rcache = {}
1666 ncache = {}
1665 ncache = {}
1667 dcache = []
1666 dcache = []
1668 def getrenamed(fn, rev, man):
1667 def getrenamed(fn, rev, man):
1669 '''looks up all renames for a file (up to endrev) the first
1668 '''looks up all renames for a file (up to endrev) the first
1670 time the file is given. It indexes on the changerev and only
1669 time the file is given. It indexes on the changerev and only
1671 parses the manifest if linkrev != changerev.
1670 parses the manifest if linkrev != changerev.
1672 Returns rename info for fn at changerev rev.'''
1671 Returns rename info for fn at changerev rev.'''
1673 if fn not in rcache:
1672 if fn not in rcache:
1674 rcache[fn] = {}
1673 rcache[fn] = {}
1675 ncache[fn] = {}
1674 ncache[fn] = {}
1676 fl = repo.file(fn)
1675 fl = repo.file(fn)
1677 for i in xrange(fl.count()):
1676 for i in xrange(fl.count()):
1678 node = fl.node(i)
1677 node = fl.node(i)
1679 lr = fl.linkrev(node)
1678 lr = fl.linkrev(node)
1680 renamed = fl.renamed(node)
1679 renamed = fl.renamed(node)
1681 rcache[fn][lr] = renamed
1680 rcache[fn][lr] = renamed
1682 if renamed:
1681 if renamed:
1683 ncache[fn][node] = renamed
1682 ncache[fn][node] = renamed
1684 if lr >= endrev:
1683 if lr >= endrev:
1685 break
1684 break
1686 if rev in rcache[fn]:
1685 if rev in rcache[fn]:
1687 return rcache[fn][rev]
1686 return rcache[fn][rev]
1688 mr = repo.manifest.rev(man)
1687 mr = repo.manifest.rev(man)
1689 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1688 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1690 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1689 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1691 if not dcache or dcache[0] != man:
1690 if not dcache or dcache[0] != man:
1692 dcache[:] = [man, repo.manifest.readdelta(man)]
1691 dcache[:] = [man, repo.manifest.readdelta(man)]
1693 if fn in dcache[1]:
1692 if fn in dcache[1]:
1694 return ncache[fn].get(dcache[1][fn])
1693 return ncache[fn].get(dcache[1][fn])
1695 return None
1694 return None
1696
1695
1697 df = False
1696 df = False
1698 if opts["date"]:
1697 if opts["date"]:
1699 df = util.matchdate(opts["date"])
1698 df = util.matchdate(opts["date"])
1700
1699
1701
1702 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1700 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1703 for st, rev, fns in changeiter:
1701 for st, rev, fns in changeiter:
1704 if st == 'add':
1702 if st == 'add':
1705 changenode = repo.changelog.node(rev)
1703 changenode = repo.changelog.node(rev)
1706 parents = [p for p in repo.changelog.parentrevs(rev)
1704 parents = [p for p in repo.changelog.parentrevs(rev)
1707 if p != nullrev]
1705 if p != nullrev]
1708 if opts['no_merges'] and len(parents) == 2:
1706 if opts['no_merges'] and len(parents) == 2:
1709 continue
1707 continue
1710 if opts['only_merges'] and len(parents) != 2:
1708 if opts['only_merges'] and len(parents) != 2:
1711 continue
1709 continue
1712
1710
1713 if df:
1711 if df:
1714 changes = get(rev)
1712 changes = get(rev)
1715 if not df(changes[2][0]):
1713 if not df(changes[2][0]):
1716 continue
1714 continue
1717
1715
1718 if opts['keyword']:
1716 if opts['keyword']:
1719 changes = get(rev)
1717 changes = get(rev)
1720 miss = 0
1718 miss = 0
1721 for k in [kw.lower() for kw in opts['keyword']]:
1719 for k in [kw.lower() for kw in opts['keyword']]:
1722 if not (k in changes[1].lower() or
1720 if not (k in changes[1].lower() or
1723 k in changes[4].lower() or
1721 k in changes[4].lower() or
1724 k in " ".join(changes[3][:20]).lower()):
1722 k in " ".join(changes[3][:20]).lower()):
1725 miss = 1
1723 miss = 1
1726 break
1724 break
1727 if miss:
1725 if miss:
1728 continue
1726 continue
1729
1727
1730 copies = []
1728 copies = []
1731 if opts.get('copies') and rev:
1729 if opts.get('copies') and rev:
1732 mf = get(rev)[0]
1730 mf = get(rev)[0]
1733 for fn in get(rev)[3]:
1731 for fn in get(rev)[3]:
1734 rename = getrenamed(fn, rev, mf)
1732 rename = getrenamed(fn, rev, mf)
1735 if rename:
1733 if rename:
1736 copies.append((fn, rename[0]))
1734 copies.append((fn, rename[0]))
1737 displayer.show(rev, changenode, copies=copies)
1735 displayer.show(rev, changenode, copies=copies)
1738 elif st == 'iter':
1736 elif st == 'iter':
1739 if count == limit: break
1737 if count == limit: break
1740 if displayer.flush(rev):
1738 if displayer.flush(rev):
1741 count += 1
1739 count += 1
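# Editor's illustrative sketch (not part of this changeset): the -k/--keyword
# filter applied above -- a changeset is kept only if every keyword occurs in
# its user, description, or first few file names, case-insensitively. The
# sample values are hypothetical.
def keyword_match(keywords, user, description, files):
    haystacks = [user.lower(), description.lower(),
                 " ".join(files[:20]).lower()]
    for kw in [k.lower() for k in keywords]:
        if not any(kw in h for h in haystacks):
            return False
    return True

assert keyword_match(["Bug"], "mpm", "fix bug 42", ["commands.py"]) is True
assert keyword_match(["bug", "doc"], "mpm", "fix bug 42", ["commands.py"]) is False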
1742
1740
1743 def manifest(ui, repo, rev=None):
1741 def manifest(ui, repo, rev=None):
1744 """output the latest or given revision of the project manifest
1742 """output the latest or given revision of the project manifest
1745
1743
1746 Print a list of version controlled files for the given revision.
1744 Print a list of version controlled files for the given revision.
1747
1745
1748 The manifest is the list of files being version controlled. If no revision
1746 The manifest is the list of files being version controlled. If no revision
1749 is given then the first parent of the working directory is used.
1747 is given then the first parent of the working directory is used.
1750
1748
1751 With -v flag, print file permissions. With --debug flag, print
1749 With -v flag, print file permissions. With --debug flag, print
1752 file revision hashes.
1750 file revision hashes.
1753 """
1751 """
1754
1752
1755 m = repo.changectx(rev).manifest()
1753 m = repo.changectx(rev).manifest()
1756 files = m.keys()
1754 files = m.keys()
1757 files.sort()
1755 files.sort()
1758
1756
1759 for f in files:
1757 for f in files:
1760 if ui.debugflag:
1758 if ui.debugflag:
1761 ui.write("%40s " % hex(m[f]))
1759 ui.write("%40s " % hex(m[f]))
1762 if ui.verbose:
1760 if ui.verbose:
1763 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1761 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1764 ui.write("%s\n" % f)
1762 ui.write("%s\n" % f)
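# Editor's illustrative sketch (not part of this changeset): how one manifest
# row is rendered above -- an optional 40-char node hash with --debug, a
# 755/644 permission column with -v, then the file name. Sample values are
# hypothetical.
def manifest_row(fname, node_hex, is_exec, verbose=False, debug=False):
    row = ""
    if debug:
        row += "%40s " % node_hex
    if verbose:
        row += "%3s " % (is_exec and "755" or "644")
    return row + fname

assert manifest_row("hgext/bugzilla.py", "ab" * 20, True, verbose=True) == \
       "755 hgext/bugzilla.py"
assert manifest_row("README", "cd" * 20, False) == "README"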
1765
1763
1766 def merge(ui, repo, node=None, force=None, branch=None):
1764 def merge(ui, repo, node=None, force=None):
1767 """Merge working directory with another revision
1765 """Merge working directory with another revision
1768
1766
1769 Merge the contents of the current working directory and the
1767 Merge the contents of the current working directory and the
1770 requested revision. Files that changed between either parent are
1768 requested revision. Files that changed between either parent are
1771 marked as changed for the next commit and a commit must be
1769 marked as changed for the next commit and a commit must be
1772 performed before any further updates are allowed.
1770 performed before any further updates are allowed.
1773
1771
1774 If no revision is specified, the working directory's parent is a
1772 If no revision is specified, the working directory's parent is a
1775 head revision, and the repository contains exactly one other head,
1773 head revision, and the repository contains exactly one other head,
1776 that other head is merged with by default. Otherwise, an explicit
1774 that other head is merged with by default. Otherwise, an explicit
1777 revision to merge with must be provided.
1775 revision to merge with must be provided.
1778 """
1776 """
1779
1777
1780 if node or branch:
1778 if not node:
1781 node = _lookup(repo, node, branch)
1782 else:
1783 heads = repo.heads()
1779 heads = repo.heads()
1784 if len(heads) > 2:
1780 if len(heads) > 2:
1785 raise util.Abort(_('repo has %d heads - '
1781 raise util.Abort(_('repo has %d heads - '
1786 'please merge with an explicit rev') %
1782 'please merge with an explicit rev') %
1787 len(heads))
1783 len(heads))
1788 if len(heads) == 1:
1784 if len(heads) == 1:
1789 raise util.Abort(_('there is nothing to merge - '
1785 raise util.Abort(_('there is nothing to merge - '
1790 'use "hg update" instead'))
1786 'use "hg update" instead'))
1791 parent = repo.dirstate.parents()[0]
1787 parent = repo.dirstate.parents()[0]
1792 if parent not in heads:
1788 if parent not in heads:
1793 raise util.Abort(_('working dir not at a head rev - '
1789 raise util.Abort(_('working dir not at a head rev - '
1794 'use "hg update" or merge with an explicit rev'))
1790 'use "hg update" or merge with an explicit rev'))
1795 node = parent == heads[0] and heads[-1] or heads[0]
1791 node = parent == heads[0] and heads[-1] or heads[0]
1796 return hg.merge(repo, node, force=force)
1792 return hg.merge(repo, node, force=force)
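# Editor's illustrative sketch (not part of this changeset): the implicit
# merge target chosen above when no revision is given -- with exactly two
# heads and the working directory at one of them, merge picks the other.
def pick_merge_target(heads, parent):
    if len(heads) > 2:
        raise ValueError('too many heads - an explicit rev is needed')
    if len(heads) == 1:
        raise ValueError('nothing to merge - use update instead')
    if parent not in heads:
        raise ValueError('working dir is not at a head rev')
    return parent == heads[0] and heads[-1] or heads[0]

assert pick_merge_target(['a', 'b'], 'a') == 'b'
assert pick_merge_target(['a', 'b'], 'b') == 'a'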
1797
1793
1798 def outgoing(ui, repo, dest=None, **opts):
1794 def outgoing(ui, repo, dest=None, **opts):
1799 """show changesets not found in destination
1795 """show changesets not found in destination
1800
1796
1801 Show changesets not found in the specified destination repository or
1797 Show changesets not found in the specified destination repository or
1802 the default push location. These are the changesets that would be pushed
1798 the default push location. These are the changesets that would be pushed
1803 if a push was requested.
1799 if a push was requested.
1804
1800
1805 See pull for valid destination format details.
1801 See pull for valid destination format details.
1806 """
1802 """
1807 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1803 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1808 setremoteconfig(ui, opts)
1804 setremoteconfig(ui, opts)
1809 revs = None
1805 revs = None
1810 if opts['rev']:
1806 if opts['rev']:
1811 revs = [repo.lookup(rev) for rev in opts['rev']]
1807 revs = [repo.lookup(rev) for rev in opts['rev']]
1812
1808
1813 other = hg.repository(ui, dest)
1809 other = hg.repository(ui, dest)
1814 o = repo.findoutgoing(other, force=opts['force'])
1810 o = repo.findoutgoing(other, force=opts['force'])
1815 if not o:
1811 if not o:
1816 ui.status(_("no changes found\n"))
1812 ui.status(_("no changes found\n"))
1817 return
1813 return
1818 o = repo.changelog.nodesbetween(o, revs)[0]
1814 o = repo.changelog.nodesbetween(o, revs)[0]
1819 if opts['newest_first']:
1815 if opts['newest_first']:
1820 o.reverse()
1816 o.reverse()
1821 displayer = cmdutil.show_changeset(ui, repo, opts)
1817 displayer = cmdutil.show_changeset(ui, repo, opts)
1822 for n in o:
1818 for n in o:
1823 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1819 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1824 if opts['no_merges'] and len(parents) == 2:
1820 if opts['no_merges'] and len(parents) == 2:
1825 continue
1821 continue
1826 displayer.show(changenode=n)
1822 displayer.show(changenode=n)
1827
1823
1828 def parents(ui, repo, file_=None, **opts):
1824 def parents(ui, repo, file_=None, **opts):
1829 """show the parents of the working dir or revision
1825 """show the parents of the working dir or revision
1830
1826
1831 Print the working directory's parent revisions.
1827 Print the working directory's parent revisions.
1832 """
1828 """
1833 rev = opts.get('rev')
1829 rev = opts.get('rev')
1834 if rev:
1830 if rev:
1835 if file_:
1831 if file_:
1836 ctx = repo.filectx(file_, changeid=rev)
1832 ctx = repo.filectx(file_, changeid=rev)
1837 else:
1833 else:
1838 ctx = repo.changectx(rev)
1834 ctx = repo.changectx(rev)
1839 p = [cp.node() for cp in ctx.parents()]
1835 p = [cp.node() for cp in ctx.parents()]
1840 else:
1836 else:
1841 p = repo.dirstate.parents()
1837 p = repo.dirstate.parents()
1842
1838
1843 displayer = cmdutil.show_changeset(ui, repo, opts)
1839 displayer = cmdutil.show_changeset(ui, repo, opts)
1844 for n in p:
1840 for n in p:
1845 if n != nullid:
1841 if n != nullid:
1846 displayer.show(changenode=n)
1842 displayer.show(changenode=n)
1847
1843
1848 def paths(ui, repo, search=None):
1844 def paths(ui, repo, search=None):
1849 """show definition of symbolic path names
1845 """show definition of symbolic path names
1850
1846
1851 Show definition of symbolic path name NAME. If no name is given, show
1847 Show definition of symbolic path name NAME. If no name is given, show
1852 definition of available names.
1848 definition of available names.
1853
1849
1854 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1850 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1855 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1851 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1856 """
1852 """
1857 if search:
1853 if search:
1858 for name, path in ui.configitems("paths"):
1854 for name, path in ui.configitems("paths"):
1859 if name == search:
1855 if name == search:
1860 ui.write("%s\n" % path)
1856 ui.write("%s\n" % path)
1861 return
1857 return
1862 ui.warn(_("not found!\n"))
1858 ui.warn(_("not found!\n"))
1863 return 1
1859 return 1
1864 else:
1860 else:
1865 for name, path in ui.configitems("paths"):
1861 for name, path in ui.configitems("paths"):
1866 ui.write("%s = %s\n" % (name, path))
1862 ui.write("%s = %s\n" % (name, path))
1867
1863
1868 def postincoming(ui, repo, modheads, optupdate):
1864 def postincoming(ui, repo, modheads, optupdate):
1869 if modheads == 0:
1865 if modheads == 0:
1870 return
1866 return
1871 if optupdate:
1867 if optupdate:
1872 if modheads == 1:
1868 if modheads == 1:
1873 return hg.update(repo, repo.changelog.tip()) # update
1869 return hg.update(repo, repo.changelog.tip()) # update
1874 else:
1870 else:
1875 ui.status(_("not updating, since new heads added\n"))
1871 ui.status(_("not updating, since new heads added\n"))
1876 if modheads > 1:
1872 if modheads > 1:
1877 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1873 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1878 else:
1874 else:
1879 ui.status(_("(run 'hg update' to get a working copy)\n"))
1875 ui.status(_("(run 'hg update' to get a working copy)\n"))
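# Editor's illustrative sketch (not part of this changeset): the post-pull
# decision made above, condensed into one table -- update automatically only
# when exactly one head changed and -u was requested; otherwise print a hint.
def postincoming_action(modheads, want_update):
    if modheads == 0:
        return 'nothing'
    if want_update and modheads == 1:
        return 'update'
    if modheads > 1:
        return 'hint: hg heads / hg merge'
    return 'hint: hg update'

assert postincoming_action(0, True) == 'nothing'
assert postincoming_action(1, True) == 'update'
assert postincoming_action(2, True) == 'hint: hg heads / hg merge'
assert postincoming_action(1, False) == 'hint: hg update'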
1880
1876
1881 def pull(ui, repo, source="default", **opts):
1877 def pull(ui, repo, source="default", **opts):
1882 """pull changes from the specified source
1878 """pull changes from the specified source
1883
1879
1884 Pull changes from a remote repository to a local one.
1880 Pull changes from a remote repository to a local one.
1885
1881
1886 This finds all changes from the repository at the specified path
1882 This finds all changes from the repository at the specified path
1887 or URL and adds them to the local repository. By default, this
1883 or URL and adds them to the local repository. By default, this
1888 does not update the copy of the project in the working directory.
1884 does not update the copy of the project in the working directory.
1889
1885
1890 Valid URLs are of the form:
1886 Valid URLs are of the form:
1891
1887
1892 local/filesystem/path (or file://local/filesystem/path)
1888 local/filesystem/path (or file://local/filesystem/path)
1893 http://[user@]host[:port]/[path]
1889 http://[user@]host[:port]/[path]
1894 https://[user@]host[:port]/[path]
1890 https://[user@]host[:port]/[path]
1895 ssh://[user@]host[:port]/[path]
1891 ssh://[user@]host[:port]/[path]
1896 static-http://host[:port]/[path]
1892 static-http://host[:port]/[path]
1897
1893
1898 Paths in the local filesystem can either point to Mercurial
1894 Paths in the local filesystem can either point to Mercurial
1899 repositories or to bundle files (as created by 'hg bundle' or
1895 repositories or to bundle files (as created by 'hg bundle' or
1900 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1896 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1901 allows access to a Mercurial repository where you simply use a web
1897 allows access to a Mercurial repository where you simply use a web
1902 server to publish the .hg directory as static content.
1898 server to publish the .hg directory as static content.
1903
1899
1904 Some notes about using SSH with Mercurial:
1900 Some notes about using SSH with Mercurial:
1905 - SSH requires an accessible shell account on the destination machine
1901 - SSH requires an accessible shell account on the destination machine
1906 and a copy of hg in the remote path or specified with remotecmd.
1902 and a copy of hg in the remote path or specified with remotecmd.
1907 - path is relative to the remote user's home directory by default.
1903 - path is relative to the remote user's home directory by default.
1908 Use an extra slash at the start of a path to specify an absolute path:
1904 Use an extra slash at the start of a path to specify an absolute path:
1909 ssh://example.com//tmp/repository
1905 ssh://example.com//tmp/repository
1910 - Mercurial doesn't use its own compression via SSH; the right thing
1906 - Mercurial doesn't use its own compression via SSH; the right thing
1911 to do is to configure it in your ~/.ssh/config, e.g.:
1907 to do is to configure it in your ~/.ssh/config, e.g.:
1912 Host *.mylocalnetwork.example.com
1908 Host *.mylocalnetwork.example.com
1913 Compression no
1909 Compression no
1914 Host *
1910 Host *
1915 Compression yes
1911 Compression yes
1916 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1912 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1917 with the --ssh command line option.
1913 with the --ssh command line option.
1918 """
1914 """
1919 source = ui.expandpath(source)
1915 source = ui.expandpath(source)
1920 setremoteconfig(ui, opts)
1916 setremoteconfig(ui, opts)
1921
1917
1922 other = hg.repository(ui, source)
1918 other = hg.repository(ui, source)
1923 ui.status(_('pulling from %s\n') % (source))
1919 ui.status(_('pulling from %s\n') % (source))
1924 revs = None
1920 revs = None
1925 if opts['rev']:
1921 if opts['rev']:
1926 if 'lookup' in other.capabilities:
1922 if 'lookup' in other.capabilities:
1927 revs = [other.lookup(rev) for rev in opts['rev']]
1923 revs = [other.lookup(rev) for rev in opts['rev']]
1928 else:
1924 else:
1929 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1925 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1930 raise util.Abort(error)
1926 raise util.Abort(error)
1931 modheads = repo.pull(other, heads=revs, force=opts['force'])
1927 modheads = repo.pull(other, heads=revs, force=opts['force'])
1932 return postincoming(ui, repo, modheads, opts['update'])
1928 return postincoming(ui, repo, modheads, opts['update'])
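# Editor's illustrative sketch (not part of this changeset): the capability
# check made above -- pulling specific revisions only works when the remote
# side advertises 'lookup', i.e. it can resolve names to nodes.
def can_pull_revs(capabilities, revs):
    if not revs:
        return True
    return 'lookup' in capabilities

assert can_pull_revs(['lookup', 'unbundle'], ['tip']) is True
assert can_pull_revs(['unbundle'], ['tip']) is False
assert can_pull_revs([], []) is True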
1933
1929
1934 def push(ui, repo, dest=None, **opts):
1930 def push(ui, repo, dest=None, **opts):
1935 """push changes to the specified destination
1931 """push changes to the specified destination
1936
1932
1937 Push changes from the local repository to the given destination.
1933 Push changes from the local repository to the given destination.
1938
1934
1939 This is the symmetrical operation for pull. It helps to move
1935 This is the symmetrical operation for pull. It helps to move
1940 changes from the current repository to a different one. If the
1936 changes from the current repository to a different one. If the
1941 destination is local this is identical to a pull in that directory
1937 destination is local this is identical to a pull in that directory
1942 from the current one.
1938 from the current one.
1943
1939
1944 By default, push will refuse to run if it detects the result would
1940 By default, push will refuse to run if it detects the result would
1945 increase the number of remote heads. This generally indicates that
1941 increase the number of remote heads. This generally indicates that
1946 the client has forgotten to sync and merge before pushing.
1942 the client has forgotten to sync and merge before pushing.
1947
1943
1948 Valid URLs are of the form:
1944 Valid URLs are of the form:
1949
1945
1950 local/filesystem/path (or file://local/filesystem/path)
1946 local/filesystem/path (or file://local/filesystem/path)
1951 ssh://[user@]host[:port]/[path]
1947 ssh://[user@]host[:port]/[path]
1952 http://[user@]host[:port]/[path]
1948 http://[user@]host[:port]/[path]
1953 https://[user@]host[:port]/[path]
1949 https://[user@]host[:port]/[path]
1954
1950
1955 Look at the help text for the pull command for important details
1951 Look at the help text for the pull command for important details
1956 about ssh:// URLs.
1952 about ssh:// URLs.
1957
1953
1958 Pushing to http:// and https:// URLs is only possible if this
1954 Pushing to http:// and https:// URLs is only possible if this
1959 feature is explicitly enabled on the remote Mercurial server.
1955 feature is explicitly enabled on the remote Mercurial server.
1960 """
1956 """
1961 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1957 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1962 setremoteconfig(ui, opts)
1958 setremoteconfig(ui, opts)
1963
1959
1964 other = hg.repository(ui, dest)
1960 other = hg.repository(ui, dest)
1965 ui.status('pushing to %s\n' % (dest))
1961 ui.status('pushing to %s\n' % (dest))
1966 revs = None
1962 revs = None
1967 if opts['rev']:
1963 if opts['rev']:
1968 revs = [repo.lookup(rev) for rev in opts['rev']]
1964 revs = [repo.lookup(rev) for rev in opts['rev']]
1969 r = repo.push(other, opts['force'], revs=revs)
1965 r = repo.push(other, opts['force'], revs=revs)
1970 return r == 0
1966 return r == 0
1971
1967
1972 def rawcommit(ui, repo, *pats, **opts):
1968 def rawcommit(ui, repo, *pats, **opts):
1973 """raw commit interface (DEPRECATED)
1969 """raw commit interface (DEPRECATED)
1974
1970
1975 (DEPRECATED)
1971 (DEPRECATED)
1976 Low-level commit, for use in helper scripts.
1972 Low-level commit, for use in helper scripts.
1977
1973
1978 This command is not intended to be used by normal users, as it is
1974 This command is not intended to be used by normal users, as it is
1979 primarily useful for importing from other SCMs.
1975 primarily useful for importing from other SCMs.
1980
1976
1981 This command is now deprecated and will be removed in a future
1977 This command is now deprecated and will be removed in a future
1982 release; please use debugsetparents and commit instead.
1978 release; please use debugsetparents and commit instead.
1983 """
1979 """
1984
1980
1985 ui.warn(_("(the rawcommit command is deprecated)\n"))
1981 ui.warn(_("(the rawcommit command is deprecated)\n"))
1986
1982
1987 message = logmessage(opts)
1983 message = logmessage(opts)
1988
1984
1989 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1985 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1990 if opts['files']:
1986 if opts['files']:
1991 files += open(opts['files']).read().splitlines()
1987 files += open(opts['files']).read().splitlines()
1992
1988
1993 parents = [repo.lookup(p) for p in opts['parent']]
1989 parents = [repo.lookup(p) for p in opts['parent']]
1994
1990
1995 try:
1991 try:
1996 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1992 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1997 except ValueError, inst:
1993 except ValueError, inst:
1998 raise util.Abort(str(inst))
1994 raise util.Abort(str(inst))
1999
1995
2000 def recover(ui, repo):
1996 def recover(ui, repo):
2001 """roll back an interrupted transaction
1997 """roll back an interrupted transaction
2002
1998
2003 Recover from an interrupted commit or pull.
1999 Recover from an interrupted commit or pull.
2004
2000
2005 This command tries to fix the repository status after an interrupted
2001 This command tries to fix the repository status after an interrupted
2006 operation. It should only be necessary when Mercurial suggests it.
2002 operation. It should only be necessary when Mercurial suggests it.
2007 """
2003 """
2008 if repo.recover():
2004 if repo.recover():
2009 return hg.verify(repo)
2005 return hg.verify(repo)
2010 return 1
2006 return 1
2011
2007
2012 def remove(ui, repo, *pats, **opts):
2008 def remove(ui, repo, *pats, **opts):
2013 """remove the specified files on the next commit
2009 """remove the specified files on the next commit
2014
2010
2015 Schedule the indicated files for removal from the repository.
2011 Schedule the indicated files for removal from the repository.
2016
2012
2017 This only removes files from the current branch, not from the
2013 This only removes files from the current branch, not from the
2018 entire project history. If the files still exist in the working
2014 entire project history. If the files still exist in the working
2019 directory, they will be deleted from it. If invoked with --after,
2015 directory, they will be deleted from it. If invoked with --after,
2020 files that have been manually deleted are marked as removed.
2016 files that have been manually deleted are marked as removed.
2021
2017
2022 This command schedules the files to be removed at the next commit.
2018 This command schedules the files to be removed at the next commit.
2023 To undo a remove before that, see hg revert.
2019 To undo a remove before that, see hg revert.
2024
2020
2025 Modified files and added files are not removed by default. To
2021 Modified files and added files are not removed by default. To
2026 remove them, use the -f/--force option.
2022 remove them, use the -f/--force option.
2027 """
2023 """
2028 names = []
2024 names = []
2029 if not opts['after'] and not pats:
2025 if not opts['after'] and not pats:
2030 raise util.Abort(_('no files specified'))
2026 raise util.Abort(_('no files specified'))
2031 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2027 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2032 exact = dict.fromkeys(files)
2028 exact = dict.fromkeys(files)
2033 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2029 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2034 modified, added, removed, deleted, unknown = mardu
2030 modified, added, removed, deleted, unknown = mardu
2035 remove, forget = [], []
2031 remove, forget = [], []
2036 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2032 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2037 reason = None
2033 reason = None
2038 if abs not in deleted and opts['after']:
2034 if abs not in deleted and opts['after']:
2039 reason = _('is still present')
2035 reason = _('is still present')
2040 elif abs in modified and not opts['force']:
2036 elif abs in modified and not opts['force']:
2041 reason = _('is modified (use -f to force removal)')
2037 reason = _('is modified (use -f to force removal)')
2042 elif abs in added:
2038 elif abs in added:
2043 if opts['force']:
2039 if opts['force']:
2044 forget.append(abs)
2040 forget.append(abs)
2045 continue
2041 continue
2046 reason = _('has been marked for add (use -f to force removal)')
2042 reason = _('has been marked for add (use -f to force removal)')
2047 elif abs in unknown:
2043 elif abs in unknown:
2048 reason = _('is not managed')
2044 reason = _('is not managed')
2049 elif abs in removed:
2045 elif abs in removed:
2050 continue
2046 continue
2051 if reason:
2047 if reason:
2052 if exact:
2048 if exact:
2053 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2049 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2054 else:
2050 else:
2055 if ui.verbose or not exact:
2051 if ui.verbose or not exact:
2056 ui.status(_('removing %s\n') % rel)
2052 ui.status(_('removing %s\n') % rel)
2057 remove.append(abs)
2053 remove.append(abs)
2058 repo.forget(forget)
2054 repo.forget(forget)
2059 repo.remove(remove, unlink=not opts['after'])
2055 repo.remove(remove, unlink=not opts['after'])
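# Editor's illustrative sketch (not part of this changeset): the per-file
# decision made above -- forget files that were only added, refuse modified
# files without --force, and schedule the rest for removal. The state labels
# are hypothetical names for the status buckets used above.
def remove_action(state, force=False, after=False):
    if after and state != 'deleted':
        return 'skip: still present'
    if state == 'modified' and not force:
        return 'skip: modified (use -f)'
    if state == 'added':
        return force and 'forget' or 'skip: marked for add (use -f)'
    if state == 'unknown':
        return 'skip: not managed'
    if state == 'removed':
        return 'skip: already removed'
    return 'remove'

assert remove_action('clean') == 'remove'
assert remove_action('modified') == 'skip: modified (use -f)'
assert remove_action('added', force=True) == 'forget'
assert remove_action('clean', after=True) == 'skip: still present'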
2060
2056
2061 def rename(ui, repo, *pats, **opts):
2057 def rename(ui, repo, *pats, **opts):
2062 """rename files; equivalent of copy + remove
2058 """rename files; equivalent of copy + remove
2063
2059
2064 Mark dest as copies of sources; mark sources for deletion. If
2060 Mark dest as copies of sources; mark sources for deletion. If
2065 dest is a directory, copies are put in that directory. If dest is
2061 dest is a directory, copies are put in that directory. If dest is
2066 a file, there can only be one source.
2062 a file, there can only be one source.
2067
2063
2068 By default, this command copies the contents of files as they
2064 By default, this command copies the contents of files as they
2069 stand in the working directory. If invoked with --after, the
2065 stand in the working directory. If invoked with --after, the
2070 operation is recorded, but no copying is performed.
2066 operation is recorded, but no copying is performed.
2071
2067
2072 This command takes effect in the next commit. To undo a rename
2068 This command takes effect in the next commit. To undo a rename
2073 before that, see hg revert.
2069 before that, see hg revert.
2074 """
2070 """
2075 wlock = repo.wlock(0)
2071 wlock = repo.wlock(0)
2076 errs, copied = docopy(ui, repo, pats, opts, wlock)
2072 errs, copied = docopy(ui, repo, pats, opts, wlock)
2077 names = []
2073 names = []
2078 for abs, rel, exact in copied:
2074 for abs, rel, exact in copied:
2079 if ui.verbose or not exact:
2075 if ui.verbose or not exact:
2080 ui.status(_('removing %s\n') % rel)
2076 ui.status(_('removing %s\n') % rel)
2081 names.append(abs)
2077 names.append(abs)
2082 if not opts.get('dry_run'):
2078 if not opts.get('dry_run'):
2083 repo.remove(names, True, wlock)
2079 repo.remove(names, True, wlock)
2084 return errs
2080 return errs
2085
2081
2086 def revert(ui, repo, *pats, **opts):
2082 def revert(ui, repo, *pats, **opts):
2087 """revert files or dirs to their states as of some revision
2083 """revert files or dirs to their states as of some revision
2088
2084
2089 With no revision specified, revert the named files or directories
2085 With no revision specified, revert the named files or directories
2090 to the contents they had in the parent of the working directory.
2086 to the contents they had in the parent of the working directory.
2091 This restores the contents of the affected files to an unmodified
2087 This restores the contents of the affected files to an unmodified
2092 state and unschedules adds, removes, copies, and renames. If the
2088 state and unschedules adds, removes, copies, and renames. If the
2093 working directory has two parents, you must explicitly specify the
2089 working directory has two parents, you must explicitly specify the
2094 revision to revert to.
2090 revision to revert to.
2095
2091
2096 Modified files are saved with a .orig suffix before reverting.
2092 Modified files are saved with a .orig suffix before reverting.
2097 To disable these backups, use --no-backup.
2093 To disable these backups, use --no-backup.
2098
2094
2099 Using the -r option, revert the given files or directories to their
2095 Using the -r option, revert the given files or directories to their
2100 contents as of a specific revision. This can be helpful to "roll
2096 contents as of a specific revision. This can be helpful to "roll
2101 back" some or all of a change that should not have been committed.
2097 back" some or all of a change that should not have been committed.
2102
2098
2103 Revert modifies the working directory. It does not commit any
2099 Revert modifies the working directory. It does not commit any
2104 changes, or change the parent of the working directory. If you
2100 changes, or change the parent of the working directory. If you
2105 revert to a revision other than the parent of the working
2101 revert to a revision other than the parent of the working
2106 directory, the reverted files will thus appear modified
2102 directory, the reverted files will thus appear modified
2107 afterwards.
2103 afterwards.
2108
2104
2109 If a file has been deleted, it is recreated. If the executable
2105 If a file has been deleted, it is recreated. If the executable
2110 mode of a file was changed, it is reset.
2106 mode of a file was changed, it is reset.
2111
2107
2112 If names are given, all files matching the names are reverted.
2108 If names are given, all files matching the names are reverted.
2113
2109
2114 If no arguments are given, no files are reverted.
2110 If no arguments are given, no files are reverted.
2115 """
2111 """
2116
2112
2117 if opts["date"]:
2113 if opts["date"]:
2118 if opts["rev"]:
2114 if opts["rev"]:
2119 raise util.Abort(_("you can't specify a revision and a date"))
2115 raise util.Abort(_("you can't specify a revision and a date"))
2120 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2116 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2121
2117
2122 if not pats and not opts['all']:
2118 if not pats and not opts['all']:
2123 raise util.Abort(_('no files or directories specified; '
2119 raise util.Abort(_('no files or directories specified; '
2124 'use --all to revert the whole repo'))
2120 'use --all to revert the whole repo'))
2125
2121
2126 parent, p2 = repo.dirstate.parents()
2122 parent, p2 = repo.dirstate.parents()
2127 if not opts['rev'] and p2 != nullid:
2123 if not opts['rev'] and p2 != nullid:
2128 raise util.Abort(_('uncommitted merge - please provide a '
2124 raise util.Abort(_('uncommitted merge - please provide a '
2129 'specific revision'))
2125 'specific revision'))
2130 node = repo.changectx(opts['rev']).node()
2126 node = repo.changectx(opts['rev']).node()
2131 mf = repo.manifest.read(repo.changelog.read(node)[0])
2127 mf = repo.manifest.read(repo.changelog.read(node)[0])
2132 if node == parent:
2128 if node == parent:
2133 pmf = mf
2129 pmf = mf
2134 else:
2130 else:
2135 pmf = None
2131 pmf = None
2136
2132
2137 wlock = repo.wlock()
2133 wlock = repo.wlock()
2138
2134
2139 # need all matching names in dirstate and manifest of target rev,
2135 # need all matching names in dirstate and manifest of target rev,
2140 # so have to walk both. do not print errors if files exist in one
2136 # so have to walk both. do not print errors if files exist in one
2141 # but not other.
2137 # but not other.
2142
2138
2143 names = {}
2139 names = {}
2144 target_only = {}
2140 target_only = {}
2145
2141
2146 # walk dirstate.
2142 # walk dirstate.
2147
2143
2148 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2144 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2149 badmatch=mf.has_key):
2145 badmatch=mf.has_key):
2150 names[abs] = (rel, exact)
2146 names[abs] = (rel, exact)
2151 if src == 'b':
2147 if src == 'b':
2152 target_only[abs] = True
2148 target_only[abs] = True
2153
2149
2154 # walk target manifest.
2150 # walk target manifest.
2155
2151
2156 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2152 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2157 badmatch=names.has_key):
2153 badmatch=names.has_key):
2158 if abs in names: continue
2154 if abs in names: continue
2159 names[abs] = (rel, exact)
2155 names[abs] = (rel, exact)
2160 target_only[abs] = True
2156 target_only[abs] = True
2161
2157
2162 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2158 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2163 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2159 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2164
2160
2165 revert = ([], _('reverting %s\n'))
2161 revert = ([], _('reverting %s\n'))
2166 add = ([], _('adding %s\n'))
2162 add = ([], _('adding %s\n'))
2167 remove = ([], _('removing %s\n'))
2163 remove = ([], _('removing %s\n'))
2168 forget = ([], _('forgetting %s\n'))
2164 forget = ([], _('forgetting %s\n'))
2169 undelete = ([], _('undeleting %s\n'))
2165 undelete = ([], _('undeleting %s\n'))
2170 update = {}
2166 update = {}
2171
2167
2172 disptable = (
2168 disptable = (
2173 # dispatch table:
2169 # dispatch table:
2174 # file state
2170 # file state
2175 # action if in target manifest
2171 # action if in target manifest
2176 # action if not in target manifest
2172 # action if not in target manifest
2177 # make backup if in target manifest
2173 # make backup if in target manifest
2178 # make backup if not in target manifest
2174 # make backup if not in target manifest
2179 (modified, revert, remove, True, True),
2175 (modified, revert, remove, True, True),
2180 (added, revert, forget, True, False),
2176 (added, revert, forget, True, False),
2181 (removed, undelete, None, False, False),
2177 (removed, undelete, None, False, False),
2182 (deleted, revert, remove, False, False),
2178 (deleted, revert, remove, False, False),
2183 (unknown, add, None, True, False),
2179 (unknown, add, None, True, False),
2184 (target_only, add, None, False, False),
2180 (target_only, add, None, False, False),
2185 )
2181 )
2186
2182
2187 entries = names.items()
2183 entries = names.items()
2188 entries.sort()
2184 entries.sort()
2189
2185
2190 for abs, (rel, exact) in entries:
2186 for abs, (rel, exact) in entries:
2191 mfentry = mf.get(abs)
2187 mfentry = mf.get(abs)
2192 def handle(xlist, dobackup):
2188 def handle(xlist, dobackup):
2193 xlist[0].append(abs)
2189 xlist[0].append(abs)
2194 update[abs] = 1
2190 update[abs] = 1
2195 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2191 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2196 bakname = "%s.orig" % rel
2192 bakname = "%s.orig" % rel
2197 ui.note(_('saving current version of %s as %s\n') %
2193 ui.note(_('saving current version of %s as %s\n') %
2198 (rel, bakname))
2194 (rel, bakname))
2199 if not opts.get('dry_run'):
2195 if not opts.get('dry_run'):
2200 util.copyfile(rel, bakname)
2196 util.copyfile(rel, bakname)
2201 if ui.verbose or not exact:
2197 if ui.verbose or not exact:
2202 ui.status(xlist[1] % rel)
2198 ui.status(xlist[1] % rel)
2203 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2199 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2204 if abs not in table: continue
2200 if abs not in table: continue
2205 # file has changed in dirstate
2201 # file has changed in dirstate
2206 if mfentry:
2202 if mfentry:
2207 handle(hitlist, backuphit)
2203 handle(hitlist, backuphit)
2208 elif misslist is not None:
2204 elif misslist is not None:
2209 handle(misslist, backupmiss)
2205 handle(misslist, backupmiss)
2210 else:
2206 else:
2211 if exact: ui.warn(_('file not managed: %s\n') % rel)
2207 if exact: ui.warn(_('file not managed: %s\n') % rel)
2212 break
2208 break
2213 else:
2209 else:
2214 # file has not changed in dirstate
2210 # file has not changed in dirstate
2215 if node == parent:
2211 if node == parent:
2216 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2212 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2217 continue
2213 continue
2218 if pmf is None:
2214 if pmf is None:
2219 # only need parent manifest in this unlikely case,
2215 # only need parent manifest in this unlikely case,
2220 # so do not read by default
2216 # so do not read by default
2221 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2217 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2222 if abs in pmf:
2218 if abs in pmf:
2223 if mfentry:
2219 if mfentry:
2224 # if version of file is same in parent and target
2220 # if version of file is same in parent and target
2225 # manifests, do nothing
2221 # manifests, do nothing
2226 if pmf[abs] != mfentry:
2222 if pmf[abs] != mfentry:
2227 handle(revert, False)
2223 handle(revert, False)
2228 else:
2224 else:
2229 handle(remove, False)
2225 handle(remove, False)
2230
2226
2231 if not opts.get('dry_run'):
2227 if not opts.get('dry_run'):
2232 repo.dirstate.forget(forget[0])
2228 repo.dirstate.forget(forget[0])
2233 r = hg.revert(repo, node, update.has_key, wlock)
2229 r = hg.revert(repo, node, update.has_key, wlock)
2234 repo.dirstate.update(add[0], 'a')
2230 repo.dirstate.update(add[0], 'a')
2235 repo.dirstate.update(undelete[0], 'n')
2231 repo.dirstate.update(undelete[0], 'n')
2236 repo.dirstate.update(remove[0], 'r')
2232 repo.dirstate.update(remove[0], 'r')
2237 return r
2233 return r
2238
2234
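# Illustrative sketch, not part of commands.py: revert() above decides what to
# do with each file by looking it up in a dispatch table of
# (state set, action if in target manifest, action if not, backup flags).
# Below is a minimal, self-contained model of that pattern; the file names,
# states and actions are toy stand-ins, not Mercurial's real data.

def _sketch_revert_dispatch():
    modified = {'a.txt': True}
    added = {'b.txt': True}
    unknown = {'c.txt': True}
    target_manifest = {'a.txt': 'node1', 'c.txt': 'node3'}   # b.txt absent

    actions = []
    def revert(f): actions.append('revert %s' % f)
    def remove(f): actions.append('remove %s' % f)
    def forget(f): actions.append('forget %s' % f)
    def add(f): actions.append('add %s' % f)

    # (state, action if in target manifest, action if not)
    disptable = (
        (modified, revert, remove),
        (added, revert, forget),
        (unknown, add, None),
    )

    for f in ['a.txt', 'b.txt', 'c.txt']:
        for table, hit, miss in disptable:
            if f not in table:
                continue
            if f in target_manifest:
                hit(f)
            elif miss is not None:
                miss(f)
            break
    return actions

print(_sketch_revert_dispatch())   # ['revert a.txt', 'forget b.txt', 'add c.txt']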
2239 def rollback(ui, repo):
2235 def rollback(ui, repo):
2240 """roll back the last transaction in this repository
2236 """roll back the last transaction in this repository
2241
2237
2242 Roll back the last transaction in this repository, restoring the
2238 Roll back the last transaction in this repository, restoring the
2243 project to its state prior to the transaction.
2239 project to its state prior to the transaction.
2244
2240
2245 Transactions are used to encapsulate the effects of all commands
2241 Transactions are used to encapsulate the effects of all commands
2246 that create new changesets or propagate existing changesets into a
2242 that create new changesets or propagate existing changesets into a
2247 repository. For example, the following commands are transactional,
2243 repository. For example, the following commands are transactional,
2248 and their effects can be rolled back:
2244 and their effects can be rolled back:
2249
2245
2250 commit
2246 commit
2251 import
2247 import
2252 pull
2248 pull
2253 push (with this repository as destination)
2249 push (with this repository as destination)
2254 unbundle
2250 unbundle
2255
2251
2256 This command should be used with care. There is only one level of
2252 This command should be used with care. There is only one level of
2257 rollback, and there is no way to undo a rollback.
2253 rollback, and there is no way to undo a rollback.
2258
2254
2259 This command is not intended for use on public repositories. Once
2255 This command is not intended for use on public repositories. Once
2260 changes are visible for pull by other users, rolling a transaction
2256 changes are visible for pull by other users, rolling a transaction
2261 back locally is ineffective (someone else may already have pulled
2257 back locally is ineffective (someone else may already have pulled
2262 the changes). Furthermore, a race is possible with readers of the
2258 the changes). Furthermore, a race is possible with readers of the
2263 repository; for example an in-progress pull from the repository
2259 repository; for example an in-progress pull from the repository
2264 may fail if a rollback is performed.
2260 may fail if a rollback is performed.
2265 """
2261 """
2266 repo.rollback()
2262 repo.rollback()
2267
2263
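# Illustrative sketch, not part of commands.py: the rollback() docstring above
# describes a single level of undo over the last transaction. A toy,
# self-contained model of that behaviour (this is not Mercurial's real
# journal/undo machinery):

class ToyRepo(object):
    def __init__(self):
        self.changesets = []
        self._undo = None            # snapshot taken by the last transaction

    def transaction(self, new_changesets):
        # record state before the operation; commit, import, pull, push into
        # this repo and unbundle would all funnel through something like this
        self._undo = list(self.changesets)
        self.changesets.extend(new_changesets)

    def rollback(self):
        if self._undo is None:
            print('no rollback information available')
            return
        self.changesets = self._undo
        self._undo = None            # only one level of rollback

repo = ToyRepo()
repo.transaction(['rev0', 'rev1'])
repo.transaction(['rev2'])
repo.rollback()                      # drops rev2
print(repo.changesets)               # ['rev0', 'rev1']
repo.rollback()                      # second rollback: nothing left to undo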
2268 def root(ui, repo):
2264 def root(ui, repo):
2269 """print the root (top) of the current working dir
2265 """print the root (top) of the current working dir
2270
2266
2271 Print the root directory of the current repository.
2267 Print the root directory of the current repository.
2272 """
2268 """
2273 ui.write(repo.root + "\n")
2269 ui.write(repo.root + "\n")
2274
2270
2275 def serve(ui, repo, **opts):
2271 def serve(ui, repo, **opts):
2276 """export the repository via HTTP
2272 """export the repository via HTTP
2277
2273
2278 Start a local HTTP repository browser and pull server.
2274 Start a local HTTP repository browser and pull server.
2279
2275
2280 By default, the server logs accesses to stdout and errors to
2276 By default, the server logs accesses to stdout and errors to
2281 stderr. Use the "-A" and "-E" options to log to files.
2277 stderr. Use the "-A" and "-E" options to log to files.
2282 """
2278 """
2283
2279
2284 if opts["stdio"]:
2280 if opts["stdio"]:
2285 if repo is None:
2281 if repo is None:
2286 raise hg.RepoError(_("There is no Mercurial repository here"
2282 raise hg.RepoError(_("There is no Mercurial repository here"
2287 " (.hg not found)"))
2283 " (.hg not found)"))
2288 s = sshserver.sshserver(ui, repo)
2284 s = sshserver.sshserver(ui, repo)
2289 s.serve_forever()
2285 s.serve_forever()
2290
2286
2291 optlist = ("name templates style address port ipv6"
2287 optlist = ("name templates style address port ipv6"
2292 " accesslog errorlog webdir_conf")
2288 " accesslog errorlog webdir_conf")
2293 for o in optlist.split():
2289 for o in optlist.split():
2294 if opts[o]:
2290 if opts[o]:
2295 ui.setconfig("web", o, str(opts[o]))
2291 ui.setconfig("web", o, str(opts[o]))
2296
2292
2297 if repo is None and not ui.config("web", "webdir_conf"):
2293 if repo is None and not ui.config("web", "webdir_conf"):
2298 raise hg.RepoError(_("There is no Mercurial repository here"
2294 raise hg.RepoError(_("There is no Mercurial repository here"
2299 " (.hg not found)"))
2295 " (.hg not found)"))
2300
2296
2301 if opts['daemon'] and not opts['daemon_pipefds']:
2297 if opts['daemon'] and not opts['daemon_pipefds']:
2302 rfd, wfd = os.pipe()
2298 rfd, wfd = os.pipe()
2303 args = sys.argv[:]
2299 args = sys.argv[:]
2304 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2300 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2305 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2301 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2306 args[0], args)
2302 args[0], args)
2307 os.close(wfd)
2303 os.close(wfd)
2308 os.read(rfd, 1)
2304 os.read(rfd, 1)
2309 os._exit(0)
2305 os._exit(0)
2310
2306
2311 httpd = hgweb.server.create_server(ui, repo)
2307 httpd = hgweb.server.create_server(ui, repo)
2312
2308
2313 if ui.verbose:
2309 if ui.verbose:
2314 if httpd.port != 80:
2310 if httpd.port != 80:
2315 ui.status(_('listening at http://%s:%d/\n') %
2311 ui.status(_('listening at http://%s:%d/\n') %
2316 (httpd.addr, httpd.port))
2312 (httpd.addr, httpd.port))
2317 else:
2313 else:
2318 ui.status(_('listening at http://%s/\n') % httpd.addr)
2314 ui.status(_('listening at http://%s/\n') % httpd.addr)
2319
2315
2320 if opts['pid_file']:
2316 if opts['pid_file']:
2321 fp = open(opts['pid_file'], 'w')
2317 fp = open(opts['pid_file'], 'w')
2322 fp.write(str(os.getpid()) + '\n')
2318 fp.write(str(os.getpid()) + '\n')
2323 fp.close()
2319 fp.close()
2324
2320
2325 if opts['daemon_pipefds']:
2321 if opts['daemon_pipefds']:
2326 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2322 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2327 os.close(rfd)
2323 os.close(rfd)
2328 os.write(wfd, 'y')
2324 os.write(wfd, 'y')
2329 os.close(wfd)
2325 os.close(wfd)
2330 sys.stdout.flush()
2326 sys.stdout.flush()
2331 sys.stderr.flush()
2327 sys.stderr.flush()
2332 fd = os.open(util.nulldev, os.O_RDWR)
2328 fd = os.open(util.nulldev, os.O_RDWR)
2333 if fd != 0: os.dup2(fd, 0)
2329 if fd != 0: os.dup2(fd, 0)
2334 if fd != 1: os.dup2(fd, 1)
2330 if fd != 1: os.dup2(fd, 1)
2335 if fd != 2: os.dup2(fd, 2)
2331 if fd != 2: os.dup2(fd, 2)
2336 if fd not in (0, 1, 2): os.close(fd)
2332 if fd not in (0, 1, 2): os.close(fd)
2337
2333
2338 httpd.serve_forever()
2334 httpd.serve_forever()
2339
2335
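# Illustrative sketch, not part of commands.py: with --daemon, serve() above
# re-spawns itself detached and the parent blocks on a pipe until the child
# writes 'y' once the server is ready. A minimal POSIX-only model of that
# handshake, using os.fork instead of the os.spawnvp re-exec used above:

import os, time

def _daemon_handshake():
    rfd, wfd = os.pipe()
    pid = os.fork()
    if pid:                          # parent: wait for the ready byte, then return
        os.close(wfd)
        os.read(rfd, 1)
        os.close(rfd)
        print('child %d reported ready' % pid)
        return
    # child: pretend to set up the listening server, then signal readiness
    os.close(rfd)
    time.sleep(0.1)                  # stand-in for create_server()
    os.write(wfd, b'y')
    os.close(wfd)
    os._exit(0)

if __name__ == '__main__' and hasattr(os, 'fork'):
    _daemon_handshake()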
2340 def status(ui, repo, *pats, **opts):
2336 def status(ui, repo, *pats, **opts):
2341 """show changed files in the working directory
2337 """show changed files in the working directory
2342
2338
2343 Show status of files in the repository. If names are given, only
2339 Show status of files in the repository. If names are given, only
2344 files that match are shown. Files that are clean or ignored are
2340 files that match are shown. Files that are clean or ignored are
2345 not listed unless -c (clean), -i (ignored) or -A is given.
2341 not listed unless -c (clean), -i (ignored) or -A is given.
2346
2342
2347 NOTE: status may appear to disagree with diff if permissions have
2343 NOTE: status may appear to disagree with diff if permissions have
2348 changed or a merge has occurred. The standard diff format does not
2344 changed or a merge has occurred. The standard diff format does not
2349 report permission changes and diff only reports changes relative
2345 report permission changes and diff only reports changes relative
2350 to one merge parent.
2346 to one merge parent.
2351
2347
2352 If one revision is given, it is used as the base revision.
2348 If one revision is given, it is used as the base revision.
2353 If two revisions are given, the difference between them is shown.
2349 If two revisions are given, the difference between them is shown.
2354
2350
2355 The codes used to show the status of files are:
2351 The codes used to show the status of files are:
2356 M = modified
2352 M = modified
2357 A = added
2353 A = added
2358 R = removed
2354 R = removed
2359 C = clean
2355 C = clean
2360 ! = deleted, but still tracked
2356 ! = deleted, but still tracked
2361 ? = not tracked
2357 ? = not tracked
2362 I = ignored (not shown by default)
2358 I = ignored (not shown by default)
2363 = the previously added file was copied from here
2359 = the previously added file was copied from here
2364 """
2360 """
2365
2361
2366 all = opts['all']
2362 all = opts['all']
2367 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2363 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2368
2364
2369 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2365 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2370 cwd = (pats and repo.getcwd()) or ''
2366 cwd = (pats and repo.getcwd()) or ''
2371 modified, added, removed, deleted, unknown, ignored, clean = [
2367 modified, added, removed, deleted, unknown, ignored, clean = [
2372 [util.pathto(cwd, x) for x in n]
2368 [util.pathto(cwd, x) for x in n]
2373 for n in repo.status(node1=node1, node2=node2, files=files,
2369 for n in repo.status(node1=node1, node2=node2, files=files,
2374 match=matchfn,
2370 match=matchfn,
2375 list_ignored=all or opts['ignored'],
2371 list_ignored=all or opts['ignored'],
2376 list_clean=all or opts['clean'])]
2372 list_clean=all or opts['clean'])]
2377
2373
2378 changetypes = (('modified', 'M', modified),
2374 changetypes = (('modified', 'M', modified),
2379 ('added', 'A', added),
2375 ('added', 'A', added),
2380 ('removed', 'R', removed),
2376 ('removed', 'R', removed),
2381 ('deleted', '!', deleted),
2377 ('deleted', '!', deleted),
2382 ('unknown', '?', unknown),
2378 ('unknown', '?', unknown),
2383 ('ignored', 'I', ignored))
2379 ('ignored', 'I', ignored))
2384
2380
2385 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2381 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2386
2382
2387 end = opts['print0'] and '\0' or '\n'
2383 end = opts['print0'] and '\0' or '\n'
2388
2384
2389 for opt, char, changes in ([ct for ct in explicit_changetypes
2385 for opt, char, changes in ([ct for ct in explicit_changetypes
2390 if all or opts[ct[0]]]
2386 if all or opts[ct[0]]]
2391 or changetypes):
2387 or changetypes):
2392 if opts['no_status']:
2388 if opts['no_status']:
2393 format = "%%s%s" % end
2389 format = "%%s%s" % end
2394 else:
2390 else:
2395 format = "%s %%s%s" % (char, end)
2391 format = "%s %%s%s" % (char, end)
2396
2392
2397 for f in changes:
2393 for f in changes:
2398 ui.write(format % f)
2394 ui.write(format % f)
2399 if ((all or opts.get('copies')) and not opts.get('no_status')):
2395 if ((all or opts.get('copies')) and not opts.get('no_status')):
2400 copied = repo.dirstate.copied(f)
2396 copied = repo.dirstate.copied(f)
2401 if copied:
2397 if copied:
2402 ui.write(' %s%s' % (copied, end))
2398 ui.write(' %s%s' % (copied, end))
2403
2399
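# Illustrative sketch, not part of commands.py: status() above renders each
# file through a per-code format string -- "M name\n" normally, just "name\n"
# with --no-status, NUL-terminated with --print0. A self-contained
# illustration of how those pieces combine (file names and codes made up):

def _render_status(files_by_code, no_status=False, print0=False):
    end = print0 and '\0' or '\n'
    out = []
    for char, files in files_by_code:
        if no_status:
            format = "%%s%s" % end            # just the name
        else:
            format = "%s %%s%s" % (char, end)
        for f in files:
            out.append(format % f)
    return ''.join(out)

sample = [('M', ['commands.py']), ('?', ['scratch.txt'])]
print(repr(_render_status(sample)))
# 'M commands.py\n? scratch.txt\n'
print(repr(_render_status(sample, no_status=True, print0=True)))
# 'commands.py\x00scratch.txt\x00'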
2404 def tag(ui, repo, name, rev_=None, **opts):
2400 def tag(ui, repo, name, rev_=None, **opts):
2405 """add a tag for the current tip or a given revision
2401 """add a tag for the current tip or a given revision
2406
2402
2407 Name a particular revision using <name>.
2403 Name a particular revision using <name>.
2408
2404
2409 Tags are used to name particular revisions of the repository and are
2405 Tags are used to name particular revisions of the repository and are
2410 very useful to compare different revisions, to go back to significant
2406 very useful to compare different revisions, to go back to significant
2411 earlier versions or to mark branch points as releases, etc.
2407 earlier versions or to mark branch points as releases, etc.
2412
2408
2413 If no revision is given, the parent of the working directory is used.
2409 If no revision is given, the parent of the working directory is used.
2414
2410
2415 To facilitate version control, distribution, and merging of tags,
2411 To facilitate version control, distribution, and merging of tags,
2416 they are stored as a file named ".hgtags" which is managed
2412 they are stored as a file named ".hgtags" which is managed
2417 similarly to other project files and can be hand-edited if
2413 similarly to other project files and can be hand-edited if
2418 necessary. The file '.hg/localtags' is used for local tags (not
2414 necessary. The file '.hg/localtags' is used for local tags (not
2419 shared among repositories).
2415 shared among repositories).
2420 """
2416 """
2421 if name in ['tip', '.', 'null']:
2417 if name in ['tip', '.', 'null']:
2422 raise util.Abort(_("the name '%s' is reserved") % name)
2418 raise util.Abort(_("the name '%s' is reserved") % name)
2423 if rev_ is not None:
2419 if rev_ is not None:
2424 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2420 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2425 "please use 'hg tag [-r REV] NAME' instead\n"))
2421 "please use 'hg tag [-r REV] NAME' instead\n"))
2426 if opts['rev']:
2422 if opts['rev']:
2427 raise util.Abort(_("use only one form to specify the revision"))
2423 raise util.Abort(_("use only one form to specify the revision"))
2428 if opts['rev']:
2424 if opts['rev']:
2429 rev_ = opts['rev']
2425 rev_ = opts['rev']
2430 if not rev_ and repo.dirstate.parents()[1] != nullid:
2426 if not rev_ and repo.dirstate.parents()[1] != nullid:
2431 raise util.Abort(_('uncommitted merge - please provide a '
2427 raise util.Abort(_('uncommitted merge - please provide a '
2432 'specific revision'))
2428 'specific revision'))
2433 r = repo.changectx(rev_).node()
2429 r = repo.changectx(rev_).node()
2434
2430
2435 message = opts['message']
2431 message = opts['message']
2436 if not message:
2432 if not message:
2437 message = _('Added tag %s for changeset %s') % (name, short(r))
2433 message = _('Added tag %s for changeset %s') % (name, short(r))
2438
2434
2439 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2435 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2440
2436
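# Illustrative sketch, not part of commands.py: as the docstring above says,
# tags are kept in a versioned ".hgtags" file (and .hg/localtags for local
# tags). In its simplest form each line pairs a full changeset hash with a tag
# name, and later lines win. A minimal parser for that layout with made-up
# sample data (real .hgtags handling also copes with removed and superseded
# entries):

def _parse_hgtags(text):
    tags = {}
    for line in text.splitlines():
        line = line.strip()
        if not line:
            continue
        node, name = line.split(' ', 1)
        tags[name] = node              # later entries override earlier ones
    return tags

sample = (
    "1e0b94cfe0cf7f8a33bcecb2b45e9a12fee93c5d 1.0\n"
    "a8e3fb8cbf8f6fbb9ef28c3e543d1d1de23c7b8e 1.0\n"   # 1.0 moved to a new node
)
print(_parse_hgtags(sample))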
2441 def tags(ui, repo):
2437 def tags(ui, repo):
2442 """list repository tags
2438 """list repository tags
2443
2439
2444 List the repository tags.
2440 List the repository tags.
2445
2441
2446 This lists both regular and local tags.
2442 This lists both regular and local tags.
2447 """
2443 """
2448
2444
2449 l = repo.tagslist()
2445 l = repo.tagslist()
2450 l.reverse()
2446 l.reverse()
2451 hexfunc = ui.debugflag and hex or short
2447 hexfunc = ui.debugflag and hex or short
2452 for t, n in l:
2448 for t, n in l:
2453 try:
2449 try:
2454 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2450 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2455 except KeyError:
2451 except KeyError:
2456 r = " ?:?"
2452 r = " ?:?"
2457 if ui.quiet:
2453 if ui.quiet:
2458 ui.write("%s\n" % t)
2454 ui.write("%s\n" % t)
2459 else:
2455 else:
2460 t = util.localsub(t, 30)
2456 t = util.localsub(t, 30)
2461 t += " " * (30 - util.locallen(t))
2457 t += " " * (30 - util.locallen(t))
2462 ui.write("%s %s\n" % (t, r))
2458 ui.write("%s %s\n" % (t, r))
2463
2459
2464 def tip(ui, repo, **opts):
2460 def tip(ui, repo, **opts):
2465 """show the tip revision
2461 """show the tip revision
2466
2462
2467 Show the tip revision.
2463 Show the tip revision.
2468 """
2464 """
2469 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2465 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2470
2466
2471 def unbundle(ui, repo, fname, **opts):
2467 def unbundle(ui, repo, fname, **opts):
2472 """apply a changegroup file
2468 """apply a changegroup file
2473
2469
2474 Apply a compressed changegroup file generated by the bundle
2470 Apply a compressed changegroup file generated by the bundle
2475 command.
2471 command.
2476 """
2472 """
2477 gen = changegroup.readbundle(urllib.urlopen(fname), fname)
2473 gen = changegroup.readbundle(urllib.urlopen(fname), fname)
2478 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2474 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2479 return postincoming(ui, repo, modheads, opts['update'])
2475 return postincoming(ui, repo, modheads, opts['update'])
2480
2476
2481 def update(ui, repo, node=None, clean=False, branch=None, date=None):
2477 def update(ui, repo, node=None, clean=False, date=None):
2482 """update or merge working directory
2478 """update or merge working directory
2483
2479
2484 Update the working directory to the specified revision.
2480 Update the working directory to the specified revision.
2485
2481
2486 If there are no outstanding changes in the working directory and
2482 If there are no outstanding changes in the working directory and
2487 there is a linear relationship between the current version and the
2483 there is a linear relationship between the current version and the
2488 requested version, the result is the requested version.
2484 requested version, the result is the requested version.
2489
2485
2490 To merge the working directory with another revision, use the
2486 To merge the working directory with another revision, use the
2491 merge command.
2487 merge command.
2492
2488
2493 By default, update will refuse to run if doing so would require
2489 By default, update will refuse to run if doing so would require
2494 merging or discarding local changes.
2490 merging or discarding local changes.
2495 """
2491 """
2496 if date:
2492 if date:
2497 if node:
2493 if node:
2498 raise util.Abort(_("you can't specify a revision and a date"))
2494 raise util.Abort(_("you can't specify a revision and a date"))
2499 node = cmdutil.finddate(ui, repo, date)
2495 node = cmdutil.finddate(ui, repo, date)
2500
2496
2501 node = _lookup(repo, node, branch)
2502 if clean:
2497 if clean:
2503 return hg.clean(repo, node)
2498 return hg.clean(repo, node)
2504 else:
2499 else:
2505 return hg.update(repo, node)
2500 return hg.update(repo, node)
2506
2501
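# Illustrative sketch, not part of commands.py: update()'s docstring above
# talks about a "linear relationship" between the working directory's parent
# and the requested revision -- i.e. one is an ancestor of the other in the
# changeset graph. A toy ancestor walk over a made-up parent map showing that
# check (Mercurial's real implementation works on revlogs, not dicts):

def _ancestors(parents, rev):
    seen = set()
    stack = [rev]
    while stack:
        r = stack.pop()
        if r in seen:
            continue
        seen.add(r)
        stack.extend(parents.get(r, ()))
    return seen

def _linear(parents, cur, target):
    return target in _ancestors(parents, cur) or cur in _ancestors(parents, target)

parents = {0: [], 1: [0], 2: [1], 3: [1]}   # 2 and 3 are both children of 1
print(_linear(parents, 2, 0))               # True: a plain update suffices
print(_linear(parents, 2, 3))               # False: needs a merge or -C/clean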
2507 def _lookup(repo, node, branch=None):
2508 if branch:
2509 repo.ui.warn(_("the --branch option is deprecated, "
2510 "please use 'hg branch' instead\n"))
2511 br = repo.branchlookup(branch=branch)
2512 found = []
2513 for x in br:
2514 if branch in br[x]:
2515 found.append(x)
2516 if len(found) > 1:
2517 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2518 for x in found:
2519 cmdutil.show_changeset(ui, repo, {}).show(changenode=x)
2520 raise util.Abort("")
2521 if len(found) == 1:
2522 node = found[0]
2523 repo.ui.warn(_("Using head %s for branch %s\n")
2524 % (short(node), branch))
2525 else:
2526 raise util.Abort(_("branch %s not found") % branch)
2527 else:
2528 node = node and repo.lookup(node) or repo.changelog.tip()
2529 return node
2530
2531 def verify(ui, repo):
2502 def verify(ui, repo):
2532 """verify the integrity of the repository
2503 """verify the integrity of the repository
2533
2504
2534 Verify the integrity of the current repository.
2505 Verify the integrity of the current repository.
2535
2506
2536 This will perform an extensive check of the repository's
2507 This will perform an extensive check of the repository's
2537 integrity, validating the hashes and checksums of each entry in
2508 integrity, validating the hashes and checksums of each entry in
2538 the changelog, manifest, and tracked files, as well as the
2509 the changelog, manifest, and tracked files, as well as the
2539 integrity of their crosslinks and indices.
2510 integrity of their crosslinks and indices.
2540 """
2511 """
2541 return hg.verify(repo)
2512 return hg.verify(repo)
2542
2513
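# Illustrative sketch, not part of commands.py: verify() above delegates to
# hg.verify(), which re-checks the recorded hash of every changelog, manifest
# and filelog entry. A generic, self-contained illustration of that kind of
# content-hash check using hashlib (Mercurial's real node hash also mixes the
# parent nodes into the digest, which this toy version omits):

import hashlib

def _check(recorded_hex, data):
    actual = hashlib.sha1(data).hexdigest()
    if actual != recorded_hex:
        return "integrity error: expected %s, got %s" % (recorded_hex, actual)
    return "ok"

data = b"some revision text"
good = hashlib.sha1(data).hexdigest()
print(_check(good, data))                    # ok
print(_check(good, b"tampered " + data))     # integrity error: ...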
2543 def version_(ui):
2514 def version_(ui):
2544 """output version and copyright information"""
2515 """output version and copyright information"""
2545 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2516 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2546 % version.get_version())
2517 % version.get_version())
2547 ui.status(_(
2518 ui.status(_(
2548 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2519 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2549 "This is free software; see the source for copying conditions. "
2520 "This is free software; see the source for copying conditions. "
2550 "There is NO\nwarranty; "
2521 "There is NO\nwarranty; "
2551 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2522 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2552 ))
2523 ))
2553
2524
2554 # Command options and aliases are listed here, alphabetically
2525 # Command options and aliases are listed here, alphabetically
2555
2526
2556 globalopts = [
2527 globalopts = [
2557 ('R', 'repository', '',
2528 ('R', 'repository', '',
2558 _('repository root directory or symbolic path name')),
2529 _('repository root directory or symbolic path name')),
2559 ('', 'cwd', '', _('change working directory')),
2530 ('', 'cwd', '', _('change working directory')),
2560 ('y', 'noninteractive', None,
2531 ('y', 'noninteractive', None,
2561 _('do not prompt, assume \'yes\' for any required answers')),
2532 _('do not prompt, assume \'yes\' for any required answers')),
2562 ('q', 'quiet', None, _('suppress output')),
2533 ('q', 'quiet', None, _('suppress output')),
2563 ('v', 'verbose', None, _('enable additional output')),
2534 ('v', 'verbose', None, _('enable additional output')),
2564 ('', 'config', [], _('set/override config option')),
2535 ('', 'config', [], _('set/override config option')),
2565 ('', 'debug', None, _('enable debugging output')),
2536 ('', 'debug', None, _('enable debugging output')),
2566 ('', 'debugger', None, _('start debugger')),
2537 ('', 'debugger', None, _('start debugger')),
2567 ('', 'encoding', util._encoding, _('set the charset encoding')),
2538 ('', 'encoding', util._encoding, _('set the charset encoding')),
2568 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2539 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2569 ('', 'lsprof', None, _('print improved command execution profile')),
2540 ('', 'lsprof', None, _('print improved command execution profile')),
2570 ('', 'traceback', None, _('print traceback on exception')),
2541 ('', 'traceback', None, _('print traceback on exception')),
2571 ('', 'time', None, _('time how long the command takes')),
2542 ('', 'time', None, _('time how long the command takes')),
2572 ('', 'profile', None, _('print command execution profile')),
2543 ('', 'profile', None, _('print command execution profile')),
2573 ('', 'version', None, _('output version information and exit')),
2544 ('', 'version', None, _('output version information and exit')),
2574 ('h', 'help', None, _('display help and exit')),
2545 ('h', 'help', None, _('display help and exit')),
2575 ]
2546 ]
2576
2547
2577 dryrunopts = [('n', 'dry-run', None,
2548 dryrunopts = [('n', 'dry-run', None,
2578 _('do not perform actions, just print output'))]
2549 _('do not perform actions, just print output'))]
2579
2550
2580 remoteopts = [
2551 remoteopts = [
2581 ('e', 'ssh', '', _('specify ssh command to use')),
2552 ('e', 'ssh', '', _('specify ssh command to use')),
2582 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2553 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2583 ]
2554 ]
2584
2555
2585 walkopts = [
2556 walkopts = [
2586 ('I', 'include', [], _('include names matching the given patterns')),
2557 ('I', 'include', [], _('include names matching the given patterns')),
2587 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2558 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2588 ]
2559 ]
2589
2560
2590 commitopts = [
2561 commitopts = [
2591 ('m', 'message', '', _('use <text> as commit message')),
2562 ('m', 'message', '', _('use <text> as commit message')),
2592 ('l', 'logfile', '', _('read commit message from <file>')),
2563 ('l', 'logfile', '', _('read commit message from <file>')),
2593 ]
2564 ]
2594
2565
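# Illustrative sketch, not part of commands.py: the table below maps each
# command name to a (function, options, synopsis) tuple; a leading "^" marks
# commands shown in the short help listing and "|" separates aliases (e.g.
# "^commit|ci"). A self-contained miniature of that dispatch pattern with
# made-up commands:

def _hello(name):
    return "hello %s" % name

def _goodbye(name):
    return "goodbye %s" % name

_table = {
    "^hello|hi": (_hello, [], "toy hello NAME"),
    "goodbye": (_goodbye, [], "toy goodbye NAME"),
}

def _find(cmd):
    for key, entry in _table.items():
        aliases = key.lstrip("^").split("|")
        if cmd in aliases:
            return entry
    raise KeyError("unknown command: %s" % cmd)

func, opts, synopsis = _find("hi")
print(func("world"))                 # hello world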
2595 table = {
2566 table = {
2596 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2567 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2597 "addremove":
2568 "addremove":
2598 (addremove,
2569 (addremove,
2599 [('s', 'similarity', '',
2570 [('s', 'similarity', '',
2600 _('guess renamed files by similarity (0<=s<=100)')),
2571 _('guess renamed files by similarity (0<=s<=100)')),
2601 ] + walkopts + dryrunopts,
2572 ] + walkopts + dryrunopts,
2602 _('hg addremove [OPTION]... [FILE]...')),
2573 _('hg addremove [OPTION]... [FILE]...')),
2603 "^annotate":
2574 "^annotate":
2604 (annotate,
2575 (annotate,
2605 [('r', 'rev', '', _('annotate the specified revision')),
2576 [('r', 'rev', '', _('annotate the specified revision')),
2606 ('f', 'follow', None, _('follow file copies and renames')),
2577 ('f', 'follow', None, _('follow file copies and renames')),
2607 ('a', 'text', None, _('treat all files as text')),
2578 ('a', 'text', None, _('treat all files as text')),
2608 ('u', 'user', None, _('list the author')),
2579 ('u', 'user', None, _('list the author')),
2609 ('d', 'date', None, _('list the date')),
2580 ('d', 'date', None, _('list the date')),
2610 ('n', 'number', None, _('list the revision number (default)')),
2581 ('n', 'number', None, _('list the revision number (default)')),
2611 ('c', 'changeset', None, _('list the changeset')),
2582 ('c', 'changeset', None, _('list the changeset')),
2612 ] + walkopts,
2583 ] + walkopts,
2613 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2584 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2614 "archive":
2585 "archive":
2615 (archive,
2586 (archive,
2616 [('', 'no-decode', None, _('do not pass files through decoders')),
2587 [('', 'no-decode', None, _('do not pass files through decoders')),
2617 ('p', 'prefix', '', _('directory prefix for files in archive')),
2588 ('p', 'prefix', '', _('directory prefix for files in archive')),
2618 ('r', 'rev', '', _('revision to distribute')),
2589 ('r', 'rev', '', _('revision to distribute')),
2619 ('t', 'type', '', _('type of distribution to create')),
2590 ('t', 'type', '', _('type of distribution to create')),
2620 ] + walkopts,
2591 ] + walkopts,
2621 _('hg archive [OPTION]... DEST')),
2592 _('hg archive [OPTION]... DEST')),
2622 "backout":
2593 "backout":
2623 (backout,
2594 (backout,
2624 [('', 'merge', None,
2595 [('', 'merge', None,
2625 _('merge with old dirstate parent after backout')),
2596 _('merge with old dirstate parent after backout')),
2626 ('d', 'date', '', _('record datecode as commit date')),
2597 ('d', 'date', '', _('record datecode as commit date')),
2627 ('', 'parent', '', _('parent to choose when backing out merge')),
2598 ('', 'parent', '', _('parent to choose when backing out merge')),
2628 ('u', 'user', '', _('record user as committer')),
2599 ('u', 'user', '', _('record user as committer')),
2629 ] + walkopts + commitopts,
2600 ] + walkopts + commitopts,
2630 _('hg backout [OPTION]... REV')),
2601 _('hg backout [OPTION]... REV')),
2631 "branch": (branch, [], _('hg branch [NAME]')),
2602 "branch": (branch, [], _('hg branch [NAME]')),
2632 "branches": (branches, [], _('hg branches')),
2603 "branches": (branches, [], _('hg branches')),
2633 "bundle":
2604 "bundle":
2634 (bundle,
2605 (bundle,
2635 [('f', 'force', None,
2606 [('f', 'force', None,
2636 _('run even when remote repository is unrelated')),
2607 _('run even when remote repository is unrelated')),
2637 ('r', 'rev', [],
2608 ('r', 'rev', [],
2638 _('a changeset you would like to bundle')),
2609 _('a changeset you would like to bundle')),
2639 ('', 'base', [],
2610 ('', 'base', [],
2640 _('a base changeset to specify instead of a destination')),
2611 _('a base changeset to specify instead of a destination')),
2641 ] + remoteopts,
2612 ] + remoteopts,
2642 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2613 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2643 "cat":
2614 "cat":
2644 (cat,
2615 (cat,
2645 [('o', 'output', '', _('print output to file with formatted name')),
2616 [('o', 'output', '', _('print output to file with formatted name')),
2646 ('r', 'rev', '', _('print the given revision')),
2617 ('r', 'rev', '', _('print the given revision')),
2647 ] + walkopts,
2618 ] + walkopts,
2648 _('hg cat [OPTION]... FILE...')),
2619 _('hg cat [OPTION]... FILE...')),
2649 "^clone":
2620 "^clone":
2650 (clone,
2621 (clone,
2651 [('U', 'noupdate', None, _('do not update the new working directory')),
2622 [('U', 'noupdate', None, _('do not update the new working directory')),
2652 ('r', 'rev', [],
2623 ('r', 'rev', [],
2653 _('a changeset you would like to have after cloning')),
2624 _('a changeset you would like to have after cloning')),
2654 ('', 'pull', None, _('use pull protocol to copy metadata')),
2625 ('', 'pull', None, _('use pull protocol to copy metadata')),
2655 ('', 'uncompressed', None,
2626 ('', 'uncompressed', None,
2656 _('use uncompressed transfer (fast over LAN)')),
2627 _('use uncompressed transfer (fast over LAN)')),
2657 ] + remoteopts,
2628 ] + remoteopts,
2658 _('hg clone [OPTION]... SOURCE [DEST]')),
2629 _('hg clone [OPTION]... SOURCE [DEST]')),
2659 "^commit|ci":
2630 "^commit|ci":
2660 (commit,
2631 (commit,
2661 [('A', 'addremove', None,
2632 [('A', 'addremove', None,
2662 _('mark new/missing files as added/removed before committing')),
2633 _('mark new/missing files as added/removed before committing')),
2663 ('d', 'date', '', _('record datecode as commit date')),
2634 ('d', 'date', '', _('record datecode as commit date')),
2664 ('u', 'user', '', _('record user as committer')),
2635 ('u', 'user', '', _('record user as committer')),
2665 ] + walkopts + commitopts,
2636 ] + walkopts + commitopts,
2666 _('hg commit [OPTION]... [FILE]...')),
2637 _('hg commit [OPTION]... [FILE]...')),
2667 "copy|cp":
2638 "copy|cp":
2668 (copy,
2639 (copy,
2669 [('A', 'after', None, _('record a copy that has already occurred')),
2640 [('A', 'after', None, _('record a copy that has already occurred')),
2670 ('f', 'force', None,
2641 ('f', 'force', None,
2671 _('forcibly copy over an existing managed file')),
2642 _('forcibly copy over an existing managed file')),
2672 ] + walkopts + dryrunopts,
2643 ] + walkopts + dryrunopts,
2673 _('hg copy [OPTION]... [SOURCE]... DEST')),
2644 _('hg copy [OPTION]... [SOURCE]... DEST')),
2674 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2645 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2675 "debugcomplete":
2646 "debugcomplete":
2676 (debugcomplete,
2647 (debugcomplete,
2677 [('o', 'options', None, _('show the command options'))],
2648 [('o', 'options', None, _('show the command options'))],
2678 _('debugcomplete [-o] CMD')),
2649 _('debugcomplete [-o] CMD')),
2679 "debuginstall": (debuginstall, [], _('debuginstall')),
2650 "debuginstall": (debuginstall, [], _('debuginstall')),
2680 "debugrebuildstate":
2651 "debugrebuildstate":
2681 (debugrebuildstate,
2652 (debugrebuildstate,
2682 [('r', 'rev', '', _('revision to rebuild to'))],
2653 [('r', 'rev', '', _('revision to rebuild to'))],
2683 _('debugrebuildstate [-r REV] [REV]')),
2654 _('debugrebuildstate [-r REV] [REV]')),
2684 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2655 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2685 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2656 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2686 "debugstate": (debugstate, [], _('debugstate')),
2657 "debugstate": (debugstate, [], _('debugstate')),
2687 "debugdate":
2658 "debugdate":
2688 (debugdate,
2659 (debugdate,
2689 [('e', 'extended', None, _('try extended date formats'))],
2660 [('e', 'extended', None, _('try extended date formats'))],
2690 _('debugdate [-e] DATE [RANGE]')),
2661 _('debugdate [-e] DATE [RANGE]')),
2691 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2662 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2692 "debugindex": (debugindex, [], _('debugindex FILE')),
2663 "debugindex": (debugindex, [], _('debugindex FILE')),
2693 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2664 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2694 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2665 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2695 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2666 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2696 "^diff":
2667 "^diff":
2697 (diff,
2668 (diff,
2698 [('r', 'rev', [], _('revision')),
2669 [('r', 'rev', [], _('revision')),
2699 ('a', 'text', None, _('treat all files as text')),
2670 ('a', 'text', None, _('treat all files as text')),
2700 ('p', 'show-function', None,
2671 ('p', 'show-function', None,
2701 _('show which function each change is in')),
2672 _('show which function each change is in')),
2702 ('g', 'git', None, _('use git extended diff format')),
2673 ('g', 'git', None, _('use git extended diff format')),
2703 ('', 'nodates', None, _("don't include dates in diff headers")),
2674 ('', 'nodates', None, _("don't include dates in diff headers")),
2704 ('w', 'ignore-all-space', None,
2675 ('w', 'ignore-all-space', None,
2705 _('ignore white space when comparing lines')),
2676 _('ignore white space when comparing lines')),
2706 ('b', 'ignore-space-change', None,
2677 ('b', 'ignore-space-change', None,
2707 _('ignore changes in the amount of white space')),
2678 _('ignore changes in the amount of white space')),
2708 ('B', 'ignore-blank-lines', None,
2679 ('B', 'ignore-blank-lines', None,
2709 _('ignore changes whose lines are all blank')),
2680 _('ignore changes whose lines are all blank')),
2710 ] + walkopts,
2681 ] + walkopts,
2711 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2682 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2712 "^export":
2683 "^export":
2713 (export,
2684 (export,
2714 [('o', 'output', '', _('print output to file with formatted name')),
2685 [('o', 'output', '', _('print output to file with formatted name')),
2715 ('a', 'text', None, _('treat all files as text')),
2686 ('a', 'text', None, _('treat all files as text')),
2716 ('g', 'git', None, _('use git extended diff format')),
2687 ('g', 'git', None, _('use git extended diff format')),
2717 ('', 'nodates', None, _("don't include dates in diff headers")),
2688 ('', 'nodates', None, _("don't include dates in diff headers")),
2718 ('', 'switch-parent', None, _('diff against the second parent'))],
2689 ('', 'switch-parent', None, _('diff against the second parent'))],
2719 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2690 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2720 "grep":
2691 "grep":
2721 (grep,
2692 (grep,
2722 [('0', 'print0', None, _('end fields with NUL')),
2693 [('0', 'print0', None, _('end fields with NUL')),
2723 ('', 'all', None, _('print all revisions that match')),
2694 ('', 'all', None, _('print all revisions that match')),
2724 ('f', 'follow', None,
2695 ('f', 'follow', None,
2725 _('follow changeset history, or file history across copies and renames')),
2696 _('follow changeset history, or file history across copies and renames')),
2726 ('i', 'ignore-case', None, _('ignore case when matching')),
2697 ('i', 'ignore-case', None, _('ignore case when matching')),
2727 ('l', 'files-with-matches', None,
2698 ('l', 'files-with-matches', None,
2728 _('print only filenames and revs that match')),
2699 _('print only filenames and revs that match')),
2729 ('n', 'line-number', None, _('print matching line numbers')),
2700 ('n', 'line-number', None, _('print matching line numbers')),
2730 ('r', 'rev', [], _('search in given revision range')),
2701 ('r', 'rev', [], _('search in given revision range')),
2731 ('u', 'user', None, _('print user who committed change')),
2702 ('u', 'user', None, _('print user who committed change')),
2732 ] + walkopts,
2703 ] + walkopts,
2733 _('hg grep [OPTION]... PATTERN [FILE]...')),
2704 _('hg grep [OPTION]... PATTERN [FILE]...')),
2734 "heads":
2705 "heads":
2735 (heads,
2706 (heads,
2736 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2707 [('', 'style', '', _('display using template map file')),
2737 ('', 'style', '', _('display using template map file')),
2738 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2708 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2739 ('', 'template', '', _('display with template'))],
2709 ('', 'template', '', _('display with template'))],
2740 _('hg heads [-r REV]')),
2710 _('hg heads [-r REV]')),
2741 "help": (help_, [], _('hg help [COMMAND]')),
2711 "help": (help_, [], _('hg help [COMMAND]')),
2742 "identify|id": (identify, [], _('hg identify')),
2712 "identify|id": (identify, [], _('hg identify')),
2743 "import|patch":
2713 "import|patch":
2744 (import_,
2714 (import_,
2745 [('p', 'strip', 1,
2715 [('p', 'strip', 1,
2746 _('directory strip option for patch. This has the same\n'
2716 _('directory strip option for patch. This has the same\n'
2747 'meaning as the corresponding patch option')),
2717 'meaning as the corresponding patch option')),
2748 ('b', 'base', '', _('base path (DEPRECATED)')),
2718 ('b', 'base', '', _('base path')),
2749 ('f', 'force', None,
2719 ('f', 'force', None,
2750 _('skip check for outstanding uncommitted changes'))] + commitopts,
2720 _('skip check for outstanding uncommitted changes'))] + commitopts,
2751 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2721 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2752 "incoming|in": (incoming,
2722 "incoming|in": (incoming,
2753 [('M', 'no-merges', None, _('do not show merges')),
2723 [('M', 'no-merges', None, _('do not show merges')),
2754 ('f', 'force', None,
2724 ('f', 'force', None,
2755 _('run even when remote repository is unrelated')),
2725 _('run even when remote repository is unrelated')),
2756 ('', 'style', '', _('display using template map file')),
2726 ('', 'style', '', _('display using template map file')),
2757 ('n', 'newest-first', None, _('show newest record first')),
2727 ('n', 'newest-first', None, _('show newest record first')),
2758 ('', 'bundle', '', _('file to store the bundles into')),
2728 ('', 'bundle', '', _('file to store the bundles into')),
2759 ('p', 'patch', None, _('show patch')),
2729 ('p', 'patch', None, _('show patch')),
2760 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2730 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2761 ('', 'template', '', _('display with template')),
2731 ('', 'template', '', _('display with template')),
2762 ] + remoteopts,
2732 ] + remoteopts,
2763 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2733 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2764 ' [--bundle FILENAME] [SOURCE]')),
2734 ' [--bundle FILENAME] [SOURCE]')),
2765 "^init":
2735 "^init":
2766 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2736 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2767 "locate":
2737 "locate":
2768 (locate,
2738 (locate,
2769 [('r', 'rev', '', _('search the repository as it stood at rev')),
2739 [('r', 'rev', '', _('search the repository as it stood at rev')),
2770 ('0', 'print0', None,
2740 ('0', 'print0', None,
2771 _('end filenames with NUL, for use with xargs')),
2741 _('end filenames with NUL, for use with xargs')),
2772 ('f', 'fullpath', None,
2742 ('f', 'fullpath', None,
2773 _('print complete paths from the filesystem root')),
2743 _('print complete paths from the filesystem root')),
2774 ] + walkopts,
2744 ] + walkopts,
2775 _('hg locate [OPTION]... [PATTERN]...')),
2745 _('hg locate [OPTION]... [PATTERN]...')),
2776 "^log|history":
2746 "^log|history":
2777 (log,
2747 (log,
2778 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2748 [('f', 'follow', None,
2779 ('f', 'follow', None,
2780 _('follow changeset history, or file history across copies and renames')),
2749 _('follow changeset history, or file history across copies and renames')),
2781 ('', 'follow-first', None,
2750 ('', 'follow-first', None,
2782 _('only follow the first parent of merge changesets')),
2751 _('only follow the first parent of merge changesets')),
2783 ('d', 'date', '', _('show revs matching date spec')),
2752 ('d', 'date', '', _('show revs matching date spec')),
2784 ('C', 'copies', None, _('show copied files')),
2753 ('C', 'copies', None, _('show copied files')),
2785 ('k', 'keyword', [], _('search for a keyword')),
2754 ('k', 'keyword', [], _('search for a keyword')),
2786 ('l', 'limit', '', _('limit number of changes displayed')),
2755 ('l', 'limit', '', _('limit number of changes displayed')),
2787 ('r', 'rev', [], _('show the specified revision or range')),
2756 ('r', 'rev', [], _('show the specified revision or range')),
2788 ('', 'removed', None, _('include revs where files were removed')),
2757 ('', 'removed', None, _('include revs where files were removed')),
2789 ('M', 'no-merges', None, _('do not show merges')),
2758 ('M', 'no-merges', None, _('do not show merges')),
2790 ('', 'style', '', _('display using template map file')),
2759 ('', 'style', '', _('display using template map file')),
2791 ('m', 'only-merges', None, _('show only merges')),
2760 ('m', 'only-merges', None, _('show only merges')),
2792 ('p', 'patch', None, _('show patch')),
2761 ('p', 'patch', None, _('show patch')),
2793 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2762 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2794 ('', 'template', '', _('display with template')),
2763 ('', 'template', '', _('display with template')),
2795 ] + walkopts,
2764 ] + walkopts,
2796 _('hg log [OPTION]... [FILE]')),
2765 _('hg log [OPTION]... [FILE]')),
2797 "manifest": (manifest, [], _('hg manifest [REV]')),
2766 "manifest": (manifest, [], _('hg manifest [REV]')),
2798 "merge":
2767 "merge":
2799 (merge,
2768 (merge,
2800 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2769 [('f', 'force', None, _('force a merge with outstanding changes'))],
2801 ('f', 'force', None, _('force a merge with outstanding changes'))],
2802 _('hg merge [-f] [REV]')),
2770 _('hg merge [-f] [REV]')),
2803 "outgoing|out": (outgoing,
2771 "outgoing|out": (outgoing,
2804 [('M', 'no-merges', None, _('do not show merges')),
2772 [('M', 'no-merges', None, _('do not show merges')),
2805 ('f', 'force', None,
2773 ('f', 'force', None,
2806 _('run even when remote repository is unrelated')),
2774 _('run even when remote repository is unrelated')),
2807 ('p', 'patch', None, _('show patch')),
2775 ('p', 'patch', None, _('show patch')),
2808 ('', 'style', '', _('display using template map file')),
2776 ('', 'style', '', _('display using template map file')),
2809 ('r', 'rev', [], _('a specific revision you would like to push')),
2777 ('r', 'rev', [], _('a specific revision you would like to push')),
2810 ('n', 'newest-first', None, _('show newest record first')),
2778 ('n', 'newest-first', None, _('show newest record first')),
2811 ('', 'template', '', _('display with template')),
2779 ('', 'template', '', _('display with template')),
2812 ] + remoteopts,
2780 ] + remoteopts,
2813 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2781 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2814 "^parents":
2782 "^parents":
2815 (parents,
2783 (parents,
2816 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2784 [('r', 'rev', '', _('show parents from the specified rev')),
2817 ('r', 'rev', '', _('show parents from the specified rev')),
2818 ('', 'style', '', _('display using template map file')),
2785 ('', 'style', '', _('display using template map file')),
2819 ('', 'template', '', _('display with template'))],
2786 ('', 'template', '', _('display with template'))],
2820 _('hg parents [-r REV] [FILE]')),
2787 _('hg parents [-r REV] [FILE]')),
2821 "paths": (paths, [], _('hg paths [NAME]')),
2788 "paths": (paths, [], _('hg paths [NAME]')),
2822 "^pull":
2789 "^pull":
2823 (pull,
2790 (pull,
2824 [('u', 'update', None,
2791 [('u', 'update', None,
2825 _('update to new tip if changesets were pulled')),
2792 _('update to new tip if changesets were pulled')),
2826 ('f', 'force', None,
2793 ('f', 'force', None,
2827 _('run even when remote repository is unrelated')),
2794 _('run even when remote repository is unrelated')),
2828 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2795 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2829 ] + remoteopts,
2796 ] + remoteopts,
2830 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2797 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2831 "^push":
2798 "^push":
2832 (push,
2799 (push,
2833 [('f', 'force', None, _('force push')),
2800 [('f', 'force', None, _('force push')),
2834 ('r', 'rev', [], _('a specific revision you would like to push')),
2801 ('r', 'rev', [], _('a specific revision you would like to push')),
2835 ] + remoteopts,
2802 ] + remoteopts,
2836 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2803 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2837 "debugrawcommit|rawcommit":
2804 "debugrawcommit|rawcommit":
2838 (rawcommit,
2805 (rawcommit,
2839 [('p', 'parent', [], _('parent')),
2806 [('p', 'parent', [], _('parent')),
2840 ('d', 'date', '', _('date code')),
2807 ('d', 'date', '', _('date code')),
2841 ('u', 'user', '', _('user')),
2808 ('u', 'user', '', _('user')),
2842 ('F', 'files', '', _('file list'))
2809 ('F', 'files', '', _('file list'))
2843 ] + commitopts,
2810 ] + commitopts,
2844 _('hg debugrawcommit [OPTION]... [FILE]...')),
2811 _('hg debugrawcommit [OPTION]... [FILE]...')),
2845 "recover": (recover, [], _('hg recover')),
2812 "recover": (recover, [], _('hg recover')),
2846 "^remove|rm":
2813 "^remove|rm":
2847 (remove,
2814 (remove,
2848 [('A', 'after', None, _('record remove that has already occurred')),
2815 [('A', 'after', None, _('record remove that has already occurred')),
2849 ('f', 'force', None, _('remove file even if modified')),
2816 ('f', 'force', None, _('remove file even if modified')),
2850 ] + walkopts,
2817 ] + walkopts,
2851 _('hg remove [OPTION]... FILE...')),
2818 _('hg remove [OPTION]... FILE...')),
2852 "rename|mv":
2819 "rename|mv":
2853 (rename,
2820 (rename,
2854 [('A', 'after', None, _('record a rename that has already occurred')),
2821 [('A', 'after', None, _('record a rename that has already occurred')),
2855 ('f', 'force', None,
2822 ('f', 'force', None,
2856 _('forcibly copy over an existing managed file')),
2823 _('forcibly copy over an existing managed file')),
2857 ] + walkopts + dryrunopts,
2824 ] + walkopts + dryrunopts,
2858 _('hg rename [OPTION]... SOURCE... DEST')),
2825 _('hg rename [OPTION]... SOURCE... DEST')),
2859 "^revert":
2826 "^revert":
2860 (revert,
2827 (revert,
2861 [('a', 'all', None, _('revert all changes when no arguments given')),
2828 [('a', 'all', None, _('revert all changes when no arguments given')),
2862 ('d', 'date', '', _('tipmost revision matching date')),
2829 ('d', 'date', '', _('tipmost revision matching date')),
2863 ('r', 'rev', '', _('revision to revert to')),
2830 ('r', 'rev', '', _('revision to revert to')),
2864 ('', 'no-backup', None, _('do not save backup copies of files')),
2831 ('', 'no-backup', None, _('do not save backup copies of files')),
2865 ] + walkopts + dryrunopts,
2832 ] + walkopts + dryrunopts,
2866 _('hg revert [-r REV] [NAME]...')),
2833 _('hg revert [-r REV] [NAME]...')),
2867 "rollback": (rollback, [], _('hg rollback')),
2834 "rollback": (rollback, [], _('hg rollback')),
2868 "root": (root, [], _('hg root')),
2835 "root": (root, [], _('hg root')),
2869 "showconfig|debugconfig":
2836 "showconfig|debugconfig":
2870 (showconfig,
2837 (showconfig,
2871 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2838 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2872 _('showconfig [-u] [NAME]...')),
2839 _('showconfig [-u] [NAME]...')),
2873 "^serve":
2840 "^serve":
2874 (serve,
2841 (serve,
2875 [('A', 'accesslog', '', _('name of access log file to write to')),
2842 [('A', 'accesslog', '', _('name of access log file to write to')),
2876 ('d', 'daemon', None, _('run server in background')),
2843 ('d', 'daemon', None, _('run server in background')),
2877 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2844 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2878 ('E', 'errorlog', '', _('name of error log file to write to')),
2845 ('E', 'errorlog', '', _('name of error log file to write to')),
2879 ('p', 'port', 0, _('port to use (default: 8000)')),
2846 ('p', 'port', 0, _('port to use (default: 8000)')),
2880 ('a', 'address', '', _('address to use')),
2847 ('a', 'address', '', _('address to use')),
2881 ('n', 'name', '',
2848 ('n', 'name', '',
2882 _('name to show in web pages (default: working dir)')),
2849 _('name to show in web pages (default: working dir)')),
2883 ('', 'webdir-conf', '', _('name of the webdir config file'
2850 ('', 'webdir-conf', '', _('name of the webdir config file'
2884 ' (serve more than one repo)')),
2851 ' (serve more than one repo)')),
2885 ('', 'pid-file', '', _('name of file to write process ID to')),
2852 ('', 'pid-file', '', _('name of file to write process ID to')),
2886 ('', 'stdio', None, _('for remote clients')),
2853 ('', 'stdio', None, _('for remote clients')),
2887 ('t', 'templates', '', _('web templates to use')),
2854 ('t', 'templates', '', _('web templates to use')),
2888 ('', 'style', '', _('template style to use')),
2855 ('', 'style', '', _('template style to use')),
2889 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2856 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2890 _('hg serve [OPTION]...')),
2857 _('hg serve [OPTION]...')),
2891 "^status|st":
2858 "^status|st":
2892 (status,
2859 (status,
2893 [('A', 'all', None, _('show status of all files')),
2860 [('A', 'all', None, _('show status of all files')),
2894 ('m', 'modified', None, _('show only modified files')),
2861 ('m', 'modified', None, _('show only modified files')),
2895 ('a', 'added', None, _('show only added files')),
2862 ('a', 'added', None, _('show only added files')),
2896 ('r', 'removed', None, _('show only removed files')),
2863 ('r', 'removed', None, _('show only removed files')),
2897 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2864 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2898 ('c', 'clean', None, _('show only files without changes')),
2865 ('c', 'clean', None, _('show only files without changes')),
2899 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2866 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2900 ('i', 'ignored', None, _('show ignored files')),
2867 ('i', 'ignored', None, _('show ignored files')),
2901 ('n', 'no-status', None, _('hide status prefix')),
2868 ('n', 'no-status', None, _('hide status prefix')),
2902 ('C', 'copies', None, _('show source of copied files')),
2869 ('C', 'copies', None, _('show source of copied files')),
2903 ('0', 'print0', None,
2870 ('0', 'print0', None,
2904 _('end filenames with NUL, for use with xargs')),
2871 _('end filenames with NUL, for use with xargs')),
2905 ('', 'rev', [], _('show difference from revision')),
2872 ('', 'rev', [], _('show difference from revision')),
2906 ] + walkopts,
2873 ] + walkopts,
2907 _('hg status [OPTION]... [FILE]...')),
2874 _('hg status [OPTION]... [FILE]...')),
2908 "tag":
2875 "tag":
2909 (tag,
2876 (tag,
2910 [('l', 'local', None, _('make the tag local')),
2877 [('l', 'local', None, _('make the tag local')),
2911 ('m', 'message', '', _('message for tag commit log entry')),
2878 ('m', 'message', '', _('message for tag commit log entry')),
2912 ('d', 'date', '', _('record datecode as commit date')),
2879 ('d', 'date', '', _('record datecode as commit date')),
2913 ('u', 'user', '', _('record user as committer')),
2880 ('u', 'user', '', _('record user as committer')),
2914 ('r', 'rev', '', _('revision to tag'))],
2881 ('r', 'rev', '', _('revision to tag'))],
2915 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2882 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2916 "tags": (tags, [], _('hg tags')),
2883 "tags": (tags, [], _('hg tags')),
2917 "tip":
2884 "tip":
2918 (tip,
2885 (tip,
2919 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2886 [('', 'style', '', _('display using template map file')),
2920 ('', 'style', '', _('display using template map file')),
2921 ('p', 'patch', None, _('show patch')),
2887 ('p', 'patch', None, _('show patch')),
2922 ('', 'template', '', _('display with template'))],
2888 ('', 'template', '', _('display with template'))],
2923 _('hg tip [-p]')),
2889 _('hg tip [-p]')),
2924 "unbundle":
2890 "unbundle":
2925 (unbundle,
2891 (unbundle,
2926 [('u', 'update', None,
2892 [('u', 'update', None,
2927 _('update to new tip if changesets were unbundled'))],
2893 _('update to new tip if changesets were unbundled'))],
2928 _('hg unbundle [-u] FILE')),
2894 _('hg unbundle [-u] FILE')),
2929 "^update|up|checkout|co":
2895 "^update|up|checkout|co":
2930 (update,
2896 (update,
2931 [('b', 'branch', '',
2897 [('C', 'clean', None, _('overwrite locally modified files')),
2932 _('checkout the head of a specific branch (DEPRECATED)')),
2933 ('C', 'clean', None, _('overwrite locally modified files')),
2934 ('d', 'date', '', _('tipmost revision matching date'))],
2898 ('d', 'date', '', _('tipmost revision matching date'))],
2935 _('hg update [-C] [REV]')),
2899 _('hg update [-C] [REV]')),
2936 "verify": (verify, [], _('hg verify')),
2900 "verify": (verify, [], _('hg verify')),
2937 "version": (version_, [], _('hg version')),
2901 "version": (version_, [], _('hg version')),
2938 }
2902 }
2939
2903
2940 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2904 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2941 " debugindex debugindexdot debugdate debuginstall")
2905 " debugindex debugindexdot debugdate debuginstall")
2942 optionalrepo = ("paths serve showconfig")
2906 optionalrepo = ("paths serve showconfig")
2943
2907
2944 def findpossible(ui, cmd):
2908 def findpossible(ui, cmd):
2945 """
2909 """
2946 Return cmd -> (aliases, command table entry)
2910 Return cmd -> (aliases, command table entry)
2947 for each matching command.
2911 for each matching command.
2948 Return debug commands (or their aliases) only if no normal command matches.
2912 Return debug commands (or their aliases) only if no normal command matches.
2949 """
2913 """
2950 choice = {}
2914 choice = {}
2951 debugchoice = {}
2915 debugchoice = {}
2952 for e in table.keys():
2916 for e in table.keys():
2953 aliases = e.lstrip("^").split("|")
2917 aliases = e.lstrip("^").split("|")
2954 found = None
2918 found = None
2955 if cmd in aliases:
2919 if cmd in aliases:
2956 found = cmd
2920 found = cmd
2957 elif not ui.config("ui", "strict"):
2921 elif not ui.config("ui", "strict"):
2958 for a in aliases:
2922 for a in aliases:
2959 if a.startswith(cmd):
2923 if a.startswith(cmd):
2960 found = a
2924 found = a
2961 break
2925 break
2962 if found is not None:
2926 if found is not None:
2963 if aliases[0].startswith("debug") or found.startswith("debug"):
2927 if aliases[0].startswith("debug") or found.startswith("debug"):
2964 debugchoice[found] = (aliases, table[e])
2928 debugchoice[found] = (aliases, table[e])
2965 else:
2929 else:
2966 choice[found] = (aliases, table[e])
2930 choice[found] = (aliases, table[e])
2967
2931
2968 if not choice and debugchoice:
2932 if not choice and debugchoice:
2969 choice = debugchoice
2933 choice = debugchoice
2970
2934
2971 return choice
2935 return choice
2972
2936
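A quick standalone sketch of the prefix matching implemented by findpossible() above: exact aliases win, otherwise any alias prefix matches unless ui.strict is set, and (not shown here) debug* commands are only offered when nothing else matched. The toy table entries below are made up for illustration.

# Toy command table using the same "^primary|alias" key convention as above;
# the keys and values here are hypothetical.
table = {"^status|st": "status entry", "^serve": "serve entry",
         "showconfig|debugconfig": "showconfig entry"}

def possible(cmd, strict=False):
    choice = {}
    for key, entry in table.items():
        aliases = key.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            choice[found] = (aliases, entry)
    return choice

print(sorted(possible("s")))               # ['serve', 'showconfig', 'status'] -> ambiguous
print(sorted(possible("st")))              # ['st'] -> unique, resolves to status
print(sorted(possible("s", strict=True)))  # [] -> no prefix matching in strict mode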
2973 def findcmd(ui, cmd):
2937 def findcmd(ui, cmd):
2974 """Return (aliases, command table entry) for command string."""
2938 """Return (aliases, command table entry) for command string."""
2975 choice = findpossible(ui, cmd)
2939 choice = findpossible(ui, cmd)
2976
2940
2977 if choice.has_key(cmd):
2941 if choice.has_key(cmd):
2978 return choice[cmd]
2942 return choice[cmd]
2979
2943
2980 if len(choice) > 1:
2944 if len(choice) > 1:
2981 clist = choice.keys()
2945 clist = choice.keys()
2982 clist.sort()
2946 clist.sort()
2983 raise AmbiguousCommand(cmd, clist)
2947 raise AmbiguousCommand(cmd, clist)
2984
2948
2985 if choice:
2949 if choice:
2986 return choice.values()[0]
2950 return choice.values()[0]
2987
2951
2988 raise UnknownCommand(cmd)
2952 raise UnknownCommand(cmd)
2989
2953
2990 def catchterm(*args):
2954 def catchterm(*args):
2991 raise util.SignalInterrupt
2955 raise util.SignalInterrupt
2992
2956
2993 def run():
2957 def run():
2994 sys.exit(dispatch(sys.argv[1:]))
2958 sys.exit(dispatch(sys.argv[1:]))
2995
2959
2996 class ParseError(Exception):
2960 class ParseError(Exception):
2997 """Exception raised on errors in parsing the command line."""
2961 """Exception raised on errors in parsing the command line."""
2998
2962
2999 def parse(ui, args):
2963 def parse(ui, args):
3000 options = {}
2964 options = {}
3001 cmdoptions = {}
2965 cmdoptions = {}
3002
2966
3003 try:
2967 try:
3004 args = fancyopts.fancyopts(args, globalopts, options)
2968 args = fancyopts.fancyopts(args, globalopts, options)
3005 except fancyopts.getopt.GetoptError, inst:
2969 except fancyopts.getopt.GetoptError, inst:
3006 raise ParseError(None, inst)
2970 raise ParseError(None, inst)
3007
2971
3008 if args:
2972 if args:
3009 cmd, args = args[0], args[1:]
2973 cmd, args = args[0], args[1:]
3010 aliases, i = findcmd(ui, cmd)
2974 aliases, i = findcmd(ui, cmd)
3011 cmd = aliases[0]
2975 cmd = aliases[0]
3012 defaults = ui.config("defaults", cmd)
2976 defaults = ui.config("defaults", cmd)
3013 if defaults:
2977 if defaults:
3014 args = shlex.split(defaults) + args
2978 args = shlex.split(defaults) + args
3015 c = list(i[1])
2979 c = list(i[1])
3016 else:
2980 else:
3017 cmd = None
2981 cmd = None
3018 c = []
2982 c = []
3019
2983
3020 # combine global options into local
2984 # combine global options into local
3021 for o in globalopts:
2985 for o in globalopts:
3022 c.append((o[0], o[1], options[o[1]], o[3]))
2986 c.append((o[0], o[1], options[o[1]], o[3]))
3023
2987
3024 try:
2988 try:
3025 args = fancyopts.fancyopts(args, c, cmdoptions)
2989 args = fancyopts.fancyopts(args, c, cmdoptions)
3026 except fancyopts.getopt.GetoptError, inst:
2990 except fancyopts.getopt.GetoptError, inst:
3027 raise ParseError(cmd, inst)
2991 raise ParseError(cmd, inst)
3028
2992
3029 # separate global options back out
2993 # separate global options back out
3030 for o in globalopts:
2994 for o in globalopts:
3031 n = o[1]
2995 n = o[1]
3032 options[n] = cmdoptions[n]
2996 options[n] = cmdoptions[n]
3033 del cmdoptions[n]
2997 del cmdoptions[n]
3034
2998
3035 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2999 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3036
3000
3037 external = {}
3001 external = {}
3038
3002
3039 def findext(name):
3003 def findext(name):
3040 '''return module with given extension name'''
3004 '''return module with given extension name'''
3041 try:
3005 try:
3042 return sys.modules[external[name]]
3006 return sys.modules[external[name]]
3043 except KeyError:
3007 except KeyError:
3044 for k, v in external.iteritems():
3008 for k, v in external.iteritems():
3045 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3009 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3046 return sys.modules[v]
3010 return sys.modules[v]
3047 raise KeyError(name)
3011 raise KeyError(name)
3048
3012
3049 def load_extensions(ui):
3013 def load_extensions(ui):
3050 added = []
3014 added = []
3051 for ext_name, load_from_name in ui.extensions():
3015 for ext_name, load_from_name in ui.extensions():
3052 if ext_name in external:
3016 if ext_name in external:
3053 continue
3017 continue
3054 try:
3018 try:
3055 if load_from_name:
3019 if load_from_name:
3056 # the module will be loaded in sys.modules
3020 # the module will be loaded in sys.modules
3057 # choose a unique name so that it doesn't
3021 # choose a unique name so that it doesn't
3058 # conflict with other modules
3022 # conflict with other modules
3059 module_name = "hgext_%s" % ext_name.replace('.', '_')
3023 module_name = "hgext_%s" % ext_name.replace('.', '_')
3060 mod = imp.load_source(module_name, load_from_name)
3024 mod = imp.load_source(module_name, load_from_name)
3061 else:
3025 else:
3062 def importh(name):
3026 def importh(name):
3063 mod = __import__(name)
3027 mod = __import__(name)
3064 components = name.split('.')
3028 components = name.split('.')
3065 for comp in components[1:]:
3029 for comp in components[1:]:
3066 mod = getattr(mod, comp)
3030 mod = getattr(mod, comp)
3067 return mod
3031 return mod
3068 try:
3032 try:
3069 mod = importh("hgext.%s" % ext_name)
3033 mod = importh("hgext.%s" % ext_name)
3070 except ImportError:
3034 except ImportError:
3071 mod = importh(ext_name)
3035 mod = importh(ext_name)
3072 external[ext_name] = mod.__name__
3036 external[ext_name] = mod.__name__
3073 added.append((mod, ext_name))
3037 added.append((mod, ext_name))
3074 except (util.SignalInterrupt, KeyboardInterrupt):
3038 except (util.SignalInterrupt, KeyboardInterrupt):
3075 raise
3039 raise
3076 except Exception, inst:
3040 except Exception, inst:
3077 ui.warn(_("*** failed to import extension %s: %s\n") %
3041 ui.warn(_("*** failed to import extension %s: %s\n") %
3078 (ext_name, inst))
3042 (ext_name, inst))
3079 if ui.print_exc():
3043 if ui.print_exc():
3080 return 1
3044 return 1
3081
3045
3082 for mod, name in added:
3046 for mod, name in added:
3083 uisetup = getattr(mod, 'uisetup', None)
3047 uisetup = getattr(mod, 'uisetup', None)
3084 if uisetup:
3048 if uisetup:
3085 uisetup(ui)
3049 uisetup(ui)
3086 cmdtable = getattr(mod, 'cmdtable', {})
3050 cmdtable = getattr(mod, 'cmdtable', {})
3087 for t in cmdtable:
3051 for t in cmdtable:
3088 if t in table:
3052 if t in table:
3089 ui.warn(_("module %s overrides %s\n") % (name, t))
3053 ui.warn(_("module %s overrides %s\n") % (name, t))
3090 table.update(cmdtable)
3054 table.update(cmdtable)
3091
3055
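One detail worth calling out from the importh() helper above: __import__('a.b') returns the top-level package a, which is why the code then walks down the dotted name with getattr. A self-contained illustration using only the standard library:

# __import__("os.path") hands back the top-level "os" module, so we descend
# attribute by attribute to reach the submodule, exactly as importh() does.
def importh(name):
    mod = __import__(name)
    for comp in name.split('.')[1:]:
        mod = getattr(mod, comp)
    return mod

assert importh("os.path") is __import__("os").path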
3092 def parseconfig(config):
3056 def parseconfig(config):
3093 """parse the --config options from the command line"""
3057 """parse the --config options from the command line"""
3094 parsed = []
3058 parsed = []
3095 for cfg in config:
3059 for cfg in config:
3096 try:
3060 try:
3097 name, value = cfg.split('=', 1)
3061 name, value = cfg.split('=', 1)
3098 section, name = name.split('.', 1)
3062 section, name = name.split('.', 1)
3099 if not section or not name:
3063 if not section or not name:
3100 raise IndexError
3064 raise IndexError
3101 parsed.append((section, name, value))
3065 parsed.append((section, name, value))
3102 except (IndexError, ValueError):
3066 except (IndexError, ValueError):
3103 raise util.Abort(_('malformed --config option: %s') % cfg)
3067 raise util.Abort(_('malformed --config option: %s') % cfg)
3104 return parsed
3068 return parsed
3105
3069
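The splitting done by parseconfig() above, traced on one sample argument (the option name and value are hypothetical):

# --config arguments are "section.name=value"; split on the first '=' and '.'.
cfg = "ui.username=alice"                 # hypothetical option
name, value = cfg.split('=', 1)           # 'ui.username', 'alice'
section, name = name.split('.', 1)        # 'ui', 'username'
print((section, name, value))             # ('ui', 'username', 'alice')
# Anything missing either separator (e.g. "uiusername=alice" or "ui.username")
# falls into the except clause and aborts with "malformed --config option".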
3106 def dispatch(args):
3070 def dispatch(args):
3107 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3071 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3108 num = getattr(signal, name, None)
3072 num = getattr(signal, name, None)
3109 if num: signal.signal(num, catchterm)
3073 if num: signal.signal(num, catchterm)
3110
3074
3111 try:
3075 try:
3112 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3076 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3113 except util.Abort, inst:
3077 except util.Abort, inst:
3114 sys.stderr.write(_("abort: %s\n") % inst)
3078 sys.stderr.write(_("abort: %s\n") % inst)
3115 return -1
3079 return -1
3116
3080
3117 load_extensions(u)
3081 load_extensions(u)
3118 u.addreadhook(load_extensions)
3082 u.addreadhook(load_extensions)
3119
3083
3120 try:
3084 try:
3121 cmd, func, args, options, cmdoptions = parse(u, args)
3085 cmd, func, args, options, cmdoptions = parse(u, args)
3122 if options["encoding"]:
3086 if options["encoding"]:
3123 util._encoding = options["encoding"]
3087 util._encoding = options["encoding"]
3124 if options["encodingmode"]:
3088 if options["encodingmode"]:
3125 util._encodingmode = options["encodingmode"]
3089 util._encodingmode = options["encodingmode"]
3126 if options["time"]:
3090 if options["time"]:
3127 def get_times():
3091 def get_times():
3128 t = os.times()
3092 t = os.times()
3129 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3093 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3130 t = (t[0], t[1], t[2], t[3], time.clock())
3094 t = (t[0], t[1], t[2], t[3], time.clock())
3131 return t
3095 return t
3132 s = get_times()
3096 s = get_times()
3133 def print_time():
3097 def print_time():
3134 t = get_times()
3098 t = get_times()
3135 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3099 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3136 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3100 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3137 atexit.register(print_time)
3101 atexit.register(print_time)
3138
3102
3139 # enter the debugger before command execution
3103 # enter the debugger before command execution
3140 if options['debugger']:
3104 if options['debugger']:
3141 pdb.set_trace()
3105 pdb.set_trace()
3142
3106
3143 try:
3107 try:
3144 if options['cwd']:
3108 if options['cwd']:
3145 os.chdir(options['cwd'])
3109 os.chdir(options['cwd'])
3146
3110
3147 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3111 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3148 not options["noninteractive"], options["traceback"],
3112 not options["noninteractive"], options["traceback"],
3149 parseconfig(options["config"]))
3113 parseconfig(options["config"]))
3150
3114
3151 path = u.expandpath(options["repository"]) or ""
3115 path = u.expandpath(options["repository"]) or ""
3152 repo = path and hg.repository(u, path=path) or None
3116 repo = path and hg.repository(u, path=path) or None
3153 if repo and not repo.local():
3117 if repo and not repo.local():
3154 raise util.Abort(_("repository '%s' is not local") % path)
3118 raise util.Abort(_("repository '%s' is not local") % path)
3155
3119
3156 if options['help']:
3120 if options['help']:
3157 return help_(u, cmd, options['version'])
3121 return help_(u, cmd, options['version'])
3158 elif options['version']:
3122 elif options['version']:
3159 return version_(u)
3123 return version_(u)
3160 elif not cmd:
3124 elif not cmd:
3161 return help_(u, 'shortlist')
3125 return help_(u, 'shortlist')
3162
3126
3163 if cmd not in norepo.split():
3127 if cmd not in norepo.split():
3164 try:
3128 try:
3165 if not repo:
3129 if not repo:
3166 repo = hg.repository(u, path=path)
3130 repo = hg.repository(u, path=path)
3167 u = repo.ui
3131 u = repo.ui
3168 for name in external.itervalues():
3132 for name in external.itervalues():
3169 mod = sys.modules[name]
3133 mod = sys.modules[name]
3170 if hasattr(mod, 'reposetup'):
3134 if hasattr(mod, 'reposetup'):
3171 mod.reposetup(u, repo)
3135 mod.reposetup(u, repo)
3172 hg.repo_setup_hooks.append(mod.reposetup)
3136 hg.repo_setup_hooks.append(mod.reposetup)
3173 except hg.RepoError:
3137 except hg.RepoError:
3174 if cmd not in optionalrepo.split():
3138 if cmd not in optionalrepo.split():
3175 raise
3139 raise
3176 d = lambda: func(u, repo, *args, **cmdoptions)
3140 d = lambda: func(u, repo, *args, **cmdoptions)
3177 else:
3141 else:
3178 d = lambda: func(u, *args, **cmdoptions)
3142 d = lambda: func(u, *args, **cmdoptions)
3179
3143
3180 try:
3144 try:
3181 if options['profile']:
3145 if options['profile']:
3182 import hotshot, hotshot.stats
3146 import hotshot, hotshot.stats
3183 prof = hotshot.Profile("hg.prof")
3147 prof = hotshot.Profile("hg.prof")
3184 try:
3148 try:
3185 try:
3149 try:
3186 return prof.runcall(d)
3150 return prof.runcall(d)
3187 except:
3151 except:
3188 try:
3152 try:
3189 u.warn(_('exception raised - generating '
3153 u.warn(_('exception raised - generating '
3190 'profile anyway\n'))
3154 'profile anyway\n'))
3191 except:
3155 except:
3192 pass
3156 pass
3193 raise
3157 raise
3194 finally:
3158 finally:
3195 prof.close()
3159 prof.close()
3196 stats = hotshot.stats.load("hg.prof")
3160 stats = hotshot.stats.load("hg.prof")
3197 stats.strip_dirs()
3161 stats.strip_dirs()
3198 stats.sort_stats('time', 'calls')
3162 stats.sort_stats('time', 'calls')
3199 stats.print_stats(40)
3163 stats.print_stats(40)
3200 elif options['lsprof']:
3164 elif options['lsprof']:
3201 try:
3165 try:
3202 from mercurial import lsprof
3166 from mercurial import lsprof
3203 except ImportError:
3167 except ImportError:
3204 raise util.Abort(_(
3168 raise util.Abort(_(
3205 'lsprof not available - install from '
3169 'lsprof not available - install from '
3206 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3170 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3207 p = lsprof.Profiler()
3171 p = lsprof.Profiler()
3208 p.enable(subcalls=True)
3172 p.enable(subcalls=True)
3209 try:
3173 try:
3210 return d()
3174 return d()
3211 finally:
3175 finally:
3212 p.disable()
3176 p.disable()
3213 stats = lsprof.Stats(p.getstats())
3177 stats = lsprof.Stats(p.getstats())
3214 stats.sort()
3178 stats.sort()
3215 stats.pprint(top=10, file=sys.stderr, climit=5)
3179 stats.pprint(top=10, file=sys.stderr, climit=5)
3216 else:
3180 else:
3217 return d()
3181 return d()
3218 finally:
3182 finally:
3219 u.flush()
3183 u.flush()
3220 except:
3184 except:
3221 # enter the debugger when we hit an exception
3185 # enter the debugger when we hit an exception
3222 if options['debugger']:
3186 if options['debugger']:
3223 pdb.post_mortem(sys.exc_info()[2])
3187 pdb.post_mortem(sys.exc_info()[2])
3224 u.print_exc()
3188 u.print_exc()
3225 raise
3189 raise
3226 except ParseError, inst:
3190 except ParseError, inst:
3227 if inst.args[0]:
3191 if inst.args[0]:
3228 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3192 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3229 help_(u, inst.args[0])
3193 help_(u, inst.args[0])
3230 else:
3194 else:
3231 u.warn(_("hg: %s\n") % inst.args[1])
3195 u.warn(_("hg: %s\n") % inst.args[1])
3232 help_(u, 'shortlist')
3196 help_(u, 'shortlist')
3233 except AmbiguousCommand, inst:
3197 except AmbiguousCommand, inst:
3234 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3198 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3235 (inst.args[0], " ".join(inst.args[1])))
3199 (inst.args[0], " ".join(inst.args[1])))
3236 except UnknownCommand, inst:
3200 except UnknownCommand, inst:
3237 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3201 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3238 help_(u, 'shortlist')
3202 help_(u, 'shortlist')
3239 except hg.RepoError, inst:
3203 except hg.RepoError, inst:
3240 u.warn(_("abort: %s!\n") % inst)
3204 u.warn(_("abort: %s!\n") % inst)
3241 except lock.LockHeld, inst:
3205 except lock.LockHeld, inst:
3242 if inst.errno == errno.ETIMEDOUT:
3206 if inst.errno == errno.ETIMEDOUT:
3243 reason = _('timed out waiting for lock held by %s') % inst.locker
3207 reason = _('timed out waiting for lock held by %s') % inst.locker
3244 else:
3208 else:
3245 reason = _('lock held by %s') % inst.locker
3209 reason = _('lock held by %s') % inst.locker
3246 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3210 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3247 except lock.LockUnavailable, inst:
3211 except lock.LockUnavailable, inst:
3248 u.warn(_("abort: could not lock %s: %s\n") %
3212 u.warn(_("abort: could not lock %s: %s\n") %
3249 (inst.desc or inst.filename, inst.strerror))
3213 (inst.desc or inst.filename, inst.strerror))
3250 except revlog.RevlogError, inst:
3214 except revlog.RevlogError, inst:
3251 u.warn(_("abort: %s!\n") % inst)
3215 u.warn(_("abort: %s!\n") % inst)
3252 except util.SignalInterrupt:
3216 except util.SignalInterrupt:
3253 u.warn(_("killed!\n"))
3217 u.warn(_("killed!\n"))
3254 except KeyboardInterrupt:
3218 except KeyboardInterrupt:
3255 try:
3219 try:
3256 u.warn(_("interrupted!\n"))
3220 u.warn(_("interrupted!\n"))
3257 except IOError, inst:
3221 except IOError, inst:
3258 if inst.errno == errno.EPIPE:
3222 if inst.errno == errno.EPIPE:
3259 if u.debugflag:
3223 if u.debugflag:
3260 u.warn(_("\nbroken pipe\n"))
3224 u.warn(_("\nbroken pipe\n"))
3261 else:
3225 else:
3262 raise
3226 raise
3263 except IOError, inst:
3227 except IOError, inst:
3264 if hasattr(inst, "code"):
3228 if hasattr(inst, "code"):
3265 u.warn(_("abort: %s\n") % inst)
3229 u.warn(_("abort: %s\n") % inst)
3266 elif hasattr(inst, "reason"):
3230 elif hasattr(inst, "reason"):
3267 u.warn(_("abort: error: %s\n") % inst.reason[1])
3231 u.warn(_("abort: error: %s\n") % inst.reason[1])
3268 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3232 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3269 if u.debugflag:
3233 if u.debugflag:
3270 u.warn(_("broken pipe\n"))
3234 u.warn(_("broken pipe\n"))
3271 elif getattr(inst, "strerror", None):
3235 elif getattr(inst, "strerror", None):
3272 if getattr(inst, "filename", None):
3236 if getattr(inst, "filename", None):
3273 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3237 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3274 else:
3238 else:
3275 u.warn(_("abort: %s\n") % inst.strerror)
3239 u.warn(_("abort: %s\n") % inst.strerror)
3276 else:
3240 else:
3277 raise
3241 raise
3278 except OSError, inst:
3242 except OSError, inst:
3279 if getattr(inst, "filename", None):
3243 if getattr(inst, "filename", None):
3280 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3244 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3281 else:
3245 else:
3282 u.warn(_("abort: %s\n") % inst.strerror)
3246 u.warn(_("abort: %s\n") % inst.strerror)
3283 except util.UnexpectedOutput, inst:
3247 except util.UnexpectedOutput, inst:
3284 u.warn(_("abort: %s") % inst[0])
3248 u.warn(_("abort: %s") % inst[0])
3285 if not isinstance(inst[1], basestring):
3249 if not isinstance(inst[1], basestring):
3286 u.warn(" %r\n" % (inst[1],))
3250 u.warn(" %r\n" % (inst[1],))
3287 elif not inst[1]:
3251 elif not inst[1]:
3288 u.warn(_(" empty string\n"))
3252 u.warn(_(" empty string\n"))
3289 else:
3253 else:
3290 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3254 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3291 except util.Abort, inst:
3255 except util.Abort, inst:
3292 u.warn(_("abort: %s\n") % inst)
3256 u.warn(_("abort: %s\n") % inst)
3293 except TypeError, inst:
3257 except TypeError, inst:
3294 # was this an argument error?
3258 # was this an argument error?
3295 tb = traceback.extract_tb(sys.exc_info()[2])
3259 tb = traceback.extract_tb(sys.exc_info()[2])
3296 if len(tb) > 2: # no
3260 if len(tb) > 2: # no
3297 raise
3261 raise
3298 u.debug(inst, "\n")
3262 u.debug(inst, "\n")
3299 u.warn(_("%s: invalid arguments\n") % cmd)
3263 u.warn(_("%s: invalid arguments\n") % cmd)
3300 help_(u, cmd)
3264 help_(u, cmd)
3301 except SystemExit, inst:
3265 except SystemExit, inst:
3302 # Commands shouldn't sys.exit directly, but give a return code.
3266 # Commands shouldn't sys.exit directly, but give a return code.
3303 # Just in case, catch this and pass the exit code to the caller.
3267 # Just in case, catch this and pass the exit code to the caller.
3304 return inst.code
3268 return inst.code
3305 except:
3269 except:
3306 u.warn(_("** unknown exception encountered, details follow\n"))
3270 u.warn(_("** unknown exception encountered, details follow\n"))
3307 u.warn(_("** report bug details to "
3271 u.warn(_("** report bug details to "
3308 "http://www.selenic.com/mercurial/bts\n"))
3272 "http://www.selenic.com/mercurial/bts\n"))
3309 u.warn(_("** or mercurial@selenic.com\n"))
3273 u.warn(_("** or mercurial@selenic.com\n"))
3310 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3274 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3311 % version.get_version())
3275 % version.get_version())
3312 raise
3276 raise
3313
3277
3314 return -1
3278 return -1
@@ -1,1971 +1,1865 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ('lookup', 'changegroupsubset')
18 capabilities = ('lookup', 'changegroupsubset')
19 supported = ('revlogv1', 'store')
19 supported = ('revlogv1', 'store')
20
20
21 def __del__(self):
21 def __del__(self):
22 self.transhandle = None
22 self.transhandle = None
23 def __init__(self, parentui, path=None, create=0):
23 def __init__(self, parentui, path=None, create=0):
24 repo.repository.__init__(self)
24 repo.repository.__init__(self)
25 if not path:
25 if not path:
26 p = os.getcwd()
26 p = os.getcwd()
27 while not os.path.isdir(os.path.join(p, ".hg")):
27 while not os.path.isdir(os.path.join(p, ".hg")):
28 oldp = p
28 oldp = p
29 p = os.path.dirname(p)
29 p = os.path.dirname(p)
30 if p == oldp:
30 if p == oldp:
31 raise repo.RepoError(_("There is no Mercurial repository"
31 raise repo.RepoError(_("There is no Mercurial repository"
32 " here (.hg not found)"))
32 " here (.hg not found)"))
33 path = p
33 path = p
34
34
35 self.path = os.path.join(path, ".hg")
35 self.path = os.path.join(path, ".hg")
36 self.root = os.path.realpath(path)
36 self.root = os.path.realpath(path)
37 self.origroot = path
37 self.origroot = path
38 self.opener = util.opener(self.path)
38 self.opener = util.opener(self.path)
39 self.wopener = util.opener(self.root)
39 self.wopener = util.opener(self.root)
40
40
41 if not os.path.isdir(self.path):
41 if not os.path.isdir(self.path):
42 if create:
42 if create:
43 if not os.path.exists(path):
43 if not os.path.exists(path):
44 os.mkdir(path)
44 os.mkdir(path)
45 os.mkdir(self.path)
45 os.mkdir(self.path)
46 os.mkdir(os.path.join(self.path, "store"))
46 os.mkdir(os.path.join(self.path, "store"))
47 requirements = ("revlogv1", "store")
47 requirements = ("revlogv1", "store")
48 reqfile = self.opener("requires", "w")
48 reqfile = self.opener("requires", "w")
49 for r in requirements:
49 for r in requirements:
50 reqfile.write("%s\n" % r)
50 reqfile.write("%s\n" % r)
51 reqfile.close()
51 reqfile.close()
52 # create an invalid changelog
52 # create an invalid changelog
53 self.opener("00changelog.i", "a").write(
53 self.opener("00changelog.i", "a").write(
54 '\0\0\0\2' # represents revlogv2
54 '\0\0\0\2' # represents revlogv2
55 ' dummy changelog to prevent using the old repo layout'
55 ' dummy changelog to prevent using the old repo layout'
56 )
56 )
57 else:
57 else:
58 raise repo.RepoError(_("repository %s not found") % path)
58 raise repo.RepoError(_("repository %s not found") % path)
59 elif create:
59 elif create:
60 raise repo.RepoError(_("repository %s already exists") % path)
60 raise repo.RepoError(_("repository %s already exists") % path)
61 else:
61 else:
62 # find requirements
62 # find requirements
63 try:
63 try:
64 requirements = self.opener("requires").read().splitlines()
64 requirements = self.opener("requires").read().splitlines()
65 except IOError, inst:
65 except IOError, inst:
66 if inst.errno != errno.ENOENT:
66 if inst.errno != errno.ENOENT:
67 raise
67 raise
68 requirements = []
68 requirements = []
69 # check them
69 # check them
70 for r in requirements:
70 for r in requirements:
71 if r not in self.supported:
71 if r not in self.supported:
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
73
73
74 # setup store
74 # setup store
75 if "store" in requirements:
75 if "store" in requirements:
76 self.encodefn = util.encodefilename
76 self.encodefn = util.encodefilename
77 self.decodefn = util.decodefilename
77 self.decodefn = util.decodefilename
78 self.spath = os.path.join(self.path, "store")
78 self.spath = os.path.join(self.path, "store")
79 else:
79 else:
80 self.encodefn = lambda x: x
80 self.encodefn = lambda x: x
81 self.decodefn = lambda x: x
81 self.decodefn = lambda x: x
82 self.spath = self.path
82 self.spath = self.path
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
84
84
85 self.ui = ui.ui(parentui=parentui)
85 self.ui = ui.ui(parentui=parentui)
86 try:
86 try:
87 self.ui.readconfig(self.join("hgrc"), self.root)
87 self.ui.readconfig(self.join("hgrc"), self.root)
88 except IOError:
88 except IOError:
89 pass
89 pass
90
90
91 v = self.ui.configrevlog()
91 v = self.ui.configrevlog()
92 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
92 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
93 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
93 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
94 fl = v.get('flags', None)
94 fl = v.get('flags', None)
95 flags = 0
95 flags = 0
96 if fl != None:
96 if fl != None:
97 for x in fl.split():
97 for x in fl.split():
98 flags |= revlog.flagstr(x)
98 flags |= revlog.flagstr(x)
99 elif self.revlogv1:
99 elif self.revlogv1:
100 flags = revlog.REVLOG_DEFAULT_FLAGS
100 flags = revlog.REVLOG_DEFAULT_FLAGS
101
101
102 v = self.revlogversion | flags
102 v = self.revlogversion | flags
103 self.manifest = manifest.manifest(self.sopener, v)
103 self.manifest = manifest.manifest(self.sopener, v)
104 self.changelog = changelog.changelog(self.sopener, v)
104 self.changelog = changelog.changelog(self.sopener, v)
105
105
106 fallback = self.ui.config('ui', 'fallbackencoding')
106 fallback = self.ui.config('ui', 'fallbackencoding')
107 if fallback:
107 if fallback:
108 util._fallbackencoding = fallback
108 util._fallbackencoding = fallback
109
109
110 # the changelog might not have the inline index flag
110 # the changelog might not have the inline index flag
111 # on. If the format of the changelog is the same as found in
111 # on. If the format of the changelog is the same as found in
112 # .hgrc, apply any flags found in the .hgrc as well.
112 # .hgrc, apply any flags found in the .hgrc as well.
113 # Otherwise, just use the version from the changelog
113 # Otherwise, just use the version from the changelog
114 v = self.changelog.version
114 v = self.changelog.version
115 if v == self.revlogversion:
115 if v == self.revlogversion:
116 v |= flags
116 v |= flags
117 self.revlogversion = v
117 self.revlogversion = v
118
118
119 self.tagscache = None
119 self.tagscache = None
120 self.branchcache = None
120 self.branchcache = None
121 self.nodetagscache = None
121 self.nodetagscache = None
122 self.encodepats = None
122 self.encodepats = None
123 self.decodepats = None
123 self.decodepats = None
124 self.transhandle = None
124 self.transhandle = None
125
125
126 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
126 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
127
127
128 def url(self):
128 def url(self):
129 return 'file:' + self.root
129 return 'file:' + self.root
130
130
131 def hook(self, name, throw=False, **args):
131 def hook(self, name, throw=False, **args):
132 def callhook(hname, funcname):
132 def callhook(hname, funcname):
133 '''call python hook. hook is callable object, looked up as
133 '''call python hook. hook is callable object, looked up as
134 name in python module. if callable returns "true", hook
134 name in python module. if callable returns "true", hook
135 fails, else passes. if hook raises exception, treated as
135 fails, else passes. if hook raises exception, treated as
136 hook failure. exception propagates if throw is "true".
136 hook failure. exception propagates if throw is "true".
137
137
138 reason for "true" meaning "hook failed" is so that
138 reason for "true" meaning "hook failed" is so that
139 unmodified commands (e.g. mercurial.commands.update) can
139 unmodified commands (e.g. mercurial.commands.update) can
140 be run as hooks without wrappers to convert return values.'''
140 be run as hooks without wrappers to convert return values.'''
141
141
142 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
142 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
143 d = funcname.rfind('.')
143 d = funcname.rfind('.')
144 if d == -1:
144 if d == -1:
145 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
145 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
146 % (hname, funcname))
146 % (hname, funcname))
147 modname = funcname[:d]
147 modname = funcname[:d]
148 try:
148 try:
149 obj = __import__(modname)
149 obj = __import__(modname)
150 except ImportError:
150 except ImportError:
151 try:
151 try:
152 # extensions are loaded with hgext_ prefix
152 # extensions are loaded with hgext_ prefix
153 obj = __import__("hgext_%s" % modname)
153 obj = __import__("hgext_%s" % modname)
154 except ImportError:
154 except ImportError:
155 raise util.Abort(_('%s hook is invalid '
155 raise util.Abort(_('%s hook is invalid '
156 '(import of "%s" failed)') %
156 '(import of "%s" failed)') %
157 (hname, modname))
157 (hname, modname))
158 try:
158 try:
159 for p in funcname.split('.')[1:]:
159 for p in funcname.split('.')[1:]:
160 obj = getattr(obj, p)
160 obj = getattr(obj, p)
161 except AttributeError, err:
161 except AttributeError, err:
162 raise util.Abort(_('%s hook is invalid '
162 raise util.Abort(_('%s hook is invalid '
163 '("%s" is not defined)') %
163 '("%s" is not defined)') %
164 (hname, funcname))
164 (hname, funcname))
165 if not callable(obj):
165 if not callable(obj):
166 raise util.Abort(_('%s hook is invalid '
166 raise util.Abort(_('%s hook is invalid '
167 '("%s" is not callable)') %
167 '("%s" is not callable)') %
168 (hname, funcname))
168 (hname, funcname))
169 try:
169 try:
170 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
170 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
171 except (KeyboardInterrupt, util.SignalInterrupt):
171 except (KeyboardInterrupt, util.SignalInterrupt):
172 raise
172 raise
173 except Exception, exc:
173 except Exception, exc:
174 if isinstance(exc, util.Abort):
174 if isinstance(exc, util.Abort):
175 self.ui.warn(_('error: %s hook failed: %s\n') %
175 self.ui.warn(_('error: %s hook failed: %s\n') %
176 (hname, exc.args[0]))
176 (hname, exc.args[0]))
177 else:
177 else:
178 self.ui.warn(_('error: %s hook raised an exception: '
178 self.ui.warn(_('error: %s hook raised an exception: '
179 '%s\n') % (hname, exc))
179 '%s\n') % (hname, exc))
180 if throw:
180 if throw:
181 raise
181 raise
182 self.ui.print_exc()
182 self.ui.print_exc()
183 return True
183 return True
184 if r:
184 if r:
185 if throw:
185 if throw:
186 raise util.Abort(_('%s hook failed') % hname)
186 raise util.Abort(_('%s hook failed') % hname)
187 self.ui.warn(_('warning: %s hook failed\n') % hname)
187 self.ui.warn(_('warning: %s hook failed\n') % hname)
188 return r
188 return r
189
189
190 def runhook(name, cmd):
190 def runhook(name, cmd):
191 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
191 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
192 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
192 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
193 r = util.system(cmd, environ=env, cwd=self.root)
193 r = util.system(cmd, environ=env, cwd=self.root)
194 if r:
194 if r:
195 desc, r = util.explain_exit(r)
195 desc, r = util.explain_exit(r)
196 if throw:
196 if throw:
197 raise util.Abort(_('%s hook %s') % (name, desc))
197 raise util.Abort(_('%s hook %s') % (name, desc))
198 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
198 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
199 return r
199 return r
200
200
201 r = False
201 r = False
202 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
202 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
203 if hname.split(".", 1)[0] == name and cmd]
203 if hname.split(".", 1)[0] == name and cmd]
204 hooks.sort()
204 hooks.sort()
205 for hname, cmd in hooks:
205 for hname, cmd in hooks:
206 if cmd.startswith('python:'):
206 if cmd.startswith('python:'):
207 r = callhook(hname, cmd[7:].strip()) or r
207 r = callhook(hname, cmd[7:].strip()) or r
208 else:
208 else:
209 r = runhook(hname, cmd) or r
209 r = runhook(hname, cmd) or r
210 return r
210 return r
211
211
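For reference, the calling convention in callhook() above means an in-process hook is simply a callable invoked as obj(ui=..., repo=..., hooktype=..., **args) whose truthy return value marks failure. A minimal sketch; the module, hook, and hgrc entry names are hypothetical:

# A minimal python hook compatible with callhook() above.  It would be wired
# up through a hypothetical hgrc entry such as:
#   [hooks]
#   pretag.check = python:myhooks.blocktag
def blocktag(ui, repo, hooktype, node=None, tag=None, **kwargs):
    if tag == 'tip':
        ui.warn("refusing to create a tag named 'tip'\n")
        return True    # true return value => hook failed
    return False       # false/None => hook passed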
212 tag_disallowed = ':\r\n'
212 tag_disallowed = ':\r\n'
213
213
214 def tag(self, name, node, message, local, user, date):
214 def tag(self, name, node, message, local, user, date):
215 '''tag a revision with a symbolic name.
215 '''tag a revision with a symbolic name.
216
216
217 if local is True, the tag is stored in a per-repository file.
217 if local is True, the tag is stored in a per-repository file.
218 otherwise, it is stored in the .hgtags file, and a new
218 otherwise, it is stored in the .hgtags file, and a new
219 changeset is committed with the change.
219 changeset is committed with the change.
220
220
221 keyword arguments:
221 keyword arguments:
222
222
223 local: whether to store tag in non-version-controlled file
223 local: whether to store tag in non-version-controlled file
224 (default False)
224 (default False)
225
225
226 message: commit message to use if committing
226 message: commit message to use if committing
227
227
228 user: name of user to use if committing
228 user: name of user to use if committing
229
229
230 date: date tuple to use if committing'''
230 date: date tuple to use if committing'''
231
231
232 for c in self.tag_disallowed:
232 for c in self.tag_disallowed:
233 if c in name:
233 if c in name:
234 raise util.Abort(_('%r cannot be used in a tag name') % c)
234 raise util.Abort(_('%r cannot be used in a tag name') % c)
235
235
236 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
236 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
237
237
238 if local:
238 if local:
239 # local tags are stored in the current charset
239 # local tags are stored in the current charset
240 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
240 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
241 self.hook('tag', node=hex(node), tag=name, local=local)
241 self.hook('tag', node=hex(node), tag=name, local=local)
242 return
242 return
243
243
244 for x in self.status()[:5]:
244 for x in self.status()[:5]:
245 if '.hgtags' in x:
245 if '.hgtags' in x:
246 raise util.Abort(_('working copy of .hgtags is changed '
246 raise util.Abort(_('working copy of .hgtags is changed '
247 '(please commit .hgtags manually)'))
247 '(please commit .hgtags manually)'))
248
248
249 # committed tags are stored in UTF-8
249 # committed tags are stored in UTF-8
250 line = '%s %s\n' % (hex(node), util.fromlocal(name))
250 line = '%s %s\n' % (hex(node), util.fromlocal(name))
251 self.wfile('.hgtags', 'ab').write(line)
251 self.wfile('.hgtags', 'ab').write(line)
252 if self.dirstate.state('.hgtags') == '?':
252 if self.dirstate.state('.hgtags') == '?':
253 self.add(['.hgtags'])
253 self.add(['.hgtags'])
254
254
255 self.commit(['.hgtags'], message, user, date)
255 self.commit(['.hgtags'], message, user, date)
256 self.hook('tag', node=hex(node), tag=name, local=local)
256 self.hook('tag', node=hex(node), tag=name, local=local)
257
257
258 def tags(self):
258 def tags(self):
259 '''return a mapping of tag to node'''
259 '''return a mapping of tag to node'''
260 if not self.tagscache:
260 if not self.tagscache:
261 self.tagscache = {}
261 self.tagscache = {}
262
262
263 def parsetag(line, context):
263 def parsetag(line, context):
264 if not line:
264 if not line:
265 return
265 return
266 s = l.split(" ", 1)
266 s = l.split(" ", 1)
267 if len(s) != 2:
267 if len(s) != 2:
268 self.ui.warn(_("%s: cannot parse entry\n") % context)
268 self.ui.warn(_("%s: cannot parse entry\n") % context)
269 return
269 return
270 node, key = s
270 node, key = s
271 key = util.tolocal(key.strip()) # stored in UTF-8
271 key = util.tolocal(key.strip()) # stored in UTF-8
272 try:
272 try:
273 bin_n = bin(node)
273 bin_n = bin(node)
274 except TypeError:
274 except TypeError:
275 self.ui.warn(_("%s: node '%s' is not well formed\n") %
275 self.ui.warn(_("%s: node '%s' is not well formed\n") %
276 (context, node))
276 (context, node))
277 return
277 return
278 if bin_n not in self.changelog.nodemap:
278 if bin_n not in self.changelog.nodemap:
279 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
279 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
280 (context, key))
280 (context, key))
281 return
281 return
282 self.tagscache[key] = bin_n
282 self.tagscache[key] = bin_n
283
283
284 # read the tags file from each head, ending with the tip,
284 # read the tags file from each head, ending with the tip,
285 # and add each tag found to the map, with "newer" ones
285 # and add each tag found to the map, with "newer" ones
286 # taking precedence
286 # taking precedence
287 f = None
287 f = None
288 for rev, node, fnode in self._hgtagsnodes():
288 for rev, node, fnode in self._hgtagsnodes():
289 f = (f and f.filectx(fnode) or
289 f = (f and f.filectx(fnode) or
290 self.filectx('.hgtags', fileid=fnode))
290 self.filectx('.hgtags', fileid=fnode))
291 count = 0
291 count = 0
292 for l in f.data().splitlines():
292 for l in f.data().splitlines():
293 count += 1
293 count += 1
294 parsetag(l, _("%s, line %d") % (str(f), count))
294 parsetag(l, _("%s, line %d") % (str(f), count))
295
295
296 try:
296 try:
297 f = self.opener("localtags")
297 f = self.opener("localtags")
298 count = 0
298 count = 0
299 for l in f:
299 for l in f:
300 # localtags are stored in the local character set
300 # localtags are stored in the local character set
301 # while the internal tag table is stored in UTF-8
301 # while the internal tag table is stored in UTF-8
302 l = util.fromlocal(l)
302 l = util.fromlocal(l)
303 count += 1
303 count += 1
304 parsetag(l, _("localtags, line %d") % count)
304 parsetag(l, _("localtags, line %d") % count)
305 except IOError:
305 except IOError:
306 pass
306 pass
307
307
308 self.tagscache['tip'] = self.changelog.tip()
308 self.tagscache['tip'] = self.changelog.tip()
309
309
310 return self.tagscache
310 return self.tagscache
311
311
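The parsetag() helper above reads one '<40-hex node> <tag name>' pair per line from .hgtags and localtags, with entries from newer heads overriding older ones. A tiny standalone split, using a placeholder node:

# Layout of a single .hgtags / localtags line (the node value is a placeholder).
line = "0123456789abcdef0123456789abcdef01234567 release-1.0\n"
node, key = line.split(" ", 1)
print(len(node), repr(key.strip()))   # 40 'release-1.0'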
312 def _hgtagsnodes(self):
312 def _hgtagsnodes(self):
313 heads = self.heads()
313 heads = self.heads()
314 heads.reverse()
314 heads.reverse()
315 last = {}
315 last = {}
316 ret = []
316 ret = []
317 for node in heads:
317 for node in heads:
318 c = self.changectx(node)
318 c = self.changectx(node)
319 rev = c.rev()
319 rev = c.rev()
320 try:
320 try:
321 fnode = c.filenode('.hgtags')
321 fnode = c.filenode('.hgtags')
322 except repo.LookupError:
322 except repo.LookupError:
323 continue
323 continue
324 ret.append((rev, node, fnode))
324 ret.append((rev, node, fnode))
325 if fnode in last:
325 if fnode in last:
326 ret[last[fnode]] = None
326 ret[last[fnode]] = None
327 last[fnode] = len(ret) - 1
327 last[fnode] = len(ret) - 1
328 return [item for item in ret if item]
328 return [item for item in ret if item]
329
329
330 def tagslist(self):
330 def tagslist(self):
331 '''return a list of tags ordered by revision'''
331 '''return a list of tags ordered by revision'''
332 l = []
332 l = []
333 for t, n in self.tags().items():
333 for t, n in self.tags().items():
334 try:
334 try:
335 r = self.changelog.rev(n)
335 r = self.changelog.rev(n)
336 except:
336 except:
337 r = -2 # sort to the beginning of the list if unknown
337 r = -2 # sort to the beginning of the list if unknown
338 l.append((r, t, n))
338 l.append((r, t, n))
339 l.sort()
339 l.sort()
340 return [(t, n) for r, t, n in l]
340 return [(t, n) for r, t, n in l]
341
341
342 def nodetags(self, node):
342 def nodetags(self, node):
343 '''return the tags associated with a node'''
343 '''return the tags associated with a node'''
344 if not self.nodetagscache:
344 if not self.nodetagscache:
345 self.nodetagscache = {}
345 self.nodetagscache = {}
346 for t, n in self.tags().items():
346 for t, n in self.tags().items():
347 self.nodetagscache.setdefault(n, []).append(t)
347 self.nodetagscache.setdefault(n, []).append(t)
348 return self.nodetagscache.get(node, [])
348 return self.nodetagscache.get(node, [])
349
349
350 def _branchtags(self):
350 def _branchtags(self):
351 partial, last, lrev = self._readbranchcache()
351 partial, last, lrev = self._readbranchcache()
352
352
353 tiprev = self.changelog.count() - 1
353 tiprev = self.changelog.count() - 1
354 if lrev != tiprev:
354 if lrev != tiprev:
355 self._updatebranchcache(partial, lrev+1, tiprev+1)
355 self._updatebranchcache(partial, lrev+1, tiprev+1)
356 self._writebranchcache(partial, self.changelog.tip(), tiprev)
356 self._writebranchcache(partial, self.changelog.tip(), tiprev)
357
357
358 return partial
358 return partial
359
359
360 def branchtags(self):
360 def branchtags(self):
361 if self.branchcache is not None:
361 if self.branchcache is not None:
362 return self.branchcache
362 return self.branchcache
363
363
364 self.branchcache = {} # avoid recursion in changectx
364 self.branchcache = {} # avoid recursion in changectx
365 partial = self._branchtags()
365 partial = self._branchtags()
366
366
367 # the branch cache is stored on disk as UTF-8, but in the local
367 # the branch cache is stored on disk as UTF-8, but in the local
368 # charset internally
368 # charset internally
369 for k, v in partial.items():
369 for k, v in partial.items():
370 self.branchcache[util.tolocal(k)] = v
370 self.branchcache[util.tolocal(k)] = v
371 return self.branchcache
371 return self.branchcache
372
372
373 def _readbranchcache(self):
373 def _readbranchcache(self):
374 partial = {}
374 partial = {}
375 try:
375 try:
376 f = self.opener("branches.cache")
376 f = self.opener("branches.cache")
377 lines = f.read().split('\n')
377 lines = f.read().split('\n')
378 f.close()
378 f.close()
379 last, lrev = lines.pop(0).rstrip().split(" ", 1)
379 last, lrev = lines.pop(0).rstrip().split(" ", 1)
380 last, lrev = bin(last), int(lrev)
380 last, lrev = bin(last), int(lrev)
381 if not (lrev < self.changelog.count() and
381 if not (lrev < self.changelog.count() and
382 self.changelog.node(lrev) == last): # sanity check
382 self.changelog.node(lrev) == last): # sanity check
383 # invalidate the cache
383 # invalidate the cache
384 raise ValueError('Invalid branch cache: unknown tip')
384 raise ValueError('Invalid branch cache: unknown tip')
385 for l in lines:
385 for l in lines:
386 if not l: continue
386 if not l: continue
387 node, label = l.rstrip().split(" ", 1)
387 node, label = l.rstrip().split(" ", 1)
388 partial[label] = bin(node)
388 partial[label] = bin(node)
389 except (KeyboardInterrupt, util.SignalInterrupt):
389 except (KeyboardInterrupt, util.SignalInterrupt):
390 raise
390 raise
391 except Exception, inst:
391 except Exception, inst:
392 if self.ui.debugflag:
392 if self.ui.debugflag:
393 self.ui.warn(str(inst), '\n')
393 self.ui.warn(str(inst), '\n')
394 partial, last, lrev = {}, nullid, nullrev
394 partial, last, lrev = {}, nullid, nullrev
395 return partial, last, lrev
395 return partial, last, lrev
396
396
397 def _writebranchcache(self, branches, tip, tiprev):
397 def _writebranchcache(self, branches, tip, tiprev):
398 try:
398 try:
399 f = self.opener("branches.cache", "w")
399 f = self.opener("branches.cache", "w")
400 f.write("%s %s\n" % (hex(tip), tiprev))
400 f.write("%s %s\n" % (hex(tip), tiprev))
401 for label, node in branches.iteritems():
401 for label, node in branches.iteritems():
402 f.write("%s %s\n" % (hex(node), label))
402 f.write("%s %s\n" % (hex(node), label))
403 except IOError:
403 except IOError:
404 pass
404 pass
405
405
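Both branch-cache helpers above agree on a simple text layout for branches.cache: a '<tip hex> <tip rev>' header followed by one '<node hex> <branch label>' line per branch. A round-trip sketch with placeholder values (the real code converts nodes with bin()/hex(); this sketch keeps them as hex strings):

tip_hex, tip_rev = "00" * 20, 42                        # placeholder tip
branches = {"default": "11" * 20, "stable": "22" * 20}  # placeholder heads

# write, as in _writebranchcache()
lines = ["%s %s" % (tip_hex, tip_rev)]
lines += ["%s %s" % (node, label) for label, node in branches.items()]
text = "\n".join(lines) + "\n"

# read back, as in _readbranchcache()
rest = text.split('\n')
last, lrev = rest.pop(0).rstrip().split(" ", 1)
partial = {}
for l in rest:
    if not l:
        continue
    node, label = l.rstrip().split(" ", 1)
    partial[label] = node
assert (last, int(lrev)) == (tip_hex, tip_rev) and partial == branches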
406 def _updatebranchcache(self, partial, start, end):
406 def _updatebranchcache(self, partial, start, end):
407 for r in xrange(start, end):
407 for r in xrange(start, end):
408 c = self.changectx(r)
408 c = self.changectx(r)
409 b = c.branch()
409 b = c.branch()
410 if b:
410 if b:
411 partial[b] = c.node()
411 partial[b] = c.node()
412
412
413 def lookup(self, key):
413 def lookup(self, key):
414 if key == '.':
414 if key == '.':
415 key = self.dirstate.parents()[0]
415 key = self.dirstate.parents()[0]
416 if key == nullid:
416 if key == nullid:
417 raise repo.RepoError(_("no revision checked out"))
417 raise repo.RepoError(_("no revision checked out"))
418 elif key == 'null':
418 elif key == 'null':
419 return nullid
419 return nullid
420 n = self.changelog._match(key)
420 n = self.changelog._match(key)
421 if n:
421 if n:
422 return n
422 return n
423 if key in self.tags():
423 if key in self.tags():
424 return self.tags()[key]
424 return self.tags()[key]
425 if key in self.branchtags():
425 if key in self.branchtags():
426 return self.branchtags()[key]
426 return self.branchtags()[key]
427 n = self.changelog._partialmatch(key)
427 n = self.changelog._partialmatch(key)
428 if n:
428 if n:
429 return n
429 return n
430 raise repo.RepoError(_("unknown revision '%s'") % key)
430 raise repo.RepoError(_("unknown revision '%s'") % key)
431
431
432 def dev(self):
432 def dev(self):
433 return os.lstat(self.path).st_dev
433 return os.lstat(self.path).st_dev
434
434
435 def local(self):
435 def local(self):
436 return True
436 return True
437
437
438 def join(self, f):
438 def join(self, f):
439 return os.path.join(self.path, f)
439 return os.path.join(self.path, f)
440
440
441 def sjoin(self, f):
441 def sjoin(self, f):
442 f = self.encodefn(f)
442 f = self.encodefn(f)
443 return os.path.join(self.spath, f)
443 return os.path.join(self.spath, f)
444
444
445 def wjoin(self, f):
445 def wjoin(self, f):
446 return os.path.join(self.root, f)
446 return os.path.join(self.root, f)
447
447
448 def file(self, f):
448 def file(self, f):
449 if f[0] == '/':
449 if f[0] == '/':
450 f = f[1:]
450 f = f[1:]
451 return filelog.filelog(self.sopener, f, self.revlogversion)
451 return filelog.filelog(self.sopener, f, self.revlogversion)
452
452
453 def changectx(self, changeid=None):
453 def changectx(self, changeid=None):
454 return context.changectx(self, changeid)
454 return context.changectx(self, changeid)
455
455
456 def workingctx(self):
456 def workingctx(self):
457 return context.workingctx(self)
457 return context.workingctx(self)
458
458
459 def parents(self, changeid=None):
459 def parents(self, changeid=None):
460 '''
460 '''
461 get list of changectxs for parents of changeid or working directory
461 get list of changectxs for parents of changeid or working directory
462 '''
462 '''
463 if changeid is None:
463 if changeid is None:
464 pl = self.dirstate.parents()
464 pl = self.dirstate.parents()
465 else:
465 else:
466 n = self.changelog.lookup(changeid)
466 n = self.changelog.lookup(changeid)
467 pl = self.changelog.parents(n)
467 pl = self.changelog.parents(n)
468 if pl[1] == nullid:
468 if pl[1] == nullid:
469 return [self.changectx(pl[0])]
469 return [self.changectx(pl[0])]
470 return [self.changectx(pl[0]), self.changectx(pl[1])]
470 return [self.changectx(pl[0]), self.changectx(pl[1])]
471
471
472 def filectx(self, path, changeid=None, fileid=None):
472 def filectx(self, path, changeid=None, fileid=None):
473 """changeid can be a changeset revision, node, or tag.
473 """changeid can be a changeset revision, node, or tag.
474 fileid can be a file revision or node."""
474 fileid can be a file revision or node."""
475 return context.filectx(self, path, changeid, fileid)
475 return context.filectx(self, path, changeid, fileid)
476
476
477 def getcwd(self):
477 def getcwd(self):
478 return self.dirstate.getcwd()
478 return self.dirstate.getcwd()
479
479
480 def wfile(self, f, mode='r'):
480 def wfile(self, f, mode='r'):
481 return self.wopener(f, mode)
481 return self.wopener(f, mode)
482
482
483 def wread(self, filename):
483 def wread(self, filename):
484 if self.encodepats == None:
484 if self.encodepats == None:
485 l = []
485 l = []
486 for pat, cmd in self.ui.configitems("encode"):
486 for pat, cmd in self.ui.configitems("encode"):
487 mf = util.matcher(self.root, "", [pat], [], [])[1]
487 mf = util.matcher(self.root, "", [pat], [], [])[1]
488 l.append((mf, cmd))
488 l.append((mf, cmd))
489 self.encodepats = l
489 self.encodepats = l
490
490
491 data = self.wopener(filename, 'r').read()
491 data = self.wopener(filename, 'r').read()
492
492
493 for mf, cmd in self.encodepats:
493 for mf, cmd in self.encodepats:
494 if mf(filename):
494 if mf(filename):
495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
496 data = util.filter(data, cmd)
496 data = util.filter(data, cmd)
497 break
497 break
498
498
499 return data
499 return data
500
500
501 def wwrite(self, filename, data, fd=None):
501 def wwrite(self, filename, data, fd=None):
502 if self.decodepats == None:
502 if self.decodepats == None:
503 l = []
503 l = []
504 for pat, cmd in self.ui.configitems("decode"):
504 for pat, cmd in self.ui.configitems("decode"):
505 mf = util.matcher(self.root, "", [pat], [], [])[1]
505 mf = util.matcher(self.root, "", [pat], [], [])[1]
506 l.append((mf, cmd))
506 l.append((mf, cmd))
507 self.decodepats = l
507 self.decodepats = l
508
508
509 for mf, cmd in self.decodepats:
509 for mf, cmd in self.decodepats:
510 if mf(filename):
510 if mf(filename):
511 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
511 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
512 data = util.filter(data, cmd)
512 data = util.filter(data, cmd)
513 break
513 break
514
514
515 if fd:
515 if fd:
516 return fd.write(data)
516 return fd.write(data)
517 return self.wopener(filename, 'w').write(data)
517 return self.wopener(filename, 'w').write(data)
518
518
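# Editorial sketch (not from the diff): the core of the [encode]/[decode]
# filtering done by wread()/wwrite() above -- pick the first configured
# pattern that matches the file name and run the data through its filter.
# fnmatch stands in for util.matcher, and a Python callable stands in for the
# external command that util.filter would spawn.
import fnmatch

def apply_first_filter(filename, data, filters):
    """filters: list of (glob_pattern, transform) pairs; first match wins."""
    for pat, transform in filters:
        if fnmatch.fnmatch(filename, pat):
            return transform(data)
    return data

# e.g. a decode-style filter that expands LF to CRLF for *.txt on write
filters = [("*.txt", lambda d: d.replace("\n", "\r\n"))]
assert apply_first_filter("notes.txt", "a\nb", filters) == "a\r\nb"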
519 def transaction(self):
519 def transaction(self):
520 tr = self.transhandle
520 tr = self.transhandle
521 if tr != None and tr.running():
521 if tr != None and tr.running():
522 return tr.nest()
522 return tr.nest()
523
523
524 # save dirstate for rollback
524 # save dirstate for rollback
525 try:
525 try:
526 ds = self.opener("dirstate").read()
526 ds = self.opener("dirstate").read()
527 except IOError:
527 except IOError:
528 ds = ""
528 ds = ""
529 self.opener("journal.dirstate", "w").write(ds)
529 self.opener("journal.dirstate", "w").write(ds)
530
530
531 renames = [(self.sjoin("journal"), self.sjoin("undo")),
531 renames = [(self.sjoin("journal"), self.sjoin("undo")),
532 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
532 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
533 tr = transaction.transaction(self.ui.warn, self.sopener,
533 tr = transaction.transaction(self.ui.warn, self.sopener,
534 self.sjoin("journal"),
534 self.sjoin("journal"),
535 aftertrans(renames))
535 aftertrans(renames))
536 self.transhandle = tr
536 self.transhandle = tr
537 return tr
537 return tr
538
538
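# Editorial sketch (not from the diff): the journal/undo bookkeeping that
# transaction() sets up -- state is written to "journal*" files up front and,
# when the transaction closes cleanly, renamed to "undo*" so rollback() can
# find it later.  The paths below are temporary stand-ins.
import os, tempfile

def aftertrans_sketch(renames):
    # mirrors aftertrans(): return a callback that performs the queued renames
    def a():
        for src, dst in renames:
            os.rename(src, dst)
    return a

d = tempfile.mkdtemp()
with open(os.path.join(d, "journal.dirstate"), "w") as fp:
    fp.write("saved dirstate...")
cb = aftertrans_sketch([(os.path.join(d, "journal.dirstate"),
                         os.path.join(d, "undo.dirstate"))])
cb()  # what the real transaction runs on close
assert os.path.exists(os.path.join(d, "undo.dirstate"))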
539 def recover(self):
539 def recover(self):
540 l = self.lock()
540 l = self.lock()
541 if os.path.exists(self.sjoin("journal")):
541 if os.path.exists(self.sjoin("journal")):
542 self.ui.status(_("rolling back interrupted transaction\n"))
542 self.ui.status(_("rolling back interrupted transaction\n"))
543 transaction.rollback(self.sopener, self.sjoin("journal"))
543 transaction.rollback(self.sopener, self.sjoin("journal"))
544 self.reload()
544 self.reload()
545 return True
545 return True
546 else:
546 else:
547 self.ui.warn(_("no interrupted transaction available\n"))
547 self.ui.warn(_("no interrupted transaction available\n"))
548 return False
548 return False
549
549
550 def rollback(self, wlock=None):
550 def rollback(self, wlock=None):
551 if not wlock:
551 if not wlock:
552 wlock = self.wlock()
552 wlock = self.wlock()
553 l = self.lock()
553 l = self.lock()
554 if os.path.exists(self.sjoin("undo")):
554 if os.path.exists(self.sjoin("undo")):
555 self.ui.status(_("rolling back last transaction\n"))
555 self.ui.status(_("rolling back last transaction\n"))
556 transaction.rollback(self.sopener, self.sjoin("undo"))
556 transaction.rollback(self.sopener, self.sjoin("undo"))
557 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
557 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
558 self.reload()
558 self.reload()
559 self.wreload()
559 self.wreload()
560 else:
560 else:
561 self.ui.warn(_("no rollback information available\n"))
561 self.ui.warn(_("no rollback information available\n"))
562
562
563 def wreload(self):
563 def wreload(self):
564 self.dirstate.read()
564 self.dirstate.read()
565
565
566 def reload(self):
566 def reload(self):
567 self.changelog.load()
567 self.changelog.load()
568 self.manifest.load()
568 self.manifest.load()
569 self.tagscache = None
569 self.tagscache = None
570 self.nodetagscache = None
570 self.nodetagscache = None
571
571
572 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
572 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
573 desc=None):
573 desc=None):
574 try:
574 try:
575 l = lock.lock(lockname, 0, releasefn, desc=desc)
575 l = lock.lock(lockname, 0, releasefn, desc=desc)
576 except lock.LockHeld, inst:
576 except lock.LockHeld, inst:
577 if not wait:
577 if not wait:
578 raise
578 raise
579 self.ui.warn(_("waiting for lock on %s held by %r\n") %
579 self.ui.warn(_("waiting for lock on %s held by %r\n") %
580 (desc, inst.locker))
580 (desc, inst.locker))
581 # default to 600 seconds timeout
581 # default to 600 seconds timeout
582 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
582 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
583 releasefn, desc=desc)
583 releasefn, desc=desc)
584 if acquirefn:
584 if acquirefn:
585 acquirefn()
585 acquirefn()
586 return l
586 return l
587
587
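# Editorial sketch (not from the diff, Python 3): the retry shape of do_lock()
# above -- try a non-blocking acquire first and, only if the caller asked to
# wait, retry with the configured timeout.  threading.Lock stands in for
# lock.lock, a plain exception for lock.LockHeld, and 600 matches the
# ui.timeout default used above.
import threading

def acquire_with_wait(lk, wait, timeout=600):
    if lk.acquire(False):        # first attempt, like lock.lock(lockname, 0, ...)
        return lk
    if not wait:
        raise RuntimeError("lock held")
    # here the real code warns "waiting for lock on ... held by ..."
    if not lk.acquire(True, timeout):
        raise RuntimeError("timed out waiting for lock")
    return lk

l = threading.Lock()
assert acquire_with_wait(l, wait=1) is l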
588 def lock(self, wait=1):
588 def lock(self, wait=1):
589 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
589 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
590 desc=_('repository %s') % self.origroot)
590 desc=_('repository %s') % self.origroot)
591
591
592 def wlock(self, wait=1):
592 def wlock(self, wait=1):
593 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
593 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
594 self.wreload,
594 self.wreload,
595 desc=_('working directory of %s') % self.origroot)
595 desc=_('working directory of %s') % self.origroot)
596
596
597 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
597 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
598 """
598 """
599 commit an individual file as part of a larger transaction
599 commit an individual file as part of a larger transaction
600 """
600 """
601
601
602 t = self.wread(fn)
602 t = self.wread(fn)
603 fl = self.file(fn)
603 fl = self.file(fn)
604 fp1 = manifest1.get(fn, nullid)
604 fp1 = manifest1.get(fn, nullid)
605 fp2 = manifest2.get(fn, nullid)
605 fp2 = manifest2.get(fn, nullid)
606
606
607 meta = {}
607 meta = {}
608 cp = self.dirstate.copied(fn)
608 cp = self.dirstate.copied(fn)
609 if cp:
609 if cp:
610 meta["copy"] = cp
610 meta["copy"] = cp
611 if not manifest2: # not a branch merge
611 if not manifest2: # not a branch merge
612 meta["copyrev"] = hex(manifest1.get(cp, nullid))
612 meta["copyrev"] = hex(manifest1.get(cp, nullid))
613 fp2 = nullid
613 fp2 = nullid
614 elif fp2 != nullid: # copied on remote side
614 elif fp2 != nullid: # copied on remote side
615 meta["copyrev"] = hex(manifest1.get(cp, nullid))
615 meta["copyrev"] = hex(manifest1.get(cp, nullid))
616 elif fp1 != nullid: # copied on local side, reversed
616 elif fp1 != nullid: # copied on local side, reversed
617 meta["copyrev"] = hex(manifest2.get(cp))
617 meta["copyrev"] = hex(manifest2.get(cp))
618 fp2 = nullid
618 fp2 = nullid
619 else: # directory rename
619 else: # directory rename
620 meta["copyrev"] = hex(manifest1.get(cp, nullid))
620 meta["copyrev"] = hex(manifest1.get(cp, nullid))
621 self.ui.debug(_(" %s: copy %s:%s\n") %
621 self.ui.debug(_(" %s: copy %s:%s\n") %
622 (fn, cp, meta["copyrev"]))
622 (fn, cp, meta["copyrev"]))
623 fp1 = nullid
623 fp1 = nullid
624 elif fp2 != nullid:
624 elif fp2 != nullid:
625 # is one parent an ancestor of the other?
625 # is one parent an ancestor of the other?
626 fpa = fl.ancestor(fp1, fp2)
626 fpa = fl.ancestor(fp1, fp2)
627 if fpa == fp1:
627 if fpa == fp1:
628 fp1, fp2 = fp2, nullid
628 fp1, fp2 = fp2, nullid
629 elif fpa == fp2:
629 elif fpa == fp2:
630 fp2 = nullid
630 fp2 = nullid
631
631
632 # is the file unmodified from the parent? report existing entry
632 # is the file unmodified from the parent? report existing entry
633 if fp2 == nullid and not fl.cmp(fp1, t):
633 if fp2 == nullid and not fl.cmp(fp1, t):
634 return fp1
634 return fp1
635
635
636 changelist.append(fn)
636 changelist.append(fn)
637 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
637 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
638
638
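# Editorial sketch (not from the diff): the shape of the copy/rename metadata
# that filecommit() attaches to a filelog revision.  The path and the 40-char
# hex node below are invented; in the real code they come from the dirstate
# copy record and from manifest1/manifest2.
def copy_meta(source_path, source_filenode_hex):
    # a copied or renamed file records where it came from and at which revision
    return {"copy": source_path, "copyrev": source_filenode_hex}

meta = copy_meta("old/name.c", "0" * 40)
assert sorted(meta) == ["copy", "copyrev"]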
639 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
639 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
640 if p1 is None:
640 if p1 is None:
641 p1, p2 = self.dirstate.parents()
641 p1, p2 = self.dirstate.parents()
642 return self.commit(files=files, text=text, user=user, date=date,
642 return self.commit(files=files, text=text, user=user, date=date,
643 p1=p1, p2=p2, wlock=wlock)
643 p1=p1, p2=p2, wlock=wlock)
644
644
645 def commit(self, files=None, text="", user=None, date=None,
645 def commit(self, files=None, text="", user=None, date=None,
646 match=util.always, force=False, lock=None, wlock=None,
646 match=util.always, force=False, lock=None, wlock=None,
647 force_editor=False, p1=None, p2=None, extra={}):
647 force_editor=False, p1=None, p2=None, extra={}):
648
648
649 commit = []
649 commit = []
650 remove = []
650 remove = []
651 changed = []
651 changed = []
652 use_dirstate = (p1 is None) # not rawcommit
652 use_dirstate = (p1 is None) # not rawcommit
653 extra = extra.copy()
653 extra = extra.copy()
654
654
655 if use_dirstate:
655 if use_dirstate:
656 if files:
656 if files:
657 for f in files:
657 for f in files:
658 s = self.dirstate.state(f)
658 s = self.dirstate.state(f)
659 if s in 'nmai':
659 if s in 'nmai':
660 commit.append(f)
660 commit.append(f)
661 elif s == 'r':
661 elif s == 'r':
662 remove.append(f)
662 remove.append(f)
663 else:
663 else:
664 self.ui.warn(_("%s not tracked!\n") % f)
664 self.ui.warn(_("%s not tracked!\n") % f)
665 else:
665 else:
666 changes = self.status(match=match)[:5]
666 changes = self.status(match=match)[:5]
667 modified, added, removed, deleted, unknown = changes
667 modified, added, removed, deleted, unknown = changes
668 commit = modified + added
668 commit = modified + added
669 remove = removed
669 remove = removed
670 else:
670 else:
671 commit = files
671 commit = files
672
672
673 if use_dirstate:
673 if use_dirstate:
674 p1, p2 = self.dirstate.parents()
674 p1, p2 = self.dirstate.parents()
675 update_dirstate = True
675 update_dirstate = True
676 else:
676 else:
677 p1, p2 = p1, p2 or nullid
677 p1, p2 = p1, p2 or nullid
678 update_dirstate = (self.dirstate.parents()[0] == p1)
678 update_dirstate = (self.dirstate.parents()[0] == p1)
679
679
680 c1 = self.changelog.read(p1)
680 c1 = self.changelog.read(p1)
681 c2 = self.changelog.read(p2)
681 c2 = self.changelog.read(p2)
682 m1 = self.manifest.read(c1[0]).copy()
682 m1 = self.manifest.read(c1[0]).copy()
683 m2 = self.manifest.read(c2[0])
683 m2 = self.manifest.read(c2[0])
684
684
685 if use_dirstate:
685 if use_dirstate:
686 branchname = self.workingctx().branch()
686 branchname = self.workingctx().branch()
687 try:
687 try:
688 branchname = branchname.decode('UTF-8').encode('UTF-8')
688 branchname = branchname.decode('UTF-8').encode('UTF-8')
689 except UnicodeDecodeError:
689 except UnicodeDecodeError:
690 raise util.Abort(_('branch name not in UTF-8!'))
690 raise util.Abort(_('branch name not in UTF-8!'))
691 else:
691 else:
692 branchname = ""
692 branchname = ""
693
693
694 if use_dirstate:
694 if use_dirstate:
695 oldname = c1[5].get("branch", "") # stored in UTF-8
695 oldname = c1[5].get("branch", "") # stored in UTF-8
696 if not commit and not remove and not force and p2 == nullid and \
696 if not commit and not remove and not force and p2 == nullid and \
697 branchname == oldname:
697 branchname == oldname:
698 self.ui.status(_("nothing changed\n"))
698 self.ui.status(_("nothing changed\n"))
699 return None
699 return None
700
700
701 xp1 = hex(p1)
701 xp1 = hex(p1)
702 if p2 == nullid: xp2 = ''
702 if p2 == nullid: xp2 = ''
703 else: xp2 = hex(p2)
703 else: xp2 = hex(p2)
704
704
705 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
705 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
706
706
707 if not wlock:
707 if not wlock:
708 wlock = self.wlock()
708 wlock = self.wlock()
709 if not lock:
709 if not lock:
710 lock = self.lock()
710 lock = self.lock()
711 tr = self.transaction()
711 tr = self.transaction()
712
712
713 # check in files
713 # check in files
714 new = {}
714 new = {}
715 linkrev = self.changelog.count()
715 linkrev = self.changelog.count()
716 commit.sort()
716 commit.sort()
717 for f in commit:
717 for f in commit:
718 self.ui.note(f + "\n")
718 self.ui.note(f + "\n")
719 try:
719 try:
720 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
720 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
721 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
721 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
722 except IOError:
722 except IOError:
723 if use_dirstate:
723 if use_dirstate:
724 self.ui.warn(_("trouble committing %s!\n") % f)
724 self.ui.warn(_("trouble committing %s!\n") % f)
725 raise
725 raise
726 else:
726 else:
727 remove.append(f)
727 remove.append(f)
728
728
729 # update manifest
729 # update manifest
730 m1.update(new)
730 m1.update(new)
731 remove.sort()
731 remove.sort()
732
732
733 for f in remove:
733 for f in remove:
734 if f in m1:
734 if f in m1:
735 del m1[f]
735 del m1[f]
736 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
736 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
737
737
738 # add changeset
738 # add changeset
739 new = new.keys()
739 new = new.keys()
740 new.sort()
740 new.sort()
741
741
742 user = user or self.ui.username()
742 user = user or self.ui.username()
743 if not text or force_editor:
743 if not text or force_editor:
744 edittext = []
744 edittext = []
745 if text:
745 if text:
746 edittext.append(text)
746 edittext.append(text)
747 edittext.append("")
747 edittext.append("")
748 edittext.append("HG: user: %s" % user)
748 edittext.append("HG: user: %s" % user)
749 if p2 != nullid:
749 if p2 != nullid:
750 edittext.append("HG: branch merge")
750 edittext.append("HG: branch merge")
751 edittext.extend(["HG: changed %s" % f for f in changed])
751 edittext.extend(["HG: changed %s" % f for f in changed])
752 edittext.extend(["HG: removed %s" % f for f in remove])
752 edittext.extend(["HG: removed %s" % f for f in remove])
753 if not changed and not remove:
753 if not changed and not remove:
754 edittext.append("HG: no files changed")
754 edittext.append("HG: no files changed")
755 edittext.append("")
755 edittext.append("")
756 # run editor in the repository root
756 # run editor in the repository root
757 olddir = os.getcwd()
757 olddir = os.getcwd()
758 os.chdir(self.root)
758 os.chdir(self.root)
759 text = self.ui.edit("\n".join(edittext), user)
759 text = self.ui.edit("\n".join(edittext), user)
760 os.chdir(olddir)
760 os.chdir(olddir)
761
761
762 lines = [line.rstrip() for line in text.rstrip().splitlines()]
762 lines = [line.rstrip() for line in text.rstrip().splitlines()]
763 while lines and not lines[0]:
763 while lines and not lines[0]:
764 del lines[0]
764 del lines[0]
765 if not lines:
765 if not lines:
766 return None
766 return None
767 text = '\n'.join(lines)
767 text = '\n'.join(lines)
768 if branchname:
768 if branchname:
769 extra["branch"] = branchname
769 extra["branch"] = branchname
770 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
770 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
771 user, date, extra)
771 user, date, extra)
772 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
772 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
773 parent2=xp2)
773 parent2=xp2)
774 tr.close()
774 tr.close()
775
775
776 if use_dirstate or update_dirstate:
776 if use_dirstate or update_dirstate:
777 self.dirstate.setparents(n)
777 self.dirstate.setparents(n)
778 if use_dirstate:
778 if use_dirstate:
779 self.dirstate.update(new, "n")
779 self.dirstate.update(new, "n")
780 self.dirstate.forget(remove)
780 self.dirstate.forget(remove)
781
781
782 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
782 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
783 return n
783 return n
784
784
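# Editorial sketch (not from the diff): the post-editor cleanup performed by
# commit() above -- trailing whitespace and leading blank lines are stripped,
# and an empty result makes commit() return None.  The example message lines
# are invented.
def strip_commit_text(text):
    lines = [line.rstrip() for line in text.rstrip().splitlines()]
    while lines and not lines[0]:
        del lines[0]
    return "\n".join(lines)

edittext = "\n".join(["", "HG: user: alice", "HG: changed hello.c", ""])
assert strip_commit_text(edittext).startswith("HG: user")
assert strip_commit_text("   \n\n") == ""   # nothing left -> commit abandoned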
785 def walk(self, node=None, files=[], match=util.always, badmatch=None):
785 def walk(self, node=None, files=[], match=util.always, badmatch=None):
786 '''
786 '''
787 walk recursively through the directory tree or a given
787 walk recursively through the directory tree or a given
788 changeset, finding all files matched by the match
788 changeset, finding all files matched by the match
789 function
789 function
790
790
791 results are yielded in a tuple (src, filename), where src
791 results are yielded in a tuple (src, filename), where src
792 is one of:
792 is one of:
793 'f' the file was found in the directory tree
793 'f' the file was found in the directory tree
794 'm' the file was only in the dirstate and not in the tree
794 'm' the file was only in the dirstate and not in the tree
795 'b' file was not found and matched badmatch
795 'b' file was not found and matched badmatch
796 '''
796 '''
797
797
798 if node:
798 if node:
799 fdict = dict.fromkeys(files)
799 fdict = dict.fromkeys(files)
800 for fn in self.manifest.read(self.changelog.read(node)[0]):
800 for fn in self.manifest.read(self.changelog.read(node)[0]):
801 for ffn in fdict:
801 for ffn in fdict:
802 # match if the file is the exact name or a directory
802 # match if the file is the exact name or a directory
803 if ffn == fn or fn.startswith("%s/" % ffn):
803 if ffn == fn or fn.startswith("%s/" % ffn):
804 del fdict[ffn]
804 del fdict[ffn]
805 break
805 break
806 if match(fn):
806 if match(fn):
807 yield 'm', fn
807 yield 'm', fn
808 for fn in fdict:
808 for fn in fdict:
809 if badmatch and badmatch(fn):
809 if badmatch and badmatch(fn):
810 if match(fn):
810 if match(fn):
811 yield 'b', fn
811 yield 'b', fn
812 else:
812 else:
813 self.ui.warn(_('%s: No such file in rev %s\n') % (
813 self.ui.warn(_('%s: No such file in rev %s\n') % (
814 util.pathto(self.getcwd(), fn), short(node)))
814 util.pathto(self.getcwd(), fn), short(node)))
815 else:
815 else:
816 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
816 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
817 yield src, fn
817 yield src, fn
818
818
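# Editorial sketch (not from the diff): the "exact name or directory prefix"
# test that walk() uses above to decide whether a manifest entry satisfies one
# of the requested files.  Paths are invented for the example.
def matches_request(manifest_name, requested):
    return manifest_name == requested or manifest_name.startswith(requested + "/")

assert matches_request("src/main.c", "src")          # directory prefix
assert matches_request("README", "README")           # exact file
assert not matches_request("srcdir/x.c", "src")      # not a path component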
819 def status(self, node1=None, node2=None, files=[], match=util.always,
819 def status(self, node1=None, node2=None, files=[], match=util.always,
820 wlock=None, list_ignored=False, list_clean=False):
820 wlock=None, list_ignored=False, list_clean=False):
821 """return status of files between two nodes or node and working directory
821 """return status of files between two nodes or node and working directory
822
822
823 If node1 is None, use the first dirstate parent instead.
823 If node1 is None, use the first dirstate parent instead.
824 If node2 is None, compare node1 with working directory.
824 If node2 is None, compare node1 with working directory.
825 """
825 """
826
826
827 def fcmp(fn, mf):
827 def fcmp(fn, mf):
828 t1 = self.wread(fn)
828 t1 = self.wread(fn)
829 return self.file(fn).cmp(mf.get(fn, nullid), t1)
829 return self.file(fn).cmp(mf.get(fn, nullid), t1)
830
830
831 def mfmatches(node):
831 def mfmatches(node):
832 change = self.changelog.read(node)
832 change = self.changelog.read(node)
833 mf = self.manifest.read(change[0]).copy()
833 mf = self.manifest.read(change[0]).copy()
834 for fn in mf.keys():
834 for fn in mf.keys():
835 if not match(fn):
835 if not match(fn):
836 del mf[fn]
836 del mf[fn]
837 return mf
837 return mf
838
838
839 modified, added, removed, deleted, unknown = [], [], [], [], []
839 modified, added, removed, deleted, unknown = [], [], [], [], []
840 ignored, clean = [], []
840 ignored, clean = [], []
841
841
842 compareworking = False
842 compareworking = False
843 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
843 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
844 compareworking = True
844 compareworking = True
845
845
846 if not compareworking:
846 if not compareworking:
847 # read the manifest from node1 before the manifest from node2,
847 # read the manifest from node1 before the manifest from node2,
848 # so that we'll hit the manifest cache if we're going through
848 # so that we'll hit the manifest cache if we're going through
849 # all the revisions in parent->child order.
849 # all the revisions in parent->child order.
850 mf1 = mfmatches(node1)
850 mf1 = mfmatches(node1)
851
851
852 # are we comparing the working directory?
852 # are we comparing the working directory?
853 if not node2:
853 if not node2:
854 if not wlock:
854 if not wlock:
855 try:
855 try:
856 wlock = self.wlock(wait=0)
856 wlock = self.wlock(wait=0)
857 except lock.LockException:
857 except lock.LockException:
858 wlock = None
858 wlock = None
859 (lookup, modified, added, removed, deleted, unknown,
859 (lookup, modified, added, removed, deleted, unknown,
860 ignored, clean) = self.dirstate.status(files, match,
860 ignored, clean) = self.dirstate.status(files, match,
861 list_ignored, list_clean)
861 list_ignored, list_clean)
862
862
863 # are we comparing working dir against its parent?
863 # are we comparing working dir against its parent?
864 if compareworking:
864 if compareworking:
865 if lookup:
865 if lookup:
866 # do a full compare of any files that might have changed
866 # do a full compare of any files that might have changed
867 mf2 = mfmatches(self.dirstate.parents()[0])
867 mf2 = mfmatches(self.dirstate.parents()[0])
868 for f in lookup:
868 for f in lookup:
869 if fcmp(f, mf2):
869 if fcmp(f, mf2):
870 modified.append(f)
870 modified.append(f)
871 else:
871 else:
872 clean.append(f)
872 clean.append(f)
873 if wlock is not None:
873 if wlock is not None:
874 self.dirstate.update([f], "n")
874 self.dirstate.update([f], "n")
875 else:
875 else:
876 # we are comparing working dir against non-parent
876 # we are comparing working dir against non-parent
877 # generate a pseudo-manifest for the working dir
877 # generate a pseudo-manifest for the working dir
878 # XXX: create it in dirstate.py ?
878 # XXX: create it in dirstate.py ?
879 mf2 = mfmatches(self.dirstate.parents()[0])
879 mf2 = mfmatches(self.dirstate.parents()[0])
880 for f in lookup + modified + added:
880 for f in lookup + modified + added:
881 mf2[f] = ""
881 mf2[f] = ""
882 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
882 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
883 for f in removed:
883 for f in removed:
884 if f in mf2:
884 if f in mf2:
885 del mf2[f]
885 del mf2[f]
886 else:
886 else:
887 # we are comparing two revisions
887 # we are comparing two revisions
888 mf2 = mfmatches(node2)
888 mf2 = mfmatches(node2)
889
889
890 if not compareworking:
890 if not compareworking:
891 # flush lists from dirstate before comparing manifests
891 # flush lists from dirstate before comparing manifests
892 modified, added, clean = [], [], []
892 modified, added, clean = [], [], []
893
893
894 # make sure to sort the files so we talk to the disk in a
894 # make sure to sort the files so we talk to the disk in a
895 # reasonable order
895 # reasonable order
896 mf2keys = mf2.keys()
896 mf2keys = mf2.keys()
897 mf2keys.sort()
897 mf2keys.sort()
898 for fn in mf2keys:
898 for fn in mf2keys:
899 if mf1.has_key(fn):
899 if mf1.has_key(fn):
900 if mf1.flags(fn) != mf2.flags(fn) or \
900 if mf1.flags(fn) != mf2.flags(fn) or \
901 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
901 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
902 modified.append(fn)
902 modified.append(fn)
903 elif list_clean:
903 elif list_clean:
904 clean.append(fn)
904 clean.append(fn)
905 del mf1[fn]
905 del mf1[fn]
906 else:
906 else:
907 added.append(fn)
907 added.append(fn)
908
908
909 removed = mf1.keys()
909 removed = mf1.keys()
910
910
911 # sort and return results:
911 # sort and return results:
912 for l in modified, added, removed, deleted, unknown, ignored, clean:
912 for l in modified, added, removed, deleted, unknown, ignored, clean:
913 l.sort()
913 l.sort()
914 return (modified, added, removed, deleted, unknown, ignored, clean)
914 return (modified, added, removed, deleted, unknown, ignored, clean)
915
915
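# Editorial sketch (not from the diff): the two-manifest comparison at the
# heart of status() when comparing two revisions -- files only in mf2 are
# "added", files whose node differs are "modified", and whatever is left in
# mf1 afterwards is "removed".  Plain {filename: nodeid} dicts stand in for
# manifest objects, so flags and the working-directory fcmp() path are ignored.
def diff_manifests(mf1, mf2):
    mf1 = dict(mf1)
    modified, added = [], []
    for fn in sorted(mf2):
        if fn in mf1:
            if mf1[fn] != mf2[fn]:
                modified.append(fn)
            del mf1[fn]
        else:
            added.append(fn)
    removed = sorted(mf1)
    return modified, added, removed

assert diff_manifests({"a": "n1", "b": "n2"},
                      {"a": "n1", "c": "n3"}) == ([], ["c"], ["b"])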
916 def add(self, list, wlock=None):
916 def add(self, list, wlock=None):
917 if not wlock:
917 if not wlock:
918 wlock = self.wlock()
918 wlock = self.wlock()
919 for f in list:
919 for f in list:
920 p = self.wjoin(f)
920 p = self.wjoin(f)
921 if not os.path.exists(p):
921 if not os.path.exists(p):
922 self.ui.warn(_("%s does not exist!\n") % f)
922 self.ui.warn(_("%s does not exist!\n") % f)
923 elif not os.path.isfile(p):
923 elif not os.path.isfile(p):
924 self.ui.warn(_("%s not added: only files supported currently\n")
924 self.ui.warn(_("%s not added: only files supported currently\n")
925 % f)
925 % f)
926 elif self.dirstate.state(f) in 'an':
926 elif self.dirstate.state(f) in 'an':
927 self.ui.warn(_("%s already tracked!\n") % f)
927 self.ui.warn(_("%s already tracked!\n") % f)
928 else:
928 else:
929 self.dirstate.update([f], "a")
929 self.dirstate.update([f], "a")
930
930
931 def forget(self, list, wlock=None):
931 def forget(self, list, wlock=None):
932 if not wlock:
932 if not wlock:
933 wlock = self.wlock()
933 wlock = self.wlock()
934 for f in list:
934 for f in list:
935 if self.dirstate.state(f) not in 'ai':
935 if self.dirstate.state(f) not in 'ai':
936 self.ui.warn(_("%s not added!\n") % f)
936 self.ui.warn(_("%s not added!\n") % f)
937 else:
937 else:
938 self.dirstate.forget([f])
938 self.dirstate.forget([f])
939
939
940 def remove(self, list, unlink=False, wlock=None):
940 def remove(self, list, unlink=False, wlock=None):
941 if unlink:
941 if unlink:
942 for f in list:
942 for f in list:
943 try:
943 try:
944 util.unlink(self.wjoin(f))
944 util.unlink(self.wjoin(f))
945 except OSError, inst:
945 except OSError, inst:
946 if inst.errno != errno.ENOENT:
946 if inst.errno != errno.ENOENT:
947 raise
947 raise
948 if not wlock:
948 if not wlock:
949 wlock = self.wlock()
949 wlock = self.wlock()
950 for f in list:
950 for f in list:
951 p = self.wjoin(f)
951 p = self.wjoin(f)
952 if os.path.exists(p):
952 if os.path.exists(p):
953 self.ui.warn(_("%s still exists!\n") % f)
953 self.ui.warn(_("%s still exists!\n") % f)
954 elif self.dirstate.state(f) == 'a':
954 elif self.dirstate.state(f) == 'a':
955 self.dirstate.forget([f])
955 self.dirstate.forget([f])
956 elif f not in self.dirstate:
956 elif f not in self.dirstate:
957 self.ui.warn(_("%s not tracked!\n") % f)
957 self.ui.warn(_("%s not tracked!\n") % f)
958 else:
958 else:
959 self.dirstate.update([f], "r")
959 self.dirstate.update([f], "r")
960
960
961 def undelete(self, list, wlock=None):
961 def undelete(self, list, wlock=None):
962 p = self.dirstate.parents()[0]
962 p = self.dirstate.parents()[0]
963 mn = self.changelog.read(p)[0]
963 mn = self.changelog.read(p)[0]
964 m = self.manifest.read(mn)
964 m = self.manifest.read(mn)
965 if not wlock:
965 if not wlock:
966 wlock = self.wlock()
966 wlock = self.wlock()
967 for f in list:
967 for f in list:
968 if self.dirstate.state(f) not in "r":
968 if self.dirstate.state(f) not in "r":
969 self.ui.warn("%s not removed!\n" % f)
969 self.ui.warn("%s not removed!\n" % f)
970 else:
970 else:
971 t = self.file(f).read(m[f])
971 t = self.file(f).read(m[f])
972 self.wwrite(f, t)
972 self.wwrite(f, t)
973 util.set_exec(self.wjoin(f), m.execf(f))
973 util.set_exec(self.wjoin(f), m.execf(f))
974 self.dirstate.update([f], "n")
974 self.dirstate.update([f], "n")
975
975
976 def copy(self, source, dest, wlock=None):
976 def copy(self, source, dest, wlock=None):
977 p = self.wjoin(dest)
977 p = self.wjoin(dest)
978 if not os.path.exists(p):
978 if not os.path.exists(p):
979 self.ui.warn(_("%s does not exist!\n") % dest)
979 self.ui.warn(_("%s does not exist!\n") % dest)
980 elif not os.path.isfile(p):
980 elif not os.path.isfile(p):
981 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
981 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
982 else:
982 else:
983 if not wlock:
983 if not wlock:
984 wlock = self.wlock()
984 wlock = self.wlock()
985 if self.dirstate.state(dest) == '?':
985 if self.dirstate.state(dest) == '?':
986 self.dirstate.update([dest], "a")
986 self.dirstate.update([dest], "a")
987 self.dirstate.copy(source, dest)
987 self.dirstate.copy(source, dest)
988
988
989 def heads(self, start=None):
989 def heads(self, start=None):
990 heads = self.changelog.heads(start)
990 heads = self.changelog.heads(start)
991 # sort the output in rev descending order
991 # sort the output in rev descending order
992 heads = [(-self.changelog.rev(h), h) for h in heads]
992 heads = [(-self.changelog.rev(h), h) for h in heads]
993 heads.sort()
993 heads.sort()
994 return [n for (r, n) in heads]
994 return [n for (r, n) in heads]
995
995
996 # branchlookup returns a dict giving a list of branches for
997 # each head. A branch is defined as the tag of a node or
998 # the branch of the node's parents. If a node has multiple
999 # branch tags, tags are eliminated if they are visible from other
1000 # branch tags.
1001 #
1002 # So, for this graph:  a->b->c->d->e
1003 #                       \         /
1004 #                         aa -----/
1005 # a has tag 2.6.12
1006 # d has tag 2.6.13
1007 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1008 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
1009 # from the list.
1010 #
1011 # It is possible that more than one head will have the same branch tag.
1012 # Callers need to check the result for multiple heads under the same
1013 # branch tag if that is a problem for them (i.e. checkout of a specific
1014 # branch).
1015 #
1016 # passing in a specific branch will limit the depth of the search
1017 # through the parents. It won't limit the branches returned in the
1018 # result though.
1019 def branchlookup(self, heads=None, branch=None):
1020 if not heads:
1021 heads = self.heads()
1022 headt = [ h for h in heads ]
1023 chlog = self.changelog
1024 branches = {}
1025 merges = []
1026 seenmerge = {}
1027
1028 # traverse the tree once for each head, recording in the branches
1029 # dict which tags are visible from this head. The branches
1030 # dict also records which tags are visible from each tag
1031 # while we traverse.
1032 while headt or merges:
1033 if merges:
1034 n, found = merges.pop()
1035 visit = [n]
1036 else:
1037 h = headt.pop()
1038 visit = [h]
1039 found = [h]
1040 seen = {}
1041 while visit:
1042 n = visit.pop()
1043 if n in seen:
1044 continue
1045 pp = chlog.parents(n)
1046 tags = self.nodetags(n)
1047 if tags:
1048 for x in tags:
1049 if x == 'tip':
1050 continue
1051 for f in found:
1052 branches.setdefault(f, {})[n] = 1
1053 branches.setdefault(n, {})[n] = 1
1054 break
1055 if n not in found:
1056 found.append(n)
1057 if branch in tags:
1058 continue
1059 seen[n] = 1
1060 if pp[1] != nullid and n not in seenmerge:
1061 merges.append((pp[1], [x for x in found]))
1062 seenmerge[n] = 1
1063 if pp[0] != nullid:
1064 visit.append(pp[0])
1065 # traverse the branches dict, eliminating branch tags from each
1066 # head that are visible from another branch tag for that head.
1067 out = {}
1068 viscache = {}
1069 for h in heads:
1070 def visible(node):
1071 if node in viscache:
1072 return viscache[node]
1073 ret = {}
1074 visit = [node]
1075 while visit:
1076 x = visit.pop()
1077 if x in viscache:
1078 ret.update(viscache[x])
1079 elif x not in ret:
1080 ret[x] = 1
1081 if x in branches:
1082 visit[len(visit):] = branches[x].keys()
1083 viscache[node] = ret
1084 return ret
1085 if h not in branches:
1086 continue
1087 # O(n^2), but somewhat limited. This only searches the
1088 # tags visible from a specific head, not all the tags in the
1089 # whole repo.
1090 for b in branches[h]:
1091 vis = False
1092 for bb in branches[h].keys():
1093 if b != bb:
1094 if b in visible(bb):
1095 vis = True
1096 break
1097 if not vis:
1098 l = out.setdefault(h, [])
1099 l[len(l):] = self.nodetags(b)
1100 return out
1101
1102 def branches(self, nodes):
996 def branches(self, nodes):
1103 if not nodes:
997 if not nodes:
1104 nodes = [self.changelog.tip()]
998 nodes = [self.changelog.tip()]
1105 b = []
999 b = []
1106 for n in nodes:
1000 for n in nodes:
1107 t = n
1001 t = n
1108 while 1:
1002 while 1:
1109 p = self.changelog.parents(n)
1003 p = self.changelog.parents(n)
1110 if p[1] != nullid or p[0] == nullid:
1004 if p[1] != nullid or p[0] == nullid:
1111 b.append((t, n, p[0], p[1]))
1005 b.append((t, n, p[0], p[1]))
1112 break
1006 break
1113 n = p[0]
1007 n = p[0]
1114 return b
1008 return b
1115
1009
1116 def between(self, pairs):
1010 def between(self, pairs):
1117 r = []
1011 r = []
1118
1012
1119 for top, bottom in pairs:
1013 for top, bottom in pairs:
1120 n, l, i = top, [], 0
1014 n, l, i = top, [], 0
1121 f = 1
1015 f = 1
1122
1016
1123 while n != bottom:
1017 while n != bottom:
1124 p = self.changelog.parents(n)[0]
1018 p = self.changelog.parents(n)[0]
1125 if i == f:
1019 if i == f:
1126 l.append(n)
1020 l.append(n)
1127 f = f * 2
1021 f = f * 2
1128 n = p
1022 n = p
1129 i += 1
1023 i += 1
1130
1024
1131 r.append(l)
1025 r.append(l)
1132
1026
1133 return r
1027 return r
1134
1028
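# Editorial sketch (not from the diff): the power-of-two sampling that
# between() performs while walking first parents from "top" down to "bottom".
# Integers stand in for changelog nodes, and "i - 1" plays the role of the
# first parent of revision i.
def sample_between(top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:          # keep every 1st, 2nd, 4th, 8th ... ancestor
            l.append(n)
            f *= 2
        n = n - 1           # step to the (only) parent
        i += 1
    return l

assert sample_between(10, 0) == [9, 8, 6, 2]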
1135 def findincoming(self, remote, base=None, heads=None, force=False):
1029 def findincoming(self, remote, base=None, heads=None, force=False):
1136 """Return list of roots of the subsets of missing nodes from remote
1030 """Return list of roots of the subsets of missing nodes from remote
1137
1031
1138 If base dict is specified, assume that these nodes and their parents
1032 If base dict is specified, assume that these nodes and their parents
1139 exist on the remote side and that no child of a node of base exists
1033 exist on the remote side and that no child of a node of base exists
1140 in both remote and self.
1034 in both remote and self.
1141 Furthermore base will be updated to include the nodes that exist
1035 Furthermore base will be updated to include the nodes that exist
1142 in self and remote but have no children in self and remote.
1036 in self and remote but have no children in self and remote.
1143 If a list of heads is specified, return only nodes which are heads
1037 If a list of heads is specified, return only nodes which are heads
1144 or ancestors of these heads.
1038 or ancestors of these heads.
1145
1039
1146 All the ancestors of base are in self and in remote.
1040 All the ancestors of base are in self and in remote.
1147 All the descendants of the list returned are missing in self.
1041 All the descendants of the list returned are missing in self.
1148 (and so we know that the rest of the nodes are missing in remote, see
1042 (and so we know that the rest of the nodes are missing in remote, see
1149 outgoing)
1043 outgoing)
1150 """
1044 """
1151 m = self.changelog.nodemap
1045 m = self.changelog.nodemap
1152 search = []
1046 search = []
1153 fetch = {}
1047 fetch = {}
1154 seen = {}
1048 seen = {}
1155 seenbranch = {}
1049 seenbranch = {}
1156 if base == None:
1050 if base == None:
1157 base = {}
1051 base = {}
1158
1052
1159 if not heads:
1053 if not heads:
1160 heads = remote.heads()
1054 heads = remote.heads()
1161
1055
1162 if self.changelog.tip() == nullid:
1056 if self.changelog.tip() == nullid:
1163 base[nullid] = 1
1057 base[nullid] = 1
1164 if heads != [nullid]:
1058 if heads != [nullid]:
1165 return [nullid]
1059 return [nullid]
1166 return []
1060 return []
1167
1061
1168 # assume we're closer to the tip than the root
1062 # assume we're closer to the tip than the root
1169 # and start by examining the heads
1063 # and start by examining the heads
1170 self.ui.status(_("searching for changes\n"))
1064 self.ui.status(_("searching for changes\n"))
1171
1065
1172 unknown = []
1066 unknown = []
1173 for h in heads:
1067 for h in heads:
1174 if h not in m:
1068 if h not in m:
1175 unknown.append(h)
1069 unknown.append(h)
1176 else:
1070 else:
1177 base[h] = 1
1071 base[h] = 1
1178
1072
1179 if not unknown:
1073 if not unknown:
1180 return []
1074 return []
1181
1075
1182 req = dict.fromkeys(unknown)
1076 req = dict.fromkeys(unknown)
1183 reqcnt = 0
1077 reqcnt = 0
1184
1078
1185 # search through remote branches
1079 # search through remote branches
1186 # a 'branch' here is a linear segment of history, with four parts:
1080 # a 'branch' here is a linear segment of history, with four parts:
1187 # head, root, first parent, second parent
1081 # head, root, first parent, second parent
1188 # (a branch always has two parents (or none) by definition)
1082 # (a branch always has two parents (or none) by definition)
1189 unknown = remote.branches(unknown)
1083 unknown = remote.branches(unknown)
1190 while unknown:
1084 while unknown:
1191 r = []
1085 r = []
1192 while unknown:
1086 while unknown:
1193 n = unknown.pop(0)
1087 n = unknown.pop(0)
1194 if n[0] in seen:
1088 if n[0] in seen:
1195 continue
1089 continue
1196
1090
1197 self.ui.debug(_("examining %s:%s\n")
1091 self.ui.debug(_("examining %s:%s\n")
1198 % (short(n[0]), short(n[1])))
1092 % (short(n[0]), short(n[1])))
1199 if n[0] == nullid: # found the end of the branch
1093 if n[0] == nullid: # found the end of the branch
1200 pass
1094 pass
1201 elif n in seenbranch:
1095 elif n in seenbranch:
1202 self.ui.debug(_("branch already found\n"))
1096 self.ui.debug(_("branch already found\n"))
1203 continue
1097 continue
1204 elif n[1] and n[1] in m: # do we know the base?
1098 elif n[1] and n[1] in m: # do we know the base?
1205 self.ui.debug(_("found incomplete branch %s:%s\n")
1099 self.ui.debug(_("found incomplete branch %s:%s\n")
1206 % (short(n[0]), short(n[1])))
1100 % (short(n[0]), short(n[1])))
1207 search.append(n) # schedule branch range for scanning
1101 search.append(n) # schedule branch range for scanning
1208 seenbranch[n] = 1
1102 seenbranch[n] = 1
1209 else:
1103 else:
1210 if n[1] not in seen and n[1] not in fetch:
1104 if n[1] not in seen and n[1] not in fetch:
1211 if n[2] in m and n[3] in m:
1105 if n[2] in m and n[3] in m:
1212 self.ui.debug(_("found new changeset %s\n") %
1106 self.ui.debug(_("found new changeset %s\n") %
1213 short(n[1]))
1107 short(n[1]))
1214 fetch[n[1]] = 1 # earliest unknown
1108 fetch[n[1]] = 1 # earliest unknown
1215 for p in n[2:4]:
1109 for p in n[2:4]:
1216 if p in m:
1110 if p in m:
1217 base[p] = 1 # latest known
1111 base[p] = 1 # latest known
1218
1112
1219 for p in n[2:4]:
1113 for p in n[2:4]:
1220 if p not in req and p not in m:
1114 if p not in req and p not in m:
1221 r.append(p)
1115 r.append(p)
1222 req[p] = 1
1116 req[p] = 1
1223 seen[n[0]] = 1
1117 seen[n[0]] = 1
1224
1118
1225 if r:
1119 if r:
1226 reqcnt += 1
1120 reqcnt += 1
1227 self.ui.debug(_("request %d: %s\n") %
1121 self.ui.debug(_("request %d: %s\n") %
1228 (reqcnt, " ".join(map(short, r))))
1122 (reqcnt, " ".join(map(short, r))))
1229 for p in xrange(0, len(r), 10):
1123 for p in xrange(0, len(r), 10):
1230 for b in remote.branches(r[p:p+10]):
1124 for b in remote.branches(r[p:p+10]):
1231 self.ui.debug(_("received %s:%s\n") %
1125 self.ui.debug(_("received %s:%s\n") %
1232 (short(b[0]), short(b[1])))
1126 (short(b[0]), short(b[1])))
1233 unknown.append(b)
1127 unknown.append(b)
1234
1128
1235 # do binary search on the branches we found
1129 # do binary search on the branches we found
1236 while search:
1130 while search:
1237 n = search.pop(0)
1131 n = search.pop(0)
1238 reqcnt += 1
1132 reqcnt += 1
1239 l = remote.between([(n[0], n[1])])[0]
1133 l = remote.between([(n[0], n[1])])[0]
1240 l.append(n[1])
1134 l.append(n[1])
1241 p = n[0]
1135 p = n[0]
1242 f = 1
1136 f = 1
1243 for i in l:
1137 for i in l:
1244 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1138 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1245 if i in m:
1139 if i in m:
1246 if f <= 2:
1140 if f <= 2:
1247 self.ui.debug(_("found new branch changeset %s\n") %
1141 self.ui.debug(_("found new branch changeset %s\n") %
1248 short(p))
1142 short(p))
1249 fetch[p] = 1
1143 fetch[p] = 1
1250 base[i] = 1
1144 base[i] = 1
1251 else:
1145 else:
1252 self.ui.debug(_("narrowed branch search to %s:%s\n")
1146 self.ui.debug(_("narrowed branch search to %s:%s\n")
1253 % (short(p), short(i)))
1147 % (short(p), short(i)))
1254 search.append((p, i))
1148 search.append((p, i))
1255 break
1149 break
1256 p, f = i, f * 2
1150 p, f = i, f * 2
1257
1151
1258 # sanity check our fetch list
1152 # sanity check our fetch list
1259 for f in fetch.keys():
1153 for f in fetch.keys():
1260 if f in m:
1154 if f in m:
1261 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1155 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1262
1156
1263 if base.keys() == [nullid]:
1157 if base.keys() == [nullid]:
1264 if force:
1158 if force:
1265 self.ui.warn(_("warning: repository is unrelated\n"))
1159 self.ui.warn(_("warning: repository is unrelated\n"))
1266 else:
1160 else:
1267 raise util.Abort(_("repository is unrelated"))
1161 raise util.Abort(_("repository is unrelated"))
1268
1162
1269 self.ui.debug(_("found new changesets starting at ") +
1163 self.ui.debug(_("found new changesets starting at ") +
1270 " ".join([short(f) for f in fetch]) + "\n")
1164 " ".join([short(f) for f in fetch]) + "\n")
1271
1165
1272 self.ui.debug(_("%d total queries\n") % reqcnt)
1166 self.ui.debug(_("%d total queries\n") % reqcnt)
1273
1167
1274 return fetch.keys()
1168 return fetch.keys()
1275
1169
1276 def findoutgoing(self, remote, base=None, heads=None, force=False):
1170 def findoutgoing(self, remote, base=None, heads=None, force=False):
1277 """Return list of nodes that are roots of subsets not in remote
1171 """Return list of nodes that are roots of subsets not in remote
1278
1172
1279 If base dict is specified, assume that these nodes and their parents
1173 If base dict is specified, assume that these nodes and their parents
1280 exist on the remote side.
1174 exist on the remote side.
1281 If a list of heads is specified, return only nodes which are heads
1175 If a list of heads is specified, return only nodes which are heads
1282 or ancestors of these heads, and return a second element which
1176 or ancestors of these heads, and return a second element which
1283 contains all remote heads which get new children.
1177 contains all remote heads which get new children.
1284 """
1178 """
1285 if base == None:
1179 if base == None:
1286 base = {}
1180 base = {}
1287 self.findincoming(remote, base, heads, force=force)
1181 self.findincoming(remote, base, heads, force=force)
1288
1182
1289 self.ui.debug(_("common changesets up to ")
1183 self.ui.debug(_("common changesets up to ")
1290 + " ".join(map(short, base.keys())) + "\n")
1184 + " ".join(map(short, base.keys())) + "\n")
1291
1185
1292 remain = dict.fromkeys(self.changelog.nodemap)
1186 remain = dict.fromkeys(self.changelog.nodemap)
1293
1187
1294 # prune everything remote has from the tree
1188 # prune everything remote has from the tree
1295 del remain[nullid]
1189 del remain[nullid]
1296 remove = base.keys()
1190 remove = base.keys()
1297 while remove:
1191 while remove:
1298 n = remove.pop(0)
1192 n = remove.pop(0)
1299 if n in remain:
1193 if n in remain:
1300 del remain[n]
1194 del remain[n]
1301 for p in self.changelog.parents(n):
1195 for p in self.changelog.parents(n):
1302 remove.append(p)
1196 remove.append(p)
1303
1197
1304 # find every node whose parents have been pruned
1198 # find every node whose parents have been pruned
1305 subset = []
1199 subset = []
1306 # find every remote head that will get new children
1200 # find every remote head that will get new children
1307 updated_heads = {}
1201 updated_heads = {}
1308 for n in remain:
1202 for n in remain:
1309 p1, p2 = self.changelog.parents(n)
1203 p1, p2 = self.changelog.parents(n)
1310 if p1 not in remain and p2 not in remain:
1204 if p1 not in remain and p2 not in remain:
1311 subset.append(n)
1205 subset.append(n)
1312 if heads:
1206 if heads:
1313 if p1 in heads:
1207 if p1 in heads:
1314 updated_heads[p1] = True
1208 updated_heads[p1] = True
1315 if p2 in heads:
1209 if p2 in heads:
1316 updated_heads[p2] = True
1210 updated_heads[p2] = True
1317
1211
1318 # this is the set of all roots we have to push
1212 # this is the set of all roots we have to push
1319 if heads:
1213 if heads:
1320 return subset, updated_heads.keys()
1214 return subset, updated_heads.keys()
1321 else:
1215 else:
1322 return subset
1216 return subset
1323
1217
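# Editorial sketch (not from the diff): the pruning step of findoutgoing()
# above -- delete everything reachable from the common "base" nodes, then
# report the nodes whose parents were all pruned away as the roots of the
# outgoing set.  The DAG is a toy {node: (p1, p2)} map; None plays nullid.
def outgoing_roots(parents, base):
    remain = dict.fromkeys(parents)
    remove = list(base)
    while remove:                       # prune base and all of its ancestors
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            remove.extend(p for p in parents[n] if p is not None)
    return sorted(n for n in remain
                  if all(p not in remain for p in parents[n]))

dag = {"a": (None, None), "b": ("a", None), "c": ("b", None), "d": ("c", None)}
assert outgoing_roots(dag, base=["b"]) == ["c"]   # c and d are outgoing; c is the root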
1324 def pull(self, remote, heads=None, force=False, lock=None):
1218 def pull(self, remote, heads=None, force=False, lock=None):
1325 mylock = False
1219 mylock = False
1326 if not lock:
1220 if not lock:
1327 lock = self.lock()
1221 lock = self.lock()
1328 mylock = True
1222 mylock = True
1329
1223
1330 try:
1224 try:
1331 fetch = self.findincoming(remote, force=force)
1225 fetch = self.findincoming(remote, force=force)
1332 if fetch == [nullid]:
1226 if fetch == [nullid]:
1333 self.ui.status(_("requesting all changes\n"))
1227 self.ui.status(_("requesting all changes\n"))
1334
1228
1335 if not fetch:
1229 if not fetch:
1336 self.ui.status(_("no changes found\n"))
1230 self.ui.status(_("no changes found\n"))
1337 return 0
1231 return 0
1338
1232
1339 if heads is None:
1233 if heads is None:
1340 cg = remote.changegroup(fetch, 'pull')
1234 cg = remote.changegroup(fetch, 'pull')
1341 else:
1235 else:
1342 if 'changegroupsubset' not in remote.capabilities:
1236 if 'changegroupsubset' not in remote.capabilities:
1343 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1237 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1344 cg = remote.changegroupsubset(fetch, heads, 'pull')
1238 cg = remote.changegroupsubset(fetch, heads, 'pull')
1345 return self.addchangegroup(cg, 'pull', remote.url())
1239 return self.addchangegroup(cg, 'pull', remote.url())
1346 finally:
1240 finally:
1347 if mylock:
1241 if mylock:
1348 lock.release()
1242 lock.release()
1349
1243
1350 def push(self, remote, force=False, revs=None):
1244 def push(self, remote, force=False, revs=None):
1351 # there are two ways to push to remote repo:
1245 # there are two ways to push to remote repo:
1352 #
1246 #
1353 # addchangegroup assumes local user can lock remote
1247 # addchangegroup assumes local user can lock remote
1354 # repo (local filesystem, old ssh servers).
1248 # repo (local filesystem, old ssh servers).
1355 #
1249 #
1356 # unbundle assumes local user cannot lock remote repo (new ssh
1250 # unbundle assumes local user cannot lock remote repo (new ssh
1357 # servers, http servers).
1251 # servers, http servers).
1358
1252
1359 if remote.capable('unbundle'):
1253 if remote.capable('unbundle'):
1360 return self.push_unbundle(remote, force, revs)
1254 return self.push_unbundle(remote, force, revs)
1361 return self.push_addchangegroup(remote, force, revs)
1255 return self.push_addchangegroup(remote, force, revs)
1362
1256
1363 def prepush(self, remote, force, revs):
1257 def prepush(self, remote, force, revs):
1364 base = {}
1258 base = {}
1365 remote_heads = remote.heads()
1259 remote_heads = remote.heads()
1366 inc = self.findincoming(remote, base, remote_heads, force=force)
1260 inc = self.findincoming(remote, base, remote_heads, force=force)
1367
1261
1368 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1262 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1369 if revs is not None:
1263 if revs is not None:
1370 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1264 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1371 else:
1265 else:
1372 bases, heads = update, self.changelog.heads()
1266 bases, heads = update, self.changelog.heads()
1373
1267
1374 if not bases:
1268 if not bases:
1375 self.ui.status(_("no changes found\n"))
1269 self.ui.status(_("no changes found\n"))
1376 return None, 1
1270 return None, 1
1377 elif not force:
1271 elif not force:
1378 # check if we're creating new remote heads
1272 # check if we're creating new remote heads
1379 # to be a remote head after push, node must be either
1273 # to be a remote head after push, node must be either
1380 # - unknown locally
1274 # - unknown locally
1381 # - a local outgoing head descended from update
1275 # - a local outgoing head descended from update
1382 # - a remote head that's known locally and not
1276 # - a remote head that's known locally and not
1383 # ancestral to an outgoing head
1277 # ancestral to an outgoing head
1384
1278
1385 warn = 0
1279 warn = 0
1386
1280
1387 if remote_heads == [nullid]:
1281 if remote_heads == [nullid]:
1388 warn = 0
1282 warn = 0
1389 elif not revs and len(heads) > len(remote_heads):
1283 elif not revs and len(heads) > len(remote_heads):
1390 warn = 1
1284 warn = 1
1391 else:
1285 else:
1392 newheads = list(heads)
1286 newheads = list(heads)
1393 for r in remote_heads:
1287 for r in remote_heads:
1394 if r in self.changelog.nodemap:
1288 if r in self.changelog.nodemap:
1395 desc = self.changelog.heads(r)
1289 desc = self.changelog.heads(r)
1396 l = [h for h in heads if h in desc]
1290 l = [h for h in heads if h in desc]
1397 if not l:
1291 if not l:
1398 newheads.append(r)
1292 newheads.append(r)
1399 else:
1293 else:
1400 newheads.append(r)
1294 newheads.append(r)
1401 if len(newheads) > len(remote_heads):
1295 if len(newheads) > len(remote_heads):
1402 warn = 1
1296 warn = 1
1403
1297
1404 if warn:
1298 if warn:
1405 self.ui.warn(_("abort: push creates new remote branches!\n"))
1299 self.ui.warn(_("abort: push creates new remote branches!\n"))
1406 self.ui.status(_("(did you forget to merge?"
1300 self.ui.status(_("(did you forget to merge?"
1407 " use push -f to force)\n"))
1301 " use push -f to force)\n"))
1408 return None, 1
1302 return None, 1
1409 elif inc:
1303 elif inc:
1410 self.ui.warn(_("note: unsynced remote changes!\n"))
1304 self.ui.warn(_("note: unsynced remote changes!\n"))
1411
1305
1412
1306
1413 if revs is None:
1307 if revs is None:
1414 cg = self.changegroup(update, 'push')
1308 cg = self.changegroup(update, 'push')
1415 else:
1309 else:
1416 cg = self.changegroupsubset(update, revs, 'push')
1310 cg = self.changegroupsubset(update, revs, 'push')
1417 return cg, remote_heads
1311 return cg, remote_heads
1418
1312
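# Editorial sketch (not from the diff): the "push creates new remote branches"
# test performed by prepush() above, reduced to a toy form -- the push is
# refused when, after replacing each remote head by the local heads descending
# from it, more heads remain than the remote started with.  Strings stand in
# for nodes and descendants() for changelog.heads(r); unknown remote heads are
# ignored here for brevity.
def creates_new_heads(local_heads, remote_heads, descendants):
    newheads = list(local_heads)
    for r in remote_heads:
        if not any(h in descendants(r) for h in local_heads):
            newheads.append(r)          # remote head not covered locally
    return len(newheads) > len(remote_heads)

desc = {"r1": {"l1"}}                   # l1 descends from remote head r1
assert not creates_new_heads(["l1"], ["r1"], lambda r: desc.get(r, set()))
assert creates_new_heads(["l1", "l2"], ["r1"], lambda r: desc.get(r, set()))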
1419 def push_addchangegroup(self, remote, force, revs):
1313 def push_addchangegroup(self, remote, force, revs):
1420 lock = remote.lock()
1314 lock = remote.lock()
1421
1315
1422 ret = self.prepush(remote, force, revs)
1316 ret = self.prepush(remote, force, revs)
1423 if ret[0] is not None:
1317 if ret[0] is not None:
1424 cg, remote_heads = ret
1318 cg, remote_heads = ret
1425 return remote.addchangegroup(cg, 'push', self.url())
1319 return remote.addchangegroup(cg, 'push', self.url())
1426 return ret[1]
1320 return ret[1]
1427
1321
1428 def push_unbundle(self, remote, force, revs):
1322 def push_unbundle(self, remote, force, revs):
1429 # local repo finds heads on server, finds out what revs it
1323 # local repo finds heads on server, finds out what revs it
1430 # must push. once revs transferred, if server finds it has
1324 # must push. once revs transferred, if server finds it has
1431 # different heads (someone else won commit/push race), server
1325 # different heads (someone else won commit/push race), server
1432 # aborts.
1326 # aborts.
1433
1327
1434 ret = self.prepush(remote, force, revs)
1328 ret = self.prepush(remote, force, revs)
1435 if ret[0] is not None:
1329 if ret[0] is not None:
1436 cg, remote_heads = ret
1330 cg, remote_heads = ret
1437 if force: remote_heads = ['force']
1331 if force: remote_heads = ['force']
1438 return remote.unbundle(cg, remote_heads, 'push')
1332 return remote.unbundle(cg, remote_heads, 'push')
1439 return ret[1]
1333 return ret[1]
1440
1334
1441 def changegroupinfo(self, nodes):
1335 def changegroupinfo(self, nodes):
1442 self.ui.note(_("%d changesets found\n") % len(nodes))
1336 self.ui.note(_("%d changesets found\n") % len(nodes))
1443 if self.ui.debugflag:
1337 if self.ui.debugflag:
1444 self.ui.debug(_("List of changesets:\n"))
1338 self.ui.debug(_("List of changesets:\n"))
1445 for node in nodes:
1339 for node in nodes:
1446 self.ui.debug("%s\n" % hex(node))
1340 self.ui.debug("%s\n" % hex(node))
1447
1341
1448 def changegroupsubset(self, bases, heads, source):
1342 def changegroupsubset(self, bases, heads, source):
1449 """This function generates a changegroup consisting of all the nodes
1343 """This function generates a changegroup consisting of all the nodes
1450 that are descendants of any of the bases, and ancestors of any of
1344 that are descendants of any of the bases, and ancestors of any of
1451 the heads.
1345 the heads.
1452
1346
1453 It is fairly complex as determining which filenodes and which
1347 It is fairly complex as determining which filenodes and which
1454 manifest nodes need to be included for the changeset to be complete
1348 manifest nodes need to be included for the changeset to be complete
1455 is non-trivial.
1349 is non-trivial.
1456
1350
1457 Another wrinkle is doing the reverse, figuring out which changeset in
1351 Another wrinkle is doing the reverse, figuring out which changeset in
1458 the changegroup a particular filenode or manifestnode belongs to."""
1352 the changegroup a particular filenode or manifestnode belongs to."""
1459
1353
1460 self.hook('preoutgoing', throw=True, source=source)
1354 self.hook('preoutgoing', throw=True, source=source)
1461
1355
1462 # Set up some initial variables
1356 # Set up some initial variables
1463 # Make it easy to refer to self.changelog
1357 # Make it easy to refer to self.changelog
1464 cl = self.changelog
1358 cl = self.changelog
1465 # msng is short for missing - compute the list of changesets in this
1359 # msng is short for missing - compute the list of changesets in this
1466 # changegroup.
1360 # changegroup.
1467 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1361 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1468 self.changegroupinfo(msng_cl_lst)
1362 self.changegroupinfo(msng_cl_lst)
1469 # Some bases may turn out to be superfluous, and some heads may be
1363 # Some bases may turn out to be superfluous, and some heads may be
1470 # too. nodesbetween will return the minimal set of bases and heads
1364 # too. nodesbetween will return the minimal set of bases and heads
1471 # necessary to re-create the changegroup.
1365 # necessary to re-create the changegroup.
1472
1366
1473 # Known heads are the list of heads that it is assumed the recipient
1367 # Known heads are the list of heads that it is assumed the recipient
1474 # of this changegroup will know about.
1368 # of this changegroup will know about.
1475 knownheads = {}
1369 knownheads = {}
1476 # We assume that all parents of bases are known heads.
1370 # We assume that all parents of bases are known heads.
1477 for n in bases:
1371 for n in bases:
1478 for p in cl.parents(n):
1372 for p in cl.parents(n):
1479 if p != nullid:
1373 if p != nullid:
1480 knownheads[p] = 1
1374 knownheads[p] = 1
1481 knownheads = knownheads.keys()
1375 knownheads = knownheads.keys()
1482 if knownheads:
1376 if knownheads:
1483 # Now that we know what heads are known, we can compute which
1377 # Now that we know what heads are known, we can compute which
1484 # changesets are known. The recipient must know about all
1378 # changesets are known. The recipient must know about all
1485 # changesets required to reach the known heads from the null
1379 # changesets required to reach the known heads from the null
1486 # changeset.
1380 # changeset.
1487 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1381 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1488 junk = None
1382 junk = None
1489 # Transform the list into an ersatz set.
1383 # Transform the list into an ersatz set.
1490 has_cl_set = dict.fromkeys(has_cl_set)
1384 has_cl_set = dict.fromkeys(has_cl_set)
1491 else:
1385 else:
1492 # If there were no known heads, the recipient cannot be assumed to
1386 # If there were no known heads, the recipient cannot be assumed to
1493 # know about any changesets.
1387 # know about any changesets.
1494 has_cl_set = {}
1388 has_cl_set = {}
1495
1389
1496 # Make it easy to refer to self.manifest
1390 # Make it easy to refer to self.manifest
1497 mnfst = self.manifest
1391 mnfst = self.manifest
1498 # We don't know which manifests are missing yet
1392 # We don't know which manifests are missing yet
1499 msng_mnfst_set = {}
1393 msng_mnfst_set = {}
1500 # Nor do we know which filenodes are missing.
1394 # Nor do we know which filenodes are missing.
1501 msng_filenode_set = {}
1395 msng_filenode_set = {}
1502
1396
1503 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1397 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1504 junk = None
1398 junk = None
1505
1399
1506 # A changeset always belongs to itself, so the changenode lookup
1400 # A changeset always belongs to itself, so the changenode lookup
1507 # function for a changenode is identity.
1401 # function for a changenode is identity.
1508 def identity(x):
1402 def identity(x):
1509 return x
1403 return x
1510
1404
1511 # A function generating function. Sets up an environment for the
1405 # A function generating function. Sets up an environment for the
1512 # inner function.
1406 # inner function.
1513 def cmp_by_rev_func(revlog):
1407 def cmp_by_rev_func(revlog):
1514 # Compare two nodes by their revision number in the environment's
1408 # Compare two nodes by their revision number in the environment's
1515 # revision history. Since the revision number both represents the
1409 # revision history. Since the revision number both represents the
1516 # most efficient order to read the nodes in, and represents a
1410 # most efficient order to read the nodes in, and represents a
1517 # topological sorting of the nodes, this function is often useful.
1411 # topological sorting of the nodes, this function is often useful.
1518 def cmp_by_rev(a, b):
1412 def cmp_by_rev(a, b):
1519 return cmp(revlog.rev(a), revlog.rev(b))
1413 return cmp(revlog.rev(a), revlog.rev(b))
1520 return cmp_by_rev
1414 return cmp_by_rev
1521
1415
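Editor's note: cmp_by_rev_func is a closure factory — it captures a revlog and returns a comparator that orders nodes by revision number, which is both the efficient read order and a topological order. A rough illustration with a toy node-to-revision mapping (hypothetical data, not the real revlog API); cmp-style sorting is a Python 2 idiom, so the Python 3 form via functools.cmp_to_key is shown as well:

    import functools

    toy_revs = {"n3": 3, "n0": 0, "n7": 7, "n2": 2}   # hypothetical node -> rev map

    def cmp_by_rev_func(revmap):
        # Return a comparator bound to this particular revision map.
        def cmp_by_rev(a, b):
            return (revmap[a] > revmap[b]) - (revmap[a] < revmap[b])
        return cmp_by_rev

    nodes = list(toy_revs)
    # Python 2: nodes.sort(cmp_by_rev_func(toy_revs))
    nodes.sort(key=functools.cmp_to_key(cmp_by_rev_func(toy_revs)))
    assert nodes == ["n0", "n2", "n3", "n7"]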
1522 # If we determine that a particular file or manifest node must be a
1416 # If we determine that a particular file or manifest node must be a
1523 # node that the recipient of the changegroup will already have, we can
1417 # node that the recipient of the changegroup will already have, we can
1524 # also assume the recipient will have all the parents. This function
1418 # also assume the recipient will have all the parents. This function
1525 # prunes them from the set of missing nodes.
1419 # prunes them from the set of missing nodes.
1526 def prune_parents(revlog, hasset, msngset):
1420 def prune_parents(revlog, hasset, msngset):
1527 haslst = hasset.keys()
1421 haslst = hasset.keys()
1528 haslst.sort(cmp_by_rev_func(revlog))
1422 haslst.sort(cmp_by_rev_func(revlog))
1529 for node in haslst:
1423 for node in haslst:
1530 parentlst = [p for p in revlog.parents(node) if p != nullid]
1424 parentlst = [p for p in revlog.parents(node) if p != nullid]
1531 while parentlst:
1425 while parentlst:
1532 n = parentlst.pop()
1426 n = parentlst.pop()
1533 if n not in hasset:
1427 if n not in hasset:
1534 hasset[n] = 1
1428 hasset[n] = 1
1535 p = [p for p in revlog.parents(n) if p != nullid]
1429 p = [p for p in revlog.parents(n) if p != nullid]
1536 parentlst.extend(p)
1430 parentlst.extend(p)
1537 for n in hasset:
1431 for n in hasset:
1538 msngset.pop(n, None)
1432 msngset.pop(n, None)
1539
1433
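Editor's note: prune_parents walks ancestry — once a node is known to be on the recipient's side, every ancestor of it can be dropped from the "missing" set. A self-contained sketch of that pruning over a toy parent map (hypothetical data; None stands in for nullid):

    parents = {            # child -> list of parents
        "d": ["c"], "c": ["b"], "b": ["a"], "a": [None], "x": [None],
    }

    def prune_ancestors(parents, hasset, msngset):
        # Everything reachable from a "has" node is also had; drop it from missing.
        stack = list(hasset)
        while stack:
            n = stack.pop()
            for p in parents[n]:
                if p is not None and p not in hasset:
                    hasset[p] = 1
                    stack.append(p)
        for n in hasset:
            msngset.pop(n, None)

    missing = dict.fromkeys(["a", "b", "c", "d", "x"])
    prune_ancestors(parents, {"c": 1}, missing)
    assert sorted(missing) == ["d", "x"]     # ancestors of "c" were pruned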
1540 # This is a function generating function used to set up an environment
1434 # This is a function generating function used to set up an environment
1541 # for the inner function to execute in.
1435 # for the inner function to execute in.
1542 def manifest_and_file_collector(changedfileset):
1436 def manifest_and_file_collector(changedfileset):
1543 # This is an information gathering function that gathers
1437 # This is an information gathering function that gathers
1544 # information from each changeset node that goes out as part of
1438 # information from each changeset node that goes out as part of
1545 # the changegroup. The information gathered is a list of which
1439 # the changegroup. The information gathered is a list of which
1546 # manifest nodes are potentially required (the recipient may
1440 # manifest nodes are potentially required (the recipient may
1547 # already have them) and the total list of all files which were
1441 # already have them) and the total list of all files which were
1548 # changed in any changeset in the changegroup.
1442 # changed in any changeset in the changegroup.
1549 #
1443 #
1550 # We also remember the first changenode each manifest was seen
1444 # We also remember the first changenode each manifest was seen
1551 # referenced by, so we can later determine which changenode 'owns'
1445 # referenced by, so we can later determine which changenode 'owns'
1552 # the manifest.
1446 # the manifest.
1553 def collect_manifests_and_files(clnode):
1447 def collect_manifests_and_files(clnode):
1554 c = cl.read(clnode)
1448 c = cl.read(clnode)
1555 for f in c[3]:
1449 for f in c[3]:
1556 # This is to make sure we only have one instance of each
1450 # This is to make sure we only have one instance of each
1557 # filename string for each filename.
1451 # filename string for each filename.
1558 changedfileset.setdefault(f, f)
1452 changedfileset.setdefault(f, f)
1559 msng_mnfst_set.setdefault(c[0], clnode)
1453 msng_mnfst_set.setdefault(c[0], clnode)
1560 return collect_manifests_and_files
1454 return collect_manifests_and_files
1561
1455
1562 # Figure out which manifest nodes (of the ones we think might be part
1456 # Figure out which manifest nodes (of the ones we think might be part
1563 # of the changegroup) the recipient must know about and remove them
1457 # of the changegroup) the recipient must know about and remove them
1564 # from the changegroup.
1458 # from the changegroup.
1565 def prune_manifests():
1459 def prune_manifests():
1566 has_mnfst_set = {}
1460 has_mnfst_set = {}
1567 for n in msng_mnfst_set:
1461 for n in msng_mnfst_set:
1568 # If a 'missing' manifest thinks it belongs to a changenode
1462 # If a 'missing' manifest thinks it belongs to a changenode
1569 # the recipient is assumed to have, obviously the recipient
1463 # the recipient is assumed to have, obviously the recipient
1570 # must have that manifest.
1464 # must have that manifest.
1571 linknode = cl.node(mnfst.linkrev(n))
1465 linknode = cl.node(mnfst.linkrev(n))
1572 if linknode in has_cl_set:
1466 if linknode in has_cl_set:
1573 has_mnfst_set[n] = 1
1467 has_mnfst_set[n] = 1
1574 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1468 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1575
1469
1576 # Use the information collected in collect_manifests_and_files to say
1470 # Use the information collected in collect_manifests_and_files to say
1577 # which changenode any manifestnode belongs to.
1471 # which changenode any manifestnode belongs to.
1578 def lookup_manifest_link(mnfstnode):
1472 def lookup_manifest_link(mnfstnode):
1579 return msng_mnfst_set[mnfstnode]
1473 return msng_mnfst_set[mnfstnode]
1580
1474
1581 # A function generating function that sets up the initial environment
1475 # A function generating function that sets up the initial environment
1582 # for the inner function.
1476 # for the inner function.
1583 def filenode_collector(changedfiles):
1477 def filenode_collector(changedfiles):
1584 next_rev = [0]
1478 next_rev = [0]
1585 # This gathers information from each manifestnode included in the
1479 # This gathers information from each manifestnode included in the
1586 # changegroup about which filenodes the manifest node references
1480 # changegroup about which filenodes the manifest node references
1587 # so we can include those in the changegroup too.
1481 # so we can include those in the changegroup too.
1588 #
1482 #
1589 # It also remembers which changenode each filenode belongs to. It
1483 # It also remembers which changenode each filenode belongs to. It
1590 # does this by assuming a filenode belongs to the same changenode as
1484 # does this by assuming a filenode belongs to the same changenode as
1591 # the first manifest that references it.
1485 # the first manifest that references it.
1592 def collect_msng_filenodes(mnfstnode):
1486 def collect_msng_filenodes(mnfstnode):
1593 r = mnfst.rev(mnfstnode)
1487 r = mnfst.rev(mnfstnode)
1594 if r == next_rev[0]:
1488 if r == next_rev[0]:
1595 # If the last rev we looked at was the one just previous,
1489 # If the last rev we looked at was the one just previous,
1596 # we only need to see a diff.
1490 # we only need to see a diff.
1597 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1491 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1598 # For each line in the delta
1492 # For each line in the delta
1599 for dline in delta.splitlines():
1493 for dline in delta.splitlines():
1600 # get the filename and filenode for that line
1494 # get the filename and filenode for that line
1601 f, fnode = dline.split('\0')
1495 f, fnode = dline.split('\0')
1602 fnode = bin(fnode[:40])
1496 fnode = bin(fnode[:40])
1603 f = changedfiles.get(f, None)
1497 f = changedfiles.get(f, None)
1604 # And if the file is in the list of files we care
1498 # And if the file is in the list of files we care
1605 # about.
1499 # about.
1606 if f is not None:
1500 if f is not None:
1607 # Get the changenode this manifest belongs to
1501 # Get the changenode this manifest belongs to
1608 clnode = msng_mnfst_set[mnfstnode]
1502 clnode = msng_mnfst_set[mnfstnode]
1609 # Create the set of filenodes for the file if
1503 # Create the set of filenodes for the file if
1610 # there isn't one already.
1504 # there isn't one already.
1611 ndset = msng_filenode_set.setdefault(f, {})
1505 ndset = msng_filenode_set.setdefault(f, {})
1612 # And set the filenode's changelog node to the
1506 # And set the filenode's changelog node to the
1613 # manifest's if it hasn't been set already.
1507 # manifest's if it hasn't been set already.
1614 ndset.setdefault(fnode, clnode)
1508 ndset.setdefault(fnode, clnode)
1615 else:
1509 else:
1616 # Otherwise we need a full manifest.
1510 # Otherwise we need a full manifest.
1617 m = mnfst.read(mnfstnode)
1511 m = mnfst.read(mnfstnode)
1618 # For every file we care about.
1512 # For every file we care about.
1619 for f in changedfiles:
1513 for f in changedfiles:
1620 fnode = m.get(f, None)
1514 fnode = m.get(f, None)
1621 # If it's in the manifest
1515 # If it's in the manifest
1622 if fnode is not None:
1516 if fnode is not None:
1623 # See comments above.
1517 # See comments above.
1624 clnode = msng_mnfst_set[mnfstnode]
1518 clnode = msng_mnfst_set[mnfstnode]
1625 ndset = msng_filenode_set.setdefault(f, {})
1519 ndset = msng_filenode_set.setdefault(f, {})
1626 ndset.setdefault(fnode, clnode)
1520 ndset.setdefault(fnode, clnode)
1627 # Remember the revision we hope to see next.
1521 # Remember the revision we hope to see next.
1628 next_rev[0] = r + 1
1522 next_rev[0] = r + 1
1629 return collect_msng_filenodes
1523 return collect_msng_filenodes
1630
1524
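Editor's note: the collector above walks a manifest delta line by line; each line is "filename\0hex-node", and only the first 40 hex characters of the node are decoded (bin here is essentially binascii.unhexlify). A sketch of that parsing step alone, with made-up data:

    from binascii import unhexlify as bin

    delta_text = ("foo/bar.py\x00" + "ab" * 20 + "\n" +
                  "README\x00" + "cd" * 20 + "\n")

    changedfiles = {"README": "README"}       # only files we care about
    found = {}
    for dline in delta_text.splitlines():
        f, fnode = dline.split("\0")
        fnode = bin(fnode[:40])               # 40 hex chars -> 20-byte binary node
        if changedfiles.get(f) is not None:
            found[f] = fnode

    assert list(found) == ["README"] and len(found["README"]) == 20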
1631 # We have a list of filenodes we think we need for a file; let's remove
1525 # We have a list of filenodes we think we need for a file; let's remove
1632 # all those we know the recipient must have.
1526 # all those we know the recipient must have.
1633 def prune_filenodes(f, filerevlog):
1527 def prune_filenodes(f, filerevlog):
1634 msngset = msng_filenode_set[f]
1528 msngset = msng_filenode_set[f]
1635 hasset = {}
1529 hasset = {}
1636 # If a 'missing' filenode thinks it belongs to a changenode we
1530 # If a 'missing' filenode thinks it belongs to a changenode we
1637 # assume the recipient must have, then the recipient must have
1531 # assume the recipient must have, then the recipient must have
1638 # that filenode.
1532 # that filenode.
1639 for n in msngset:
1533 for n in msngset:
1640 clnode = cl.node(filerevlog.linkrev(n))
1534 clnode = cl.node(filerevlog.linkrev(n))
1641 if clnode in has_cl_set:
1535 if clnode in has_cl_set:
1642 hasset[n] = 1
1536 hasset[n] = 1
1643 prune_parents(filerevlog, hasset, msngset)
1537 prune_parents(filerevlog, hasset, msngset)
1644
1538
1645 # A function generating function that sets up a context for the
1539 # A function generating function that sets up a context for the
1646 # inner function.
1540 # inner function.
1647 def lookup_filenode_link_func(fname):
1541 def lookup_filenode_link_func(fname):
1648 msngset = msng_filenode_set[fname]
1542 msngset = msng_filenode_set[fname]
1649 # Lookup the changenode the filenode belongs to.
1543 # Lookup the changenode the filenode belongs to.
1650 def lookup_filenode_link(fnode):
1544 def lookup_filenode_link(fnode):
1651 return msngset[fnode]
1545 return msngset[fnode]
1652 return lookup_filenode_link
1546 return lookup_filenode_link
1653
1547
1654 # Now that we have all these utility functions to help out and
1548 # Now that we have all these utility functions to help out and
1655 # logically divide up the task, generate the group.
1549 # logically divide up the task, generate the group.
1656 def gengroup():
1550 def gengroup():
1657 # The set of changed files starts empty.
1551 # The set of changed files starts empty.
1658 changedfiles = {}
1552 changedfiles = {}
1659 # Create a changenode group generator that will call our functions
1553 # Create a changenode group generator that will call our functions
1660 # back to lookup the owning changenode and collect information.
1554 # back to lookup the owning changenode and collect information.
1661 group = cl.group(msng_cl_lst, identity,
1555 group = cl.group(msng_cl_lst, identity,
1662 manifest_and_file_collector(changedfiles))
1556 manifest_and_file_collector(changedfiles))
1663 for chnk in group:
1557 for chnk in group:
1664 yield chnk
1558 yield chnk
1665
1559
1666 # The list of manifests has been collected by the generator
1560 # The list of manifests has been collected by the generator
1667 # calling our functions back.
1561 # calling our functions back.
1668 prune_manifests()
1562 prune_manifests()
1669 msng_mnfst_lst = msng_mnfst_set.keys()
1563 msng_mnfst_lst = msng_mnfst_set.keys()
1670 # Sort the manifestnodes by revision number.
1564 # Sort the manifestnodes by revision number.
1671 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1565 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1672 # Create a generator for the manifestnodes that calls our lookup
1566 # Create a generator for the manifestnodes that calls our lookup
1673 # and data collection functions back.
1567 # and data collection functions back.
1674 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1568 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1675 filenode_collector(changedfiles))
1569 filenode_collector(changedfiles))
1676 for chnk in group:
1570 for chnk in group:
1677 yield chnk
1571 yield chnk
1678
1572
1679 # These are no longer needed, dereference and toss the memory for
1573 # These are no longer needed, dereference and toss the memory for
1680 # them.
1574 # them.
1681 msng_mnfst_lst = None
1575 msng_mnfst_lst = None
1682 msng_mnfst_set.clear()
1576 msng_mnfst_set.clear()
1683
1577
1684 changedfiles = changedfiles.keys()
1578 changedfiles = changedfiles.keys()
1685 changedfiles.sort()
1579 changedfiles.sort()
1686 # Go through all our files in order sorted by name.
1580 # Go through all our files in order sorted by name.
1687 for fname in changedfiles:
1581 for fname in changedfiles:
1688 filerevlog = self.file(fname)
1582 filerevlog = self.file(fname)
1689 # Toss out the filenodes that the recipient isn't really
1583 # Toss out the filenodes that the recipient isn't really
1690 # missing.
1584 # missing.
1691 if msng_filenode_set.has_key(fname):
1585 if msng_filenode_set.has_key(fname):
1692 prune_filenodes(fname, filerevlog)
1586 prune_filenodes(fname, filerevlog)
1693 msng_filenode_lst = msng_filenode_set[fname].keys()
1587 msng_filenode_lst = msng_filenode_set[fname].keys()
1694 else:
1588 else:
1695 msng_filenode_lst = []
1589 msng_filenode_lst = []
1696 # If any filenodes are left, generate the group for them,
1590 # If any filenodes are left, generate the group for them,
1697 # otherwise don't bother.
1591 # otherwise don't bother.
1698 if len(msng_filenode_lst) > 0:
1592 if len(msng_filenode_lst) > 0:
1699 yield changegroup.genchunk(fname)
1593 yield changegroup.genchunk(fname)
1700 # Sort the filenodes by their revision #
1594 # Sort the filenodes by their revision #
1701 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1595 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1702 # Create a group generator and only pass in a changenode
1596 # Create a group generator and only pass in a changenode
1703 # lookup function as we need to collect no information
1597 # lookup function as we need to collect no information
1704 # from filenodes.
1598 # from filenodes.
1705 group = filerevlog.group(msng_filenode_lst,
1599 group = filerevlog.group(msng_filenode_lst,
1706 lookup_filenode_link_func(fname))
1600 lookup_filenode_link_func(fname))
1707 for chnk in group:
1601 for chnk in group:
1708 yield chnk
1602 yield chnk
1709 if msng_filenode_set.has_key(fname):
1603 if msng_filenode_set.has_key(fname):
1710 # Don't need this anymore, toss it to free memory.
1604 # Don't need this anymore, toss it to free memory.
1711 del msng_filenode_set[fname]
1605 del msng_filenode_set[fname]
1712 # Signal that no more groups are left.
1606 # Signal that no more groups are left.
1713 yield changegroup.closechunk()
1607 yield changegroup.closechunk()
1714
1608
1715 if msng_cl_lst:
1609 if msng_cl_lst:
1716 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1610 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1717
1611
1718 return util.chunkbuffer(gengroup())
1612 return util.chunkbuffer(gengroup())
1719
1613
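Editor's note: the whole group is produced lazily — gengroup yields chunks and util.chunkbuffer wraps that generator in a file-like object so callers can read() arbitrary amounts. A minimal, hypothetical stand-in for such a wrapper (not Mercurial's implementation) to show the pattern:

    class ChunkBuffer(object):
        """File-like read() interface over an iterator of byte chunks."""
        def __init__(self, gen):
            self._iter = iter(gen)
            self._buf = b""

        def read(self, n):
            while len(self._buf) < n:
                try:
                    self._buf += next(self._iter)
                except StopIteration:
                    break
            data, self._buf = self._buf[:n], self._buf[n:]
            return data

    def gengroup():
        for chunk in (b"abc", b"defgh", b"ij"):
            yield chunk

    cb = ChunkBuffer(gengroup())
    assert cb.read(4) == b"abcd" and cb.read(100) == b"efghij"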
1720 def changegroup(self, basenodes, source):
1614 def changegroup(self, basenodes, source):
1721 """Generate a changegroup of all nodes that we have that a recipient
1615 """Generate a changegroup of all nodes that we have that a recipient
1722 doesn't.
1616 doesn't.
1723
1617
1724 This is much easier than the previous function as we can assume that
1618 This is much easier than the previous function as we can assume that
1725 the recipient has any changenode we aren't sending them."""
1619 the recipient has any changenode we aren't sending them."""
1726
1620
1727 self.hook('preoutgoing', throw=True, source=source)
1621 self.hook('preoutgoing', throw=True, source=source)
1728
1622
1729 cl = self.changelog
1623 cl = self.changelog
1730 nodes = cl.nodesbetween(basenodes, None)[0]
1624 nodes = cl.nodesbetween(basenodes, None)[0]
1731 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1625 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1732 self.changegroupinfo(nodes)
1626 self.changegroupinfo(nodes)
1733
1627
1734 def identity(x):
1628 def identity(x):
1735 return x
1629 return x
1736
1630
1737 def gennodelst(revlog):
1631 def gennodelst(revlog):
1738 for r in xrange(0, revlog.count()):
1632 for r in xrange(0, revlog.count()):
1739 n = revlog.node(r)
1633 n = revlog.node(r)
1740 if revlog.linkrev(n) in revset:
1634 if revlog.linkrev(n) in revset:
1741 yield n
1635 yield n
1742
1636
1743 def changed_file_collector(changedfileset):
1637 def changed_file_collector(changedfileset):
1744 def collect_changed_files(clnode):
1638 def collect_changed_files(clnode):
1745 c = cl.read(clnode)
1639 c = cl.read(clnode)
1746 for fname in c[3]:
1640 for fname in c[3]:
1747 changedfileset[fname] = 1
1641 changedfileset[fname] = 1
1748 return collect_changed_files
1642 return collect_changed_files
1749
1643
1750 def lookuprevlink_func(revlog):
1644 def lookuprevlink_func(revlog):
1751 def lookuprevlink(n):
1645 def lookuprevlink(n):
1752 return cl.node(revlog.linkrev(n))
1646 return cl.node(revlog.linkrev(n))
1753 return lookuprevlink
1647 return lookuprevlink
1754
1648
1755 def gengroup():
1649 def gengroup():
1756 # construct a list of all changed files
1650 # construct a list of all changed files
1757 changedfiles = {}
1651 changedfiles = {}
1758
1652
1759 for chnk in cl.group(nodes, identity,
1653 for chnk in cl.group(nodes, identity,
1760 changed_file_collector(changedfiles)):
1654 changed_file_collector(changedfiles)):
1761 yield chnk
1655 yield chnk
1762 changedfiles = changedfiles.keys()
1656 changedfiles = changedfiles.keys()
1763 changedfiles.sort()
1657 changedfiles.sort()
1764
1658
1765 mnfst = self.manifest
1659 mnfst = self.manifest
1766 nodeiter = gennodelst(mnfst)
1660 nodeiter = gennodelst(mnfst)
1767 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1661 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1768 yield chnk
1662 yield chnk
1769
1663
1770 for fname in changedfiles:
1664 for fname in changedfiles:
1771 filerevlog = self.file(fname)
1665 filerevlog = self.file(fname)
1772 nodeiter = gennodelst(filerevlog)
1666 nodeiter = gennodelst(filerevlog)
1773 nodeiter = list(nodeiter)
1667 nodeiter = list(nodeiter)
1774 if nodeiter:
1668 if nodeiter:
1775 yield changegroup.genchunk(fname)
1669 yield changegroup.genchunk(fname)
1776 lookup = lookuprevlink_func(filerevlog)
1670 lookup = lookuprevlink_func(filerevlog)
1777 for chnk in filerevlog.group(nodeiter, lookup):
1671 for chnk in filerevlog.group(nodeiter, lookup):
1778 yield chnk
1672 yield chnk
1779
1673
1780 yield changegroup.closechunk()
1674 yield changegroup.closechunk()
1781
1675
1782 if nodes:
1676 if nodes:
1783 self.hook('outgoing', node=hex(nodes[0]), source=source)
1677 self.hook('outgoing', node=hex(nodes[0]), source=source)
1784
1678
1785 return util.chunkbuffer(gengroup())
1679 return util.chunkbuffer(gengroup())
1786
1680
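Editor's note: this simpler changegroup path rests on one assumption — a manifest or file revision belongs in the group exactly when its linkrev points at one of the outgoing changesets, and gennodelst is just that filter. A toy sketch of the idea with plain dictionaries standing in for revlog objects (hypothetical data):

    # Hypothetical revlog: revision index -> (node, linkrev into the changelog)
    filelog = {0: ("f0", 0), 1: ("f1", 2), 2: ("f2", 3), 3: ("f3", 5)}

    # Changelog revisions that are going out in this changegroup.
    revset = dict.fromkeys([2, 3])

    def gennodelst(log):
        for r in range(len(log)):
            node, linkrev = log[r]
            if linkrev in revset:
                yield node

    assert list(gennodelst(filelog)) == ["f1", "f2"]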
1787 def addchangegroup(self, source, srctype, url):
1681 def addchangegroup(self, source, srctype, url):
1788 """add changegroup to repo.
1682 """add changegroup to repo.
1789
1683
1790 return values:
1684 return values:
1791 - nothing changed or no source: 0
1685 - nothing changed or no source: 0
1792 - more heads than before: 1+added heads (2..n)
1686 - more heads than before: 1+added heads (2..n)
1793 - fewer heads than before: -1-removed heads (-2..-n)
1687 - fewer heads than before: -1-removed heads (-2..-n)
1794 - number of heads stays the same: 1
1688 - number of heads stays the same: 1
1795 """
1689 """
1796 def csmap(x):
1690 def csmap(x):
1797 self.ui.debug(_("add changeset %s\n") % short(x))
1691 self.ui.debug(_("add changeset %s\n") % short(x))
1798 return cl.count()
1692 return cl.count()
1799
1693
1800 def revmap(x):
1694 def revmap(x):
1801 return cl.rev(x)
1695 return cl.rev(x)
1802
1696
1803 if not source:
1697 if not source:
1804 return 0
1698 return 0
1805
1699
1806 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1700 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1807
1701
1808 changesets = files = revisions = 0
1702 changesets = files = revisions = 0
1809
1703
1810 tr = self.transaction()
1704 tr = self.transaction()
1811
1705
1812 # write changelog data to temp files so concurrent readers will not see
1706 # write changelog data to temp files so concurrent readers will not see
1813 # an inconsistent view
1707 # an inconsistent view
1814 cl = None
1708 cl = None
1815 try:
1709 try:
1816 cl = appendfile.appendchangelog(self.sopener,
1710 cl = appendfile.appendchangelog(self.sopener,
1817 self.changelog.version)
1711 self.changelog.version)
1818
1712
1819 oldheads = len(cl.heads())
1713 oldheads = len(cl.heads())
1820
1714
1821 # pull off the changeset group
1715 # pull off the changeset group
1822 self.ui.status(_("adding changesets\n"))
1716 self.ui.status(_("adding changesets\n"))
1823 cor = cl.count() - 1
1717 cor = cl.count() - 1
1824 chunkiter = changegroup.chunkiter(source)
1718 chunkiter = changegroup.chunkiter(source)
1825 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1719 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1826 raise util.Abort(_("received changelog group is empty"))
1720 raise util.Abort(_("received changelog group is empty"))
1827 cnr = cl.count() - 1
1721 cnr = cl.count() - 1
1828 changesets = cnr - cor
1722 changesets = cnr - cor
1829
1723
1830 # pull off the manifest group
1724 # pull off the manifest group
1831 self.ui.status(_("adding manifests\n"))
1725 self.ui.status(_("adding manifests\n"))
1832 chunkiter = changegroup.chunkiter(source)
1726 chunkiter = changegroup.chunkiter(source)
1833 # no need to check for empty manifest group here:
1727 # no need to check for empty manifest group here:
1834 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1728 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1835 # no new manifest will be created and the manifest group will
1729 # no new manifest will be created and the manifest group will
1836 # be empty during the pull
1730 # be empty during the pull
1837 self.manifest.addgroup(chunkiter, revmap, tr)
1731 self.manifest.addgroup(chunkiter, revmap, tr)
1838
1732
1839 # process the files
1733 # process the files
1840 self.ui.status(_("adding file changes\n"))
1734 self.ui.status(_("adding file changes\n"))
1841 while 1:
1735 while 1:
1842 f = changegroup.getchunk(source)
1736 f = changegroup.getchunk(source)
1843 if not f:
1737 if not f:
1844 break
1738 break
1845 self.ui.debug(_("adding %s revisions\n") % f)
1739 self.ui.debug(_("adding %s revisions\n") % f)
1846 fl = self.file(f)
1740 fl = self.file(f)
1847 o = fl.count()
1741 o = fl.count()
1848 chunkiter = changegroup.chunkiter(source)
1742 chunkiter = changegroup.chunkiter(source)
1849 if fl.addgroup(chunkiter, revmap, tr) is None:
1743 if fl.addgroup(chunkiter, revmap, tr) is None:
1850 raise util.Abort(_("received file revlog group is empty"))
1744 raise util.Abort(_("received file revlog group is empty"))
1851 revisions += fl.count() - o
1745 revisions += fl.count() - o
1852 files += 1
1746 files += 1
1853
1747
1854 cl.writedata()
1748 cl.writedata()
1855 finally:
1749 finally:
1856 if cl:
1750 if cl:
1857 cl.cleanup()
1751 cl.cleanup()
1858
1752
1859 # make changelog see real files again
1753 # make changelog see real files again
1860 self.changelog = changelog.changelog(self.sopener,
1754 self.changelog = changelog.changelog(self.sopener,
1861 self.changelog.version)
1755 self.changelog.version)
1862 self.changelog.checkinlinesize(tr)
1756 self.changelog.checkinlinesize(tr)
1863
1757
1864 newheads = len(self.changelog.heads())
1758 newheads = len(self.changelog.heads())
1865 heads = ""
1759 heads = ""
1866 if oldheads and newheads != oldheads:
1760 if oldheads and newheads != oldheads:
1867 heads = _(" (%+d heads)") % (newheads - oldheads)
1761 heads = _(" (%+d heads)") % (newheads - oldheads)
1868
1762
1869 self.ui.status(_("added %d changesets"
1763 self.ui.status(_("added %d changesets"
1870 " with %d changes to %d files%s\n")
1764 " with %d changes to %d files%s\n")
1871 % (changesets, revisions, files, heads))
1765 % (changesets, revisions, files, heads))
1872
1766
1873 if changesets > 0:
1767 if changesets > 0:
1874 self.hook('pretxnchangegroup', throw=True,
1768 self.hook('pretxnchangegroup', throw=True,
1875 node=hex(self.changelog.node(cor+1)), source=srctype,
1769 node=hex(self.changelog.node(cor+1)), source=srctype,
1876 url=url)
1770 url=url)
1877
1771
1878 tr.close()
1772 tr.close()
1879
1773
1880 if changesets > 0:
1774 if changesets > 0:
1881 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1775 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1882 source=srctype, url=url)
1776 source=srctype, url=url)
1883
1777
1884 for i in xrange(cor + 1, cnr + 1):
1778 for i in xrange(cor + 1, cnr + 1):
1885 self.hook("incoming", node=hex(self.changelog.node(i)),
1779 self.hook("incoming", node=hex(self.changelog.node(i)),
1886 source=srctype, url=url)
1780 source=srctype, url=url)
1887
1781
1888 # never return 0 here:
1782 # never return 0 here:
1889 if newheads < oldheads:
1783 if newheads < oldheads:
1890 return newheads - oldheads - 1
1784 return newheads - oldheads - 1
1891 else:
1785 else:
1892 return newheads - oldheads + 1
1786 return newheads - oldheads + 1
1893
1787
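Editor's note: the return value of addchangegroup packs the head delta into one integer — the sign says whether heads were added or removed, and the magnitude minus one is the delta, with 0 reserved for "nothing changed or no source". A small helper (names are mine, for illustration only) that mirrors the arithmetic at the end of the method:

    def head_delta_code(oldheads, newheads):
        # 0 is reserved for "nothing changed or no source" and never produced here.
        if newheads < oldheads:
            return newheads - oldheads - 1   # -2..-n: that many heads removed
        return newheads - oldheads + 1       # 1: head count unchanged, 2..n: heads added

    assert head_delta_code(1, 1) == 1        # same number of heads
    assert head_delta_code(1, 3) == 3        # +2 heads -> 3
    assert head_delta_code(3, 1) == -3       # -2 heads -> -3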
1894
1788
1895 def stream_in(self, remote):
1789 def stream_in(self, remote):
1896 fp = remote.stream_out()
1790 fp = remote.stream_out()
1897 l = fp.readline()
1791 l = fp.readline()
1898 try:
1792 try:
1899 resp = int(l)
1793 resp = int(l)
1900 except ValueError:
1794 except ValueError:
1901 raise util.UnexpectedOutput(
1795 raise util.UnexpectedOutput(
1902 _('Unexpected response from remote server:'), l)
1796 _('Unexpected response from remote server:'), l)
1903 if resp == 1:
1797 if resp == 1:
1904 raise util.Abort(_('operation forbidden by server'))
1798 raise util.Abort(_('operation forbidden by server'))
1905 elif resp == 2:
1799 elif resp == 2:
1906 raise util.Abort(_('locking the remote repository failed'))
1800 raise util.Abort(_('locking the remote repository failed'))
1907 elif resp != 0:
1801 elif resp != 0:
1908 raise util.Abort(_('the server sent an unknown error code'))
1802 raise util.Abort(_('the server sent an unknown error code'))
1909 self.ui.status(_('streaming all changes\n'))
1803 self.ui.status(_('streaming all changes\n'))
1910 l = fp.readline()
1804 l = fp.readline()
1911 try:
1805 try:
1912 total_files, total_bytes = map(int, l.split(' ', 1))
1806 total_files, total_bytes = map(int, l.split(' ', 1))
1913 except (ValueError, TypeError):
1807 except (ValueError, TypeError):
1914 raise util.UnexpectedOutput(
1808 raise util.UnexpectedOutput(
1915 _('Unexpected response from remote server:'), l)
1809 _('Unexpected response from remote server:'), l)
1916 self.ui.status(_('%d files to transfer, %s of data\n') %
1810 self.ui.status(_('%d files to transfer, %s of data\n') %
1917 (total_files, util.bytecount(total_bytes)))
1811 (total_files, util.bytecount(total_bytes)))
1918 start = time.time()
1812 start = time.time()
1919 for i in xrange(total_files):
1813 for i in xrange(total_files):
1920 # XXX doesn't support '\n' or '\r' in filenames
1814 # XXX doesn't support '\n' or '\r' in filenames
1921 l = fp.readline()
1815 l = fp.readline()
1922 try:
1816 try:
1923 name, size = l.split('\0', 1)
1817 name, size = l.split('\0', 1)
1924 size = int(size)
1818 size = int(size)
1925 except (ValueError, TypeError):
1819 except (ValueError, TypeError):
1926 raise util.UnexpectedOutput(
1820 raise util.UnexpectedOutput(
1927 _('Unexpected response from remote server:'), l)
1821 _('Unexpected response from remote server:'), l)
1928 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1822 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1929 ofp = self.sopener(name, 'w')
1823 ofp = self.sopener(name, 'w')
1930 for chunk in util.filechunkiter(fp, limit=size):
1824 for chunk in util.filechunkiter(fp, limit=size):
1931 ofp.write(chunk)
1825 ofp.write(chunk)
1932 ofp.close()
1826 ofp.close()
1933 elapsed = time.time() - start
1827 elapsed = time.time() - start
1934 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1828 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1935 (util.bytecount(total_bytes), elapsed,
1829 (util.bytecount(total_bytes), elapsed,
1936 util.bytecount(total_bytes / elapsed)))
1830 util.bytecount(total_bytes / elapsed)))
1937 self.reload()
1831 self.reload()
1938 return len(self.heads()) + 1
1832 return len(self.heads()) + 1
1939
1833
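Editor's note: stream_in reads a very small text framing — one line with a response code, one line with "<file count> <byte count>", then for each file a "name\0size" line followed by exactly size raw bytes. A sketch of a parser for that framing over an in-memory stream (illustration only; the real data comes from remote.stream_out(), and the specific error codes are checked above):

    import io

    def parse_stream(fp):
        resp = int(fp.readline())
        if resp != 0:
            raise RuntimeError("server refused streaming (code %d)" % resp)
        total_files, total_bytes = map(int, fp.readline().split(b" ", 1))
        files = {}
        for _ in range(total_files):
            name, size = fp.readline().rstrip(b"\n").split(b"\0", 1)
            files[name.decode()] = fp.read(int(size))
        return files

    wire = b"0\n2 11\nfoo\x003\nabcbar/baz\x008\n01234567"
    files = parse_stream(io.BytesIO(wire))
    assert files == {"foo": b"abc", "bar/baz": b"01234567"}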
1940 def clone(self, remote, heads=[], stream=False):
1834 def clone(self, remote, heads=[], stream=False):
1941 '''clone remote repository.
1835 '''clone remote repository.
1942
1836
1943 keyword arguments:
1837 keyword arguments:
1944 heads: list of revs to clone (forces use of pull)
1838 heads: list of revs to clone (forces use of pull)
1945 stream: use streaming clone if possible'''
1839 stream: use streaming clone if possible'''
1946
1840
1947 # now, all clients that can request uncompressed clones can
1841 # now, all clients that can request uncompressed clones can
1948 # read repo formats supported by all servers that can serve
1842 # read repo formats supported by all servers that can serve
1949 # them.
1843 # them.
1950
1844
1951 # if revlog format changes, client will have to check version
1845 # if revlog format changes, client will have to check version
1952 # and format flags on "stream" capability, and use
1846 # and format flags on "stream" capability, and use
1953 # uncompressed only if compatible.
1847 # uncompressed only if compatible.
1954
1848
1955 if stream and not heads and remote.capable('stream'):
1849 if stream and not heads and remote.capable('stream'):
1956 return self.stream_in(remote)
1850 return self.stream_in(remote)
1957 return self.pull(remote, heads)
1851 return self.pull(remote, heads)
1958
1852
1959 # used to avoid circular references so destructors work
1853 # used to avoid circular references so destructors work
1960 def aftertrans(files):
1854 def aftertrans(files):
1961 renamefiles = [tuple(t) for t in files]
1855 renamefiles = [tuple(t) for t in files]
1962 def a():
1856 def a():
1963 for src, dest in renamefiles:
1857 for src, dest in renamefiles:
1964 util.rename(src, dest)
1858 util.rename(src, dest)
1965 return a
1859 return a
1966
1860
1967 def instance(ui, path, create):
1861 def instance(ui, path, create):
1968 return localrepository(ui, util.drop_scheme('file', path), create)
1862 return localrepository(ui, util.drop_scheme('file', path), create)
1969
1863
1970 def islocal(path):
1864 def islocal(path):
1971 return True
1865 return True
@@ -1,495 +1,498 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "errno util os tempfile")
11 demandload(globals(), "errno util os tempfile")
12
12
13 def filemerge(repo, fw, fo, wctx, mctx):
13 def filemerge(repo, fw, fo, wctx, mctx):
14 """perform a 3-way merge in the working directory
14 """perform a 3-way merge in the working directory
15
15
16 fw = filename in the working directory
16 fw = filename in the working directory
17 fo = filename in other parent
17 fo = filename in other parent
18 wctx, mctx = working and merge changecontexts
18 wctx, mctx = working and merge changecontexts
19 """
19 """
20
20
21 def temp(prefix, ctx):
21 def temp(prefix, ctx):
22 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
22 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
23 (fd, name) = tempfile.mkstemp(prefix=pre)
23 (fd, name) = tempfile.mkstemp(prefix=pre)
24 f = os.fdopen(fd, "wb")
24 f = os.fdopen(fd, "wb")
25 repo.wwrite(ctx.path(), ctx.data(), f)
25 repo.wwrite(ctx.path(), ctx.data(), f)
26 f.close()
26 f.close()
27 return name
27 return name
28
28
29 fcm = wctx.filectx(fw)
29 fcm = wctx.filectx(fw)
30 fco = mctx.filectx(fo)
30 fco = mctx.filectx(fo)
31
31
32 if not fco.cmp(fcm.data()): # files identical?
32 if not fco.cmp(fcm.data()): # files identical?
33 return None
33 return None
34
34
35 fca = fcm.ancestor(fco)
35 fca = fcm.ancestor(fco)
36 if not fca:
36 if not fca:
37 fca = repo.filectx(fw, fileid=nullrev)
37 fca = repo.filectx(fw, fileid=nullrev)
38 a = repo.wjoin(fw)
38 a = repo.wjoin(fw)
39 b = temp("base", fca)
39 b = temp("base", fca)
40 c = temp("other", fco)
40 c = temp("other", fco)
41
41
42 if fw != fo:
42 if fw != fo:
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 else:
44 else:
45 repo.ui.status(_("merging %s\n") % fw)
45 repo.ui.status(_("merging %s\n") % fw)
46
46
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48
48
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 or "hgmerge")
50 or "hgmerge")
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 environ={'HG_FILE': fw,
52 environ={'HG_FILE': fw,
53 'HG_MY_NODE': str(wctx.parents()[0]),
53 'HG_MY_NODE': str(wctx.parents()[0]),
54 'HG_OTHER_NODE': str(mctx)})
54 'HG_OTHER_NODE': str(mctx)})
55 if r:
55 if r:
56 repo.ui.warn(_("merging %s failed!\n") % fw)
56 repo.ui.warn(_("merging %s failed!\n") % fw)
57
57
58 os.unlink(b)
58 os.unlink(b)
59 os.unlink(c)
59 os.unlink(c)
60 return r
60 return r
61
61
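Editor's note: filemerge shells out to an external 3-way merge program (HGMERGE, ui.merge, or hgmerge as a fallback), passing the local, base and other files plus a few HG_* environment variables. A hedged sketch of that launch pattern using only the standard library — "mymergetool" and the argument handling here are placeholders, not Mercurial's util.system:

    import os, subprocess

    def run_merge_tool(local, base, other, env_extra):
        cmd = os.environ.get("HGMERGE", "mymergetool")     # placeholder default
        env = dict(os.environ, **env_extra)
        # The tool's exit status: non-zero means the merge failed or has conflicts.
        return subprocess.call([cmd, local, base, other], env=env)

    # Example call (only works if such a tool actually exists on PATH):
    # status = run_merge_tool("work/file.txt", "/tmp/base123", "/tmp/other456",
    #                         {"HG_FILE": "file.txt"})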
62 def checkunknown(wctx, mctx):
62 def checkunknown(wctx, mctx):
63 "check for collisions between unknown files and files in mctx"
63 "check for collisions between unknown files and files in mctx"
64 man = mctx.manifest()
64 man = mctx.manifest()
65 for f in wctx.unknown():
65 for f in wctx.unknown():
66 if f in man:
66 if f in man:
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 raise util.Abort(_("untracked local file '%s' differs"\
68 raise util.Abort(_("untracked local file '%s' differs"\
69 " from remote version") % f)
69 " from remote version") % f)
70
70
71 def checkcollision(mctx):
71 def checkcollision(mctx):
72 "check for case folding collisions in the destination context"
72 "check for case folding collisions in the destination context"
73 folded = {}
73 folded = {}
74 for fn in mctx.manifest():
74 for fn in mctx.manifest():
75 fold = fn.lower()
75 fold = fn.lower()
76 if fold in folded:
76 if fold in folded:
77 raise util.Abort(_("case-folding collision between %s and %s")
77 raise util.Abort(_("case-folding collision between %s and %s")
78 % (fn, folded[fold]))
78 % (fn, folded[fold]))
79 folded[fold] = fn
79 folded[fold] = fn
80
80
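Editor's note: checkcollision is a straight duplicate scan over lowercased names. The same check, stripped of repo objects:

    def check_case_collisions(filenames):
        folded = {}
        for fn in filenames:
            fold = fn.lower()
            if fold in folded:
                raise ValueError("case-folding collision between %s and %s"
                                 % (fn, folded[fold]))
            folded[fold] = fn

    check_case_collisions(["README", "src/a.py"])          # fine
    try:
        check_case_collisions(["README", "readme"])
    except ValueError as err:
        assert "collision" in str(err)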
81 def forgetremoved(wctx, mctx):
81 def forgetremoved(wctx, mctx):
82 """
82 """
83 Forget removed files
83 Forget removed files
84
84
85 If we're jumping between revisions (as opposed to merging), and if
85 If we're jumping between revisions (as opposed to merging), and if
86 neither the working directory nor the target rev has the file,
86 neither the working directory nor the target rev has the file,
87 then we need to remove it from the dirstate, to prevent the
87 then we need to remove it from the dirstate, to prevent the
88 dirstate from listing the file when it is no longer in the
88 dirstate from listing the file when it is no longer in the
89 manifest.
89 manifest.
90 """
90 """
91
91
92 action = []
92 action = []
93 man = mctx.manifest()
93 man = mctx.manifest()
94 for f in wctx.deleted() + wctx.removed():
94 for f in wctx.deleted() + wctx.removed():
95 if f not in man:
95 if f not in man:
96 action.append((f, "f"))
96 action.append((f, "f"))
97
97
98 return action
98 return action
99
99
100 def findcopies(repo, m1, m2, ma, limit):
100 def findcopies(repo, m1, m2, ma, limit):
101 """
101 """
102 Find moves and copies between m1 and m2 back to limit linkrev
102 Find moves and copies between m1 and m2 back to limit linkrev
103 """
103 """
104
104
105 def findold(fctx):
105 def findold(fctx):
106 "find files that path was copied from, back to linkrev limit"
106 "find files that path was copied from, back to linkrev limit"
107 old = {}
107 old = {}
108 orig = fctx.path()
108 orig = fctx.path()
109 visit = [fctx]
109 visit = [fctx]
110 while visit:
110 while visit:
111 fc = visit.pop()
111 fc = visit.pop()
112 if fc.path() != orig and fc.path() not in old:
112 if fc.path() != orig and fc.path() not in old:
113 old[fc.path()] = 1
113 old[fc.path()] = 1
114 if fc.rev() < limit:
114 if fc.rev() < limit:
115 continue
115 continue
116 visit += fc.parents()
116 visit += fc.parents()
117
117
118 old = old.keys()
118 old = old.keys()
119 old.sort()
119 old.sort()
120 return old
120 return old
121
121
122 def nonoverlap(d1, d2, d3):
122 def nonoverlap(d1, d2, d3):
123 "Return list of elements in d1 not in d2 or d3"
123 "Return list of elements in d1 not in d2 or d3"
124 l = [d for d in d1 if d not in d3 and d not in d2]
124 l = [d for d in d1 if d not in d3 and d not in d2]
125 l.sort()
125 l.sort()
126 return l
126 return l
127
127
128 def checkcopies(c, man):
128 def checkcopies(c, man):
129 '''check possible copies for filectx c'''
129 '''check possible copies for filectx c'''
130 for of in findold(c):
130 for of in findold(c):
131 if of not in man:
131 if of not in man:
132 return
132 return
133 c2 = ctx(of, man[of])
133 c2 = ctx(of, man[of])
134 ca = c.ancestor(c2)
134 ca = c.ancestor(c2)
135 if not ca: # unrelated
135 if not ca: # unrelated
136 return
136 return
137 if ca.path() == c.path() or ca.path() == c2.path():
137 if ca.path() == c.path() or ca.path() == c2.path():
138 fullcopy[c.path()] = of
138 fullcopy[c.path()] = of
139 if c == ca or c2 == ca: # no merge needed, ignore copy
139 if c == ca or c2 == ca: # no merge needed, ignore copy
140 return
140 return
141 copy[c.path()] = of
141 copy[c.path()] = of
142
142
143 def dirs(files):
143 def dirs(files):
144 d = {}
144 d = {}
145 for f in files:
145 for f in files:
146 d[os.path.dirname(f)] = True
146 d[os.path.dirname(f)] = True
147 return d
147 return d
148
148
149 if not repo.ui.configbool("merge", "followcopies", True):
149 if not repo.ui.configbool("merge", "followcopies", True):
150 return {}
150 return {}
151
151
152 # avoid silly behavior for update from empty dir
152 # avoid silly behavior for update from empty dir
153 if not m1 or not m2 or not ma:
153 if not m1 or not m2 or not ma:
154 return {}
154 return {}
155
155
156 dcopies = repo.dirstate.copies()
156 dcopies = repo.dirstate.copies()
157 copy = {}
157 copy = {}
158 fullcopy = {}
158 fullcopy = {}
159 u1 = nonoverlap(m1, m2, ma)
159 u1 = nonoverlap(m1, m2, ma)
160 u2 = nonoverlap(m2, m1, ma)
160 u2 = nonoverlap(m2, m1, ma)
161 ctx = util.cachefunc(lambda f, n: repo.filectx(f, fileid=n[:20]))
161 ctx = util.cachefunc(lambda f, n: repo.filectx(f, fileid=n[:20]))
162
162
163 for f in u1:
163 for f in u1:
164 checkcopies(ctx(dcopies.get(f, f), m1[f]), m2)
164 checkcopies(ctx(dcopies.get(f, f), m1[f]), m2)
165
165
166 for f in u2:
166 for f in u2:
167 checkcopies(ctx(f, m2[f]), m1)
167 checkcopies(ctx(f, m2[f]), m1)
168
168
169 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
169 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
170 return copy
170 return copy
171
171
172 # generate a directory move map
172 # generate a directory move map
173 d1, d2 = dirs(m1), dirs(m2)
173 d1, d2 = dirs(m1), dirs(m2)
174 invalid = {}
174 invalid = {}
175 dirmove = {}
175 dirmove = {}
176
176
177 for dst, src in fullcopy.items():
177 for dst, src in fullcopy.items():
178 dsrc, ddst = os.path.dirname(src), os.path.dirname(dst)
178 dsrc, ddst = os.path.dirname(src), os.path.dirname(dst)
179 if dsrc in invalid:
179 if dsrc in invalid:
180 continue
180 continue
181 elif (dsrc in d1 and ddst in d1) or (dsrc in d2 and ddst in d2):
181 elif (dsrc in d1 and ddst in d1) or (dsrc in d2 and ddst in d2):
182 invalid[dsrc] = True
182 invalid[dsrc] = True
183 elif dsrc in dirmove and dirmove[dsrc] != ddst:
183 elif dsrc in dirmove and dirmove[dsrc] != ddst:
184 invalid[dsrc] = True
184 invalid[dsrc] = True
185 del dirmove[dsrc]
185 del dirmove[dsrc]
186 else:
186 else:
187 dirmove[dsrc] = ddst
187 dirmove[dsrc] = ddst
188
188
189 del d1, d2, invalid
189 del d1, d2, invalid
190
190
191 if not dirmove:
191 if not dirmove:
192 return copy
192 return copy
193
193
194 # check unaccounted nonoverlapping files
194 # check unaccounted nonoverlapping files
195 for f in u1 + u2:
195 for f in u1 + u2:
196 if f not in fullcopy:
196 if f not in fullcopy:
197 d = os.path.dirname(f)
197 d = os.path.dirname(f)
198 if d in dirmove:
198 if d in dirmove:
199 copy[f] = dirmove[d] + "/" + os.path.basename(f)
199 copy[f] = dirmove[d] + "/" + os.path.basename(f)
200
200
201 return copy
201 return copy
202
202
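Editor's note: the directory-rename detection above reduces per-file copies to a directory-level map — if every tracked copy out of a directory lands in the same destination directory, files with no explicit copy record are assumed to follow the directory. A compact sketch of that inference with plain paths (hypothetical inputs, simplified relative to the d1/d2 validity checks in the real code):

    import os

    fullcopy = {                       # dst file -> src file, as detected per file
        "newdir/a.txt": "olddir/a.txt",
        "newdir/b.txt": "olddir/b.txt",
    }

    dirmove, invalid = {}, set()
    for dst, src in fullcopy.items():
        dsrc, ddst = os.path.dirname(src), os.path.dirname(dst)
        if dsrc in invalid:
            continue
        if dsrc in dirmove and dirmove[dsrc] != ddst:
            invalid.add(dsrc)          # directory split two ways: give up on it
            del dirmove[dsrc]
        else:
            dirmove[dsrc] = ddst

    # A file nobody recorded a copy for still follows its renamed directory.
    unaccounted = "olddir/c.txt"
    d = os.path.dirname(unaccounted)
    if d in dirmove:
        copy_target = dirmove[d] + "/" + os.path.basename(unaccounted)
    assert copy_target == "newdir/c.txt"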
203 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
203 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
204 """
204 """
205 Merge p1 and p2 with ancestor pa and generate merge action list
205 Merge p1 and p2 with ancestor pa and generate merge action list
206
206
207 overwrite = whether we clobber working files
207 overwrite = whether we clobber working files
208 partial = function to filter file lists
208 partial = function to filter file lists
209 """
209 """
210
210
211 repo.ui.note(_("resolving manifests\n"))
211 repo.ui.note(_("resolving manifests\n"))
212 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
212 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
213 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
213 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
214
214
215 m1 = p1.manifest()
215 m1 = p1.manifest()
216 m2 = p2.manifest()
216 m2 = p2.manifest()
217 ma = pa.manifest()
217 ma = pa.manifest()
218 backwards = (pa == p2)
218 backwards = (pa == p2)
219 action = []
219 action = []
220 copy = {}
220 copy = {}
221
221
222 def fmerge(f, f2=None, fa=None):
222 def fmerge(f, f2=None, fa=None):
223 """merge executable flags"""
223 """merge executable flags"""
224 if not f2:
224 if not f2:
225 f2 = f
225 f2 = f
226 fa = f
226 fa = f
227 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
227 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
228 return ((a^b) | (a^c)) ^ a
228 return ((a^b) | (a^c)) ^ a
229
229
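Editor's note: the xor expression in fmerge has a simple reading — keep the ancestor's executable bit unless a side changed it, in which case take the changed side (for booleans, if both sides changed it they necessarily agree). A quick brute-force check of that reading:

    def fmerge_bits(a, b, c):          # a: ancestor, b: local, c: other (0 or 1)
        return ((a ^ b) | (a ^ c)) ^ a

    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                expected = b if c == a else c   # whichever side differs from the ancestor
                assert fmerge_bits(a, b, c) == expected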
230 def act(msg, m, f, *args):
230 def act(msg, m, f, *args):
231 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
231 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
232 action.append((f, m) + args)
232 action.append((f, m) + args)
233
233
234 if not (backwards or overwrite):
234 if not (backwards or overwrite):
235 copy = findcopies(repo, m1, m2, ma, pa.rev())
235 copy = findcopies(repo, m1, m2, ma, pa.rev())
236 copied = dict.fromkeys(copy.values())
236 copied = dict.fromkeys(copy.values())
237
237
238 # Compare manifests
238 # Compare manifests
239 for f, n in m1.iteritems():
239 for f, n in m1.iteritems():
240 if partial and not partial(f):
240 if partial and not partial(f):
241 continue
241 continue
242 if f in m2:
242 if f in m2:
243 # are files different?
243 # are files different?
244 if n != m2[f]:
244 if n != m2[f]:
245 a = ma.get(f, nullid)
245 a = ma.get(f, nullid)
246 # are both different from the ancestor?
246 # are both different from the ancestor?
247 if not overwrite and n != a and m2[f] != a:
247 if not overwrite and n != a and m2[f] != a:
248 act("versions differ", "m", f, f, f, fmerge(f), False)
248 act("versions differ", "m", f, f, f, fmerge(f), False)
249 # are we clobbering?
249 # are we clobbering?
250 # is remote's version newer?
250 # is remote's version newer?
251 # or are we going back in time and clean?
251 # or are we going back in time and clean?
252 elif overwrite or m2[f] != a or (backwards and not n[20:]):
252 elif overwrite or m2[f] != a or (backwards and not n[20:]):
253 act("remote is newer", "g", f, m2.execf(f))
253 act("remote is newer", "g", f, m2.execf(f))
254 # local is newer, not overwrite, check mode bits
254 # local is newer, not overwrite, check mode bits
255 elif fmerge(f) != m1.execf(f):
255 elif fmerge(f) != m1.execf(f):
256 act("update permissions", "e", f, m2.execf(f))
256 act("update permissions", "e", f, m2.execf(f))
257 # contents same, check mode bits
257 # contents same, check mode bits
258 elif m1.execf(f) != m2.execf(f):
258 elif m1.execf(f) != m2.execf(f):
259 if overwrite or fmerge(f) != m1.execf(f):
259 if overwrite or fmerge(f) != m1.execf(f):
260 act("update permissions", "e", f, m2.execf(f))
260 act("update permissions", "e", f, m2.execf(f))
261 elif f in copied:
261 elif f in copied:
262 continue
262 continue
263 elif f in copy:
263 elif f in copy:
264 f2 = copy[f]
264 f2 = copy[f]
265 if f2 not in m2: # directory rename
265 if f2 not in m2: # directory rename
266 act("remote renamed directory to " + f2, "d",
266 act("remote renamed directory to " + f2, "d",
267 f, None, f2, m1.execf(f))
267 f, None, f2, m1.execf(f))
268 elif f2 in m1: # case 2 A,B/B/B
268 elif f2 in m1: # case 2 A,B/B/B
269 act("local copied to " + f2, "m",
269 act("local copied to " + f2, "m",
270 f, f2, f, fmerge(f, f2, f2), False)
270 f, f2, f, fmerge(f, f2, f2), False)
271 else: # case 4,21 A/B/B
271 else: # case 4,21 A/B/B
272 act("local moved to " + f2, "m",
272 act("local moved to " + f2, "m",
273 f, f2, f, fmerge(f, f2, f2), False)
273 f, f2, f, fmerge(f, f2, f2), False)
274 elif f in ma:
274 elif f in ma:
275 if n != ma[f] and not overwrite:
275 if n != ma[f] and not overwrite:
276 if repo.ui.prompt(
276 if repo.ui.prompt(
277 (_(" local changed %s which remote deleted\n") % f) +
277 (_(" local changed %s which remote deleted\n") % f) +
278 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
278 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
279 act("prompt delete", "r", f)
279 act("prompt delete", "r", f)
280 else:
280 else:
281 act("other deleted", "r", f)
281 act("other deleted", "r", f)
282 else:
282 else:
283 # file is created on branch or in working directory
283 # file is created on branch or in working directory
284 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
284 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
285 act("remote deleted", "r", f)
285 act("remote deleted", "r", f)
286
286
287 for f, n in m2.iteritems():
287 for f, n in m2.iteritems():
288 if partial and not partial(f):
288 if partial and not partial(f):
289 continue
289 continue
290 if f in m1:
290 if f in m1:
291 continue
291 continue
292 if f in copied:
292 if f in copied:
293 continue
293 continue
294 if f in copy:
294 if f in copy:
295 f2 = copy[f]
295 f2 = copy[f]
296 if f2 not in m1: # directory rename
296 if f2 not in m1: # directory rename
297 act("local renamed directory to " + f2, "d",
297 act("local renamed directory to " + f2, "d",
298 None, f, f2, m2.execf(f))
298 None, f, f2, m2.execf(f))
299 elif f2 in m2: # rename case 1, A/A,B/A
299 elif f2 in m2: # rename case 1, A/A,B/A
300 act("remote copied to " + f, "m",
300 act("remote copied to " + f, "m",
301 f2, f, f, fmerge(f2, f, f2), False)
301 f2, f, f, fmerge(f2, f, f2), False)
302 else: # case 3,20 A/B/A
302 else: # case 3,20 A/B/A
303 act("remote moved to " + f, "m",
303 act("remote moved to " + f, "m",
304 f2, f, f, fmerge(f2, f, f2), True)
304 f2, f, f, fmerge(f2, f, f2), True)
305 elif f in ma:
305 elif f in ma:
306 if overwrite or backwards:
306 if overwrite or backwards:
307 act("recreating", "g", f, m2.execf(f))
307 act("recreating", "g", f, m2.execf(f))
308 elif n != ma[f]:
308 elif n != ma[f]:
309 if repo.ui.prompt(
309 if repo.ui.prompt(
310 (_("remote changed %s which local deleted\n") % f) +
310 (_("remote changed %s which local deleted\n") % f) +
311 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
311 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
312 act("prompt recreating", "g", f, m2.execf(f))
312 act("prompt recreating", "g", f, m2.execf(f))
313 else:
313 else:
314 act("remote created", "g", f, m2.execf(f))
314 act("remote created", "g", f, m2.execf(f))
315
315
316 return action
316 return action
317
317
318 def applyupdates(repo, action, wctx, mctx):
318 def applyupdates(repo, action, wctx, mctx):
319 "apply the merge action list to the working directory"
319 "apply the merge action list to the working directory"
320
320
321 updated, merged, removed, unresolved = 0, 0, 0, 0
321 updated, merged, removed, unresolved = 0, 0, 0, 0
322 action.sort()
322 action.sort()
323 for a in action:
323 for a in action:
324 f, m = a[:2]
324 f, m = a[:2]
325 if f and f[0] == "/":
325 if f and f[0] == "/":
326 continue
326 continue
327 if m == "r": # remove
327 if m == "r": # remove
328 repo.ui.note(_("removing %s\n") % f)
328 repo.ui.note(_("removing %s\n") % f)
329 util.audit_path(f)
329 util.audit_path(f)
330 try:
330 try:
331 util.unlink(repo.wjoin(f))
331 util.unlink(repo.wjoin(f))
332 except OSError, inst:
332 except OSError, inst:
333 if inst.errno != errno.ENOENT:
333 if inst.errno != errno.ENOENT:
334 repo.ui.warn(_("update failed to remove %s: %s!\n") %
334 repo.ui.warn(_("update failed to remove %s: %s!\n") %
335 (f, inst.strerror))
335 (f, inst.strerror))
336 removed += 1
336 removed += 1
337 elif m == "m": # merge
337 elif m == "m": # merge
338 f2, fd, flag, move = a[2:]
338 f2, fd, flag, move = a[2:]
339 r = filemerge(repo, f, f2, wctx, mctx)
339 r = filemerge(repo, f, f2, wctx, mctx)
340 if r > 0:
340 if r > 0:
341 unresolved += 1
341 unresolved += 1
342 else:
342 else:
343 if r is None:
343 if r is None:
344 updated += 1
344 updated += 1
345 else:
345 else:
346 merged += 1
346 merged += 1
347 if f != fd:
347 if f != fd:
348 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
348 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
349 repo.wwrite(fd, repo.wread(f))
349 repo.wwrite(fd, repo.wread(f))
350 if move:
350 if move:
351 repo.ui.debug(_("removing %s\n") % f)
351 repo.ui.debug(_("removing %s\n") % f)
352 os.unlink(repo.wjoin(f))
352 os.unlink(repo.wjoin(f))
353 util.set_exec(repo.wjoin(fd), flag)
353 util.set_exec(repo.wjoin(fd), flag)
354 elif m == "g": # get
354 elif m == "g": # get
355 flag = a[2]
355 flag = a[2]
356 repo.ui.note(_("getting %s\n") % f)
356 repo.ui.note(_("getting %s\n") % f)
357 t = mctx.filectx(f).data()
357 t = mctx.filectx(f).data()
358 repo.wwrite(f, t)
358 repo.wwrite(f, t)
359 util.set_exec(repo.wjoin(f), flag)
359 util.set_exec(repo.wjoin(f), flag)
360 updated += 1
360 updated += 1
361 elif m == "d": # directory rename
361 elif m == "d": # directory rename
362 f2, fd, flag = a[2:]
362 f2, fd, flag = a[2:]
363 if f:
363 if f:
364 repo.ui.note(_("moving %s to %s\n") % (f, fd))
364 repo.ui.note(_("moving %s to %s\n") % (f, fd))
365 t = wctx.filectx(f).data()
365 t = wctx.filectx(f).data()
366 repo.wwrite(fd, t)
366 repo.wwrite(fd, t)
367 util.set_exec(repo.wjoin(fd), flag)
367 util.set_exec(repo.wjoin(fd), flag)
368 util.unlink(repo.wjoin(f))
368 util.unlink(repo.wjoin(f))
369 if f2:
369 if f2:
370 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
370 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
371 t = mctx.filectx(f2).data()
371 t = mctx.filectx(f2).data()
372 repo.wwrite(fd, t)
372 repo.wwrite(fd, t)
373 util.set_exec(repo.wjoin(fd), flag)
373 util.set_exec(repo.wjoin(fd), flag)
374 updated += 1
374 updated += 1
375 elif m == "e": # exec
375 elif m == "e": # exec
376 flag = a[2]
376 flag = a[2]
377 util.set_exec(repo.wjoin(f), flag)
377 util.set_exec(repo.wjoin(f), flag)
378
378
379 return updated, merged, removed, unresolved
379 return updated, merged, removed, unresolved
380
380
381 def recordupdates(repo, action, branchmerge):
381 def recordupdates(repo, action, branchmerge):
382 "record merge actions to the dirstate"
382 "record merge actions to the dirstate"
383
383
384 for a in action:
384 for a in action:
385 f, m = a[:2]
385 f, m = a[:2]
386 if m == "r": # remove
386 if m == "r": # remove
387 if branchmerge:
387 if branchmerge:
388 repo.dirstate.update([f], 'r')
388 repo.dirstate.update([f], 'r')
389 else:
389 else:
390 repo.dirstate.forget([f])
390 repo.dirstate.forget([f])
391 elif m == "f": # forget
391 elif m == "f": # forget
392 repo.dirstate.forget([f])
392 repo.dirstate.forget([f])
393 elif m == "g": # get
393 elif m == "g": # get
394 if branchmerge:
394 if branchmerge:
395 repo.dirstate.update([f], 'n', st_mtime=-1)
395 repo.dirstate.update([f], 'n', st_mtime=-1)
396 else:
396 else:
397 repo.dirstate.update([f], 'n')
397 repo.dirstate.update([f], 'n')
398 elif m == "m": # merge
398 elif m == "m": # merge
399 f2, fd, flag, move = a[2:]
399 f2, fd, flag, move = a[2:]
400 if branchmerge:
400 if branchmerge:
401 # We've done a branch merge, mark this file as merged
401 # We've done a branch merge, mark this file as merged
402 # so that we properly record the merger later
402 # so that we properly record the merge later
402 # so that we properly record the merge later
403 repo.dirstate.update([fd], 'm')
404 if f != f2: # copy/rename
404 if f != f2: # copy/rename
405 if move:
405 if move:
406 repo.dirstate.update([f], 'r')
406 repo.dirstate.update([f], 'r')
407 if f != fd:
407 if f != fd:
408 repo.dirstate.copy(f, fd)
408 repo.dirstate.copy(f, fd)
409 else:
409 else:
410 repo.dirstate.copy(f2, fd)
410 repo.dirstate.copy(f2, fd)
411 else:
411 else:
412 # We've update-merged a locally modified file, so
412 # We've update-merged a locally modified file, so
413 # we set the dirstate to emulate a normal checkout
413 # we set the dirstate to emulate a normal checkout
414 # of that file some time in the past. Thus our
414 # of that file some time in the past. Thus our
415 # merge will appear as a normal local file
415 # merge will appear as a normal local file
416 # modification.
416 # modification.
417 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
417 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
418 if move:
418 if move:
419 repo.dirstate.forget([f])
419 repo.dirstate.forget([f])
420 elif m == "d": # directory rename
420 elif m == "d": # directory rename
421 f2, fd, flag = a[2:]
421 f2, fd, flag = a[2:]
422 if branchmerge:
422 if branchmerge:
423 repo.dirstate.update([fd], 'a')
423 repo.dirstate.update([fd], 'a')
424 if f:
424 if f:
425 repo.dirstate.update([f], 'r')
425 repo.dirstate.update([f], 'r')
426 repo.dirstate.copy(f, fd)
426 repo.dirstate.copy(f, fd)
427 if f2:
427 if f2:
428 repo.dirstate.copy(f2, fd)
428 repo.dirstate.copy(f2, fd)
429 else:
429 else:
430 repo.dirstate.update([fd], 'n')
430 repo.dirstate.update([fd], 'n')
431 if f:
431 if f:
432 repo.dirstate.forget([f])
432 repo.dirstate.forget([f])
433
433
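Before the update() driver below, here is a rough summary (illustration only, not code from this file) of how recordupdates() maps each action code onto dirstate states: 'n' is normal, 'm' is merged, 'r' is removed, 'a' is added, and forget drops the entry entirely. The st_size=-1 / st_mtime=-1 values make the entry look stale, so the file shows up as a local modification rather than clean, matching the in-line comment above.

    # Summary table (illustration only) of recordupdates() behaviour.
    DIRSTATE_EFFECT = {
        #  code   during a branch merge                       during a plain update
        "r": ("mark 'r'",                                  "forget"),
        "f": ("forget",                                    "forget"),
        "g": ("mark 'n' with st_mtime=-1",                 "mark 'n'"),
        "m": ("mark 'm', plus copy/rename bookkeeping",    "mark 'n' with st_size=-1, st_mtime=-1"),
        "d": ("mark target 'a', source 'r', record copy",  "mark target 'n', forget source"),
    }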
434 def update(repo, node, branchmerge, force, partial, wlock):
434 def update(repo, node, branchmerge, force, partial, wlock):
435 """
435 """
436 Perform a merge between the working directory and the given node
436 Perform a merge between the working directory and the given node
437
437
438 branchmerge = whether to merge between branches
438 branchmerge = whether to merge between branches
439 force = whether to force branch merging or file overwriting
439 force = whether to force branch merging or file overwriting
440 partial = a function to filter file lists (dirstate not updated)
440 partial = a function to filter file lists (dirstate not updated)
441 wlock = working dir lock, if already held
441 wlock = working dir lock, if already held
442 """
442 """
443
443
444 if node is None:
445 node = "tip"
446
444 if not wlock:
447 if not wlock:
445 wlock = repo.wlock()
448 wlock = repo.wlock()
446
449
447 overwrite = force and not branchmerge
450 overwrite = force and not branchmerge
448 forcemerge = force and branchmerge
451 forcemerge = force and branchmerge
449 wc = repo.workingctx()
452 wc = repo.workingctx()
450 pl = wc.parents()
453 pl = wc.parents()
451 p1, p2 = pl[0], repo.changectx(node)
454 p1, p2 = pl[0], repo.changectx(node)
452 pa = p1.ancestor(p2)
455 pa = p1.ancestor(p2)
453 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
456 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
454
457
455 ### check phase
458 ### check phase
456 if not overwrite and len(pl) > 1:
459 if not overwrite and len(pl) > 1:
457 raise util.Abort(_("outstanding uncommitted merges"))
460 raise util.Abort(_("outstanding uncommitted merges"))
458 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
461 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
459 if branchmerge:
462 if branchmerge:
460 raise util.Abort(_("there is nothing to merge, just use "
463 raise util.Abort(_("there is nothing to merge, just use "
461 "'hg update' or look at 'hg heads'"))
464 "'hg update' or look at 'hg heads'"))
462 elif not (overwrite or branchmerge):
465 elif not (overwrite or branchmerge):
463 raise util.Abort(_("update spans branches, use 'hg merge' "
466 raise util.Abort(_("update spans branches, use 'hg merge' "
464 "or 'hg update -C' to lose changes"))
467 "or 'hg update -C' to lose changes"))
465 if branchmerge and not forcemerge:
468 if branchmerge and not forcemerge:
466 if wc.files():
469 if wc.files():
467 raise util.Abort(_("outstanding uncommitted changes"))
470 raise util.Abort(_("outstanding uncommitted changes"))
468
471
469 ### calculate phase
472 ### calculate phase
470 action = []
473 action = []
471 if not force:
474 if not force:
472 checkunknown(wc, p2)
475 checkunknown(wc, p2)
473 if not util.checkfolding(repo.path):
476 if not util.checkfolding(repo.path):
474 checkcollision(p2)
477 checkcollision(p2)
475 if not branchmerge:
478 if not branchmerge:
476 action += forgetremoved(wc, p2)
479 action += forgetremoved(wc, p2)
477 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
480 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
478
481
479 ### apply phase
482 ### apply phase
480 if not branchmerge: # just jump to the new rev
483 if not branchmerge: # just jump to the new rev
481 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
484 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
482 if not partial:
485 if not partial:
483 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
486 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
484
487
485 stats = applyupdates(repo, action, wc, p2)
488 stats = applyupdates(repo, action, wc, p2)
486
489
487 if not partial:
490 if not partial:
488 recordupdates(repo, action, branchmerge)
491 recordupdates(repo, action, branchmerge)
489 repo.dirstate.setparents(fp1, fp2)
492 repo.dirstate.setparents(fp1, fp2)
490 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
493 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
491 if not branchmerge:
494 if not branchmerge:
492 repo.opener("branch", "w").write(p2.branch() + "\n")
495 repo.opener("branch", "w").write(p2.branch() + "\n")
493
496
494 return stats
497 return stats
495
498
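The call sites for update() live outside this file, but as a rough sketch of how the flags in the docstring combine (the wrapper names below are hypothetical and error handling is omitted): a plain 'hg update' is branchmerge=False, 'hg update -C' additionally passes force=True so local changes are overwritten, and 'hg merge' is branchmerge=True, matching the abort messages in the check phase above.

    # Hypothetical wrappers, for illustration only.
    def clean_update(repo, rev, wlock=None):
        # roughly 'hg update -C REV': overwrite, nothing is recorded as merged
        return update(repo, rev, branchmerge=False, force=True,
                      partial=None, wlock=wlock)

    def branch_merge(repo, rev, force=False, wlock=None):
        # roughly 'hg merge REV': merge the other head into the working dir;
        # returns (updated, merged, removed, unresolved) from applyupdates()
        return update(repo, rev, branchmerge=True, force=force,
                      partial=None, wlock=wlock)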
NO CONTENT: file was removed
NO CONTENT: file was removed