##// END OF EJS Templates
reverse sense of return value from python hooks....
Vadim Gelfer -
r2221:05b6c13f default
parent child Browse files
Show More
@@ -1,284 +1,283
1 # bugzilla.py - bugzilla integration for mercurial
1 # bugzilla.py - bugzilla integration for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to update comments of bugzilla bugs when changesets
8 # hook extension to update comments of bugzilla bugs when changesets
9 # that refer to bugs by id are seen. this hook does not change bug
9 # that refer to bugs by id are seen. this hook does not change bug
10 # status, only comments.
10 # status, only comments.
11 #
11 #
12 # to configure, add items to '[bugzilla]' section of hgrc.
12 # to configure, add items to '[bugzilla]' section of hgrc.
13 #
13 #
14 # to use, configure bugzilla extension and enable like this:
14 # to use, configure bugzilla extension and enable like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.bugzilla =
17 # hgext.bugzilla =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # run bugzilla hook on every change pulled or pushed in here
20 # # run bugzilla hook on every change pulled or pushed in here
21 # incoming.bugzilla = python:hgext.bugzilla.hook
21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 #
22 #
23 # config items:
23 # config items:
24 #
24 #
25 # REQUIRED:
25 # REQUIRED:
26 # host = bugzilla # mysql server where bugzilla database lives
26 # host = bugzilla # mysql server where bugzilla database lives
27 # password = ** # user's password
27 # password = ** # user's password
28 # version = 2.16 # version of bugzilla installed
28 # version = 2.16 # version of bugzilla installed
29 #
29 #
30 # OPTIONAL:
30 # OPTIONAL:
31 # bzuser = ... # bugzilla user id to record comments with
31 # bzuser = ... # bugzilla user id to record comments with
32 # db = bugs # database to connect to
32 # db = bugs # database to connect to
33 # notify = ... # command to run to get bugzilla to send mail
33 # notify = ... # command to run to get bugzilla to send mail
34 # regexp = ... # regexp to match bug ids (must contain one "()" group)
34 # regexp = ... # regexp to match bug ids (must contain one "()" group)
35 # strip = 0 # number of slashes to strip for url paths
35 # strip = 0 # number of slashes to strip for url paths
36 # style = ... # style file to use when formatting comments
36 # style = ... # style file to use when formatting comments
37 # template = ... # template to use when formatting comments
37 # template = ... # template to use when formatting comments
38 # timeout = 5 # database connection timeout (seconds)
38 # timeout = 5 # database connection timeout (seconds)
39 # user = bugs # user to connect to database as
39 # user = bugs # user to connect to database as
40 # [web]
40 # [web]
41 # baseurl = http://hgserver/... # root of hg web site for browsing commits
41 # baseurl = http://hgserver/... # root of hg web site for browsing commits
42
42
43 from mercurial.demandload import *
43 from mercurial.demandload import *
44 from mercurial.i18n import gettext as _
44 from mercurial.i18n import gettext as _
45 from mercurial.node import *
45 from mercurial.node import *
46 demandload(globals(), 'mercurial:templater,util os re time')
46 demandload(globals(), 'mercurial:templater,util os re time')
47
47
48 MySQLdb = None
48 MySQLdb = None
49
49
def buglist(ids):
    '''format a sequence of bug ids as a SQL "in"-clause, e.g. "(1,2,3)".'''
    return '(%s)' % ','.join(str(bugid) for bugid in ids)
52
52
class bugzilla_2_16(object):
    '''support for bugzilla version 2.16.

    Talks directly to the bugzilla MySQL database: looks up bug and
    user ids, inserts comments into bugs, and runs the external
    "notify" command to make bugzilla send mail.
    '''

    def __init__(self, ui):
        self.ui = ui
        host = self.ui.config('bugzilla', 'host', 'localhost')
        user = self.ui.config('bugzilla', 'user', 'bugs')
        passwd = self.ui.config('bugzilla', 'password')
        db = self.ui.config('bugzilla', 'db', 'bugs')
        timeout = int(self.ui.config('bugzilla', 'timeout', 5))
        self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
                     (host, db, user, '*' * len(passwd)))
        self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
                                    db=db, connect_timeout=timeout)
        self.cursor = self.conn.cursor()
        # cache the field id of the "longdesc" (comment) field; it is
        # needed to record activity rows for comments we add.
        self.run('select fieldid from fielddefs where name = "longdesc"')
        ids = self.cursor.fetchall()
        if len(ids) != 1:
            raise util.Abort(_('unknown database schema'))
        self.longdesc_id = ids[0][0]
        self.user_ids = {}  # cache: user name/string -> numeric bugzilla id

    def run(self, *args, **kwargs):
        '''run a query.'''
        self.ui.note(_('query: %s %s\n') % (args, kwargs))
        try:
            self.cursor.execute(*args, **kwargs)
        except MySQLdb.MySQLError:
            # fix: py3-compatible except clause (was "except X, err"
            # with err never used)
            self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
            raise

    def filter_real_bug_ids(self, ids):
        '''filter not-existing bug ids from list.'''
        self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
        ids = [c[0] for c in self.cursor.fetchall()]
        ids.sort()
        return ids

    def filter_unknown_bug_ids(self, node, ids):
        '''filter bug ids from list that already refer to this changeset.'''
        self.run('''select bug_id from longdescs where
                    bug_id in %s and thetext like "%%%s%%"''' %
                 (buglist(ids), short(node)))
        unknown = dict.fromkeys(ids)
        for (id,) in self.cursor.fetchall():
            self.ui.status(_('bug %d already knows about changeset %s\n') %
                           (id, short(node)))
            unknown.pop(id, None)
        # fix: sorted() instead of py2-only "keys(); sort()" (dict key
        # views have no sort method on modern pythons); same result.
        return sorted(unknown)

    def notify(self, ids):
        '''tell bugzilla to send mail.'''
        self.ui.status(_('telling bugzilla to send mail:\n'))
        for id in ids:
            self.ui.status(_('  bug %s\n') % id)
            cmd = self.ui.config('bugzilla', 'notify',
                                 'cd /var/www/html/bugzilla && '
                                 './processmail %s nobody@nowhere.com') % id
            # capture both stdout and stderr of the notify command
            fp = os.popen('(%s) 2>&1' % cmd)
            out = fp.read()
            ret = fp.close()
            if ret:
                self.ui.warn(out)
                raise util.Abort(_('bugzilla notify command %s') %
                                 util.explain_exit(ret)[0])
        self.ui.status(_('done\n'))

    def get_user_id(self, user):
        '''look up numeric bugzilla user id.

        raises KeyError if the user cannot be found.'''
        try:
            return self.user_ids[user]
        except KeyError:
            try:
                # user may already be a numeric id
                userid = int(user)
            except ValueError:
                self.ui.note(_('looking up user %s\n') % user)
                self.run('''select userid from profiles
                            where login_name like %s''', user)
                all = self.cursor.fetchall()
                if len(all) != 1:
                    raise KeyError(user)
                userid = int(all[0][0])
            self.user_ids[user] = userid
            return userid

    def add_comment(self, bugid, text, prefuser):
        '''add comment to bug. try adding comment as committer of
        changeset, otherwise as default bugzilla user.'''
        try:
            userid = self.get_user_id(prefuser)
        except KeyError:
            try:
                defaultuser = self.ui.config('bugzilla', 'bzuser')
                userid = self.get_user_id(defaultuser)
            except KeyError:
                raise util.Abort(_('cannot find user id for %s or %s') %
                                 (prefuser, defaultuser))
        now = time.strftime('%Y-%m-%d %H:%M:%S')
        self.run('''insert into longdescs
                    (bug_id, who, bug_when, thetext)
                    values (%s, %s, %s, %s)''',
                 (bugid, userid, now, text))
        # record an activity row so bugzilla knows the comment field changed
        self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
                    values (%s, %s, %s, %s)''',
                 (bugid, userid, now, self.longdesc_id))
162
162
class bugzilla(object):
    '''front-end that selects and proxies a version-specific bugzilla
    access class based on the [bugzilla] "version" config item.'''

    # supported versions of bugzilla.  different versions have
    # different schemas.
    _versions = {
        '2.16': bugzilla_2_16,
        }

    _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
                       r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')

    # singleton instance of the version-specific class, shared per process
    _bz = None

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def bz(self):
        '''return object that knows how to talk to bugzilla version in
        use.'''
        if bugzilla._bz is None:
            version = self.ui.config('bugzilla', 'version')
            try:
                bzclass = bugzilla._versions[version]
            except KeyError:
                raise util.Abort(_('bugzilla version %s not supported') %
                                 version)
            bugzilla._bz = bzclass(self.ui)
        return bugzilla._bz

    def __getattr__(self, key):
        # delegate any unknown attribute to the version-specific object
        return getattr(self.bz(), key)

    # lazily compiled regexps, cached on the class
    _bug_re = None
    _split_re = None

    def find_bug_ids(self, node, desc):
        '''find valid bug ids that are referred to in changeset
        comments and that do not already have references to this
        changeset.'''
        if bugzilla._bug_re is None:
            bugzilla._bug_re = re.compile(
                self.ui.config('bugzilla', 'regexp',
                               bugzilla._default_bug_re),
                re.IGNORECASE)
            bugzilla._split_re = re.compile(r'\D+')
        found = {}
        pos = 0
        while True:
            match = bugzilla._bug_re.search(desc, pos)
            if not match:
                break
            pos = match.end()
            # the matched group may contain several ids ("bugs 1, 2 and 3")
            for bugid in bugzilla._split_re.split(match.group(1)):
                found[int(bugid)] = 1
        ids = found.keys()
        if ids:
            ids = self.filter_real_bug_ids(ids)
        if ids:
            ids = self.filter_unknown_bug_ids(node, ids)
        return ids

    def update(self, bugid, node, changes):
        '''update bugzilla bug with reference to changeset.'''

        def webroot(root):
            '''strip leading prefix of repo root and turn into
            url-safe path.'''
            depth = int(self.ui.config('bugzilla', 'strip', 0))
            root = util.pconvert(root)
            while depth > 0:
                slash = root.find('/')
                if slash == -1:
                    break
                root = root[slash + 1:]
                depth -= 1
            return root

        mapfile = self.ui.config('bugzilla', 'style')
        tmpl = self.ui.config('bugzilla', 'template')
        sio = templater.stringio()
        t = templater.changeset_templater(self.ui, self.repo, mapfile, sio)
        if not mapfile and not tmpl:
            # neither style file nor template configured: fall back to
            # a built-in comment template
            tmpl = _('changeset {node|short} in repo {root} refers '
                     'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
        if tmpl:
            tmpl = templater.parsestring(tmpl, quoted=False)
            t.use_template(tmpl)
        t.show(changenode=node, changes=changes,
               bug=str(bugid),
               hgweb=self.ui.config('web', 'baseurl'),
               root=self.repo.root,
               webroot=webroot(self.repo.root))
        self.add_comment(bugid, sio.getvalue(), templater.email(changes[1]))
257
257
def hook(ui, repo, hooktype, node=None, **kwargs):
    '''add comment to bugzilla for each changeset that refers to a
    bugzilla bug id. only add a comment once per bug, so same change
    seen multiple times does not fill bug with duplicate data.

    returns True on success, as the hook machinery treats a true
    return value from a python hook as success.'''
    try:
        import MySQLdb as mysql
        global MySQLdb
        MySQLdb = mysql
    except ImportError as err:
        # fix: py3-compatible except clause (was "except ImportError, err")
        raise util.Abort(_('python mysql support not available: %s') % err)

    if node is None:
        raise util.Abort(_('hook type %s does not pass a changeset id') %
                         hooktype)
    try:
        bz = bugzilla(ui, repo)
        bin_node = bin(node)
        changes = repo.changelog.read(bin_node)
        # changes[4] is the changeset description
        ids = bz.find_bug_ids(bin_node, changes[4])
        if ids:
            for id in ids:
                bz.update(id, bin_node, changes)
            bz.notify(ids)
        return True
    except MySQLdb.MySQLError as err:
        # fix: err.args[1] (the server message) instead of indexing the
        # exception object directly, which modern pythons disallow
        raise util.Abort(_('database error: %s') % err.args[1])
284
283
@@ -1,258 +1,257
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to email notifications to people when changesets are
8 # hook extension to email notifications to people when changesets are
9 # committed to a repo they subscribe to.
9 # committed to a repo they subscribe to.
10 #
10 #
11 # default mode is to print messages to stdout, for testing and
11 # default mode is to print messages to stdout, for testing and
12 # configuring.
12 # configuring.
13 #
13 #
14 # to use, configure notify extension and enable in hgrc like this:
14 # to use, configure notify extension and enable in hgrc like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.notify =
17 # hgext.notify =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # one email for each incoming changeset
20 # # one email for each incoming changeset
21 # incoming.notify = python:hgext.notify.hook
21 # incoming.notify = python:hgext.notify.hook
22 # # batch emails when many changesets incoming at one time
22 # # batch emails when many changesets incoming at one time
23 # changegroup.notify = python:hgext.notify.hook
23 # changegroup.notify = python:hgext.notify.hook
24 #
24 #
25 # [notify]
25 # [notify]
26 # # config items go in here
26 # # config items go in here
27 #
27 #
28 # config items:
28 # config items:
29 #
29 #
30 # REQUIRED:
30 # REQUIRED:
31 # config = /path/to/file # file containing subscriptions
31 # config = /path/to/file # file containing subscriptions
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # test = True # print messages to stdout for testing
34 # test = True # print messages to stdout for testing
35 # strip = 3 # number of slashes to strip for url paths
35 # strip = 3 # number of slashes to strip for url paths
36 # domain = example.com # domain to use if committer missing domain
36 # domain = example.com # domain to use if committer missing domain
37 # style = ... # style file to use when formatting email
37 # style = ... # style file to use when formatting email
38 # template = ... # template to use when formatting email
38 # template = ... # template to use when formatting email
39 # incoming = ... # template to use when run as incoming hook
39 # incoming = ... # template to use when run as incoming hook
40 # changegroup = ... # template when run as changegroup hook
40 # changegroup = ... # template when run as changegroup hook
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 # maxsubject = 67 # truncate subject line longer than this
42 # maxsubject = 67 # truncate subject line longer than this
43 # [email]
43 # [email]
44 # from = user@host.com # email address to send as if none given
44 # from = user@host.com # email address to send as if none given
45 # [web]
45 # [web]
46 # baseurl = http://hgserver/... # root of hg web site for browsing commits
46 # baseurl = http://hgserver/... # root of hg web site for browsing commits
47 #
47 #
48 # notify config file has same format as regular hgrc. it has two
48 # notify config file has same format as regular hgrc. it has two
49 # sections so you can express subscriptions in whatever way is handier
49 # sections so you can express subscriptions in whatever way is handier
50 # for you.
50 # for you.
51 #
51 #
52 # [usersubs]
52 # [usersubs]
53 # # key is subscriber email, value is ","-separated list of glob patterns
53 # # key is subscriber email, value is ","-separated list of glob patterns
54 # user@host = pattern
54 # user@host = pattern
55 #
55 #
56 # [reposubs]
56 # [reposubs]
57 # # key is glob pattern, value is ","-separated list of subscriber emails
57 # # key is glob pattern, value is ","-separated list of subscriber emails
58 # pattern = user@host
58 # pattern = user@host
59 #
59 #
60 # glob patterns are matched against path to repo root.
60 # glob patterns are matched against path to repo root.
61 #
61 #
62 # if you like, you can put notify config file in repo that users can
62 # if you like, you can put notify config file in repo that users can
63 # push changes to, they can manage their own subscriptions.
63 # push changes to, they can manage their own subscriptions.
64
64
65 from mercurial.demandload import *
65 from mercurial.demandload import *
66 from mercurial.i18n import gettext as _
66 from mercurial.i18n import gettext as _
67 from mercurial.node import *
67 from mercurial.node import *
68 demandload(globals(), 'email.Parser mercurial:commands,templater,util')
68 demandload(globals(), 'email.Parser mercurial:commands,templater,util')
69 demandload(globals(), 'fnmatch socket time')
69 demandload(globals(), 'fnmatch socket time')
70
70
# template for single changeset can include email headers.
single_template = '''
Subject: changeset in {webroot}: {desc|firstline|strip}
From: {author}

changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
description:
\t{desc|tabindent|strip}
'''.lstrip()

# template for multiple changesets should not contain email headers,
# because only first set of headers will be used and result will look
# strange.
multiple_template = '''
changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
summary: {desc|firstline}
'''

# per-hooktype default templates; hooks not listed here fall back to
# single_template.
deftemplates = {
    'changegroup': multiple_template,
    }
94
94
class notifier(object):
    '''email notification class.

    Reads subscriptions from the configured notify config file,
    formats changesets through a templater into an internal buffer,
    and either prints the resulting message (test mode, the default)
    or mails it to all subscribers.
    '''

    def __init__(self, ui, repo, hooktype):
        self.ui = ui
        # overlay the subscription config file onto the ui config
        self.ui.readconfig(self.ui.config('notify', 'config'))
        self.repo = repo
        self.stripcount = int(self.ui.config('notify', 'strip', 0))
        self.root = self.strip(self.repo.root)
        self.domain = self.ui.config('notify', 'domain')
        self.sio = templater.stringio()
        self.subs = self.subscribers()

        mapfile = self.ui.config('notify', 'style')
        # hooktype-specific template wins over the generic one
        template = (self.ui.config('notify', hooktype) or
                    self.ui.config('notify', 'template'))
        self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
                                               self.sio)
        if not mapfile and not template:
            template = deftemplates.get(hooktype) or single_template
        if template:
            template = templater.parsestring(template, quoted=False)
            self.t.use_template(template)

    def strip(self, path):
        '''strip leading slashes from local path, turn into web-safe path.'''
        path = util.pconvert(path)
        count = self.stripcount
        # NOTE(review): ">= 0" strips stripcount+1 leading components,
        # one more than "strip = N # number of slashes" suggests --
        # confirm against callers before changing.
        while path and count >= 0:
            c = path.find('/')
            if c == -1:
                break
            path = path[c + 1:]
            count -= 1
        return path

    def fixmail(self, addr):
        '''try to clean up email addresses.'''
        addr = templater.email(addr.strip())
        a = addr.find('@localhost')
        if a != -1:
            addr = addr[:a]
        if '@' not in addr:
            # qualify bare user names with the configured domain
            return addr + '@' + self.domain
        return addr

    def subscribers(self):
        '''return list of email addresses of subscribers to this repo.'''
        subs = {}
        # [usersubs]: subscriber address -> comma-separated glob patterns
        for user, pats in self.ui.configitems('usersubs'):
            for pat in pats.split(','):
                if fnmatch.fnmatch(self.repo.root, pat.strip()):
                    subs[self.fixmail(user)] = 1
        # [reposubs]: glob pattern -> comma-separated subscriber addresses
        for pat, users in self.ui.configitems('reposubs'):
            if fnmatch.fnmatch(self.repo.root, pat):
                for user in users.split(','):
                    subs[self.fixmail(user)] = 1
        # fix: sorted() instead of py2-only "keys(); sort()" (dict key
        # views have no sort method on modern pythons); same result.
        return sorted(subs)

    def url(self, path=None):
        '''return browse url for path (or repo root if path is None).'''
        return self.ui.config('web', 'baseurl') + (path or self.root)

    def node(self, node):
        '''format one changeset.'''
        self.t.show(changenode=node, changes=self.repo.changelog.read(node),
                    baseurl=self.ui.config('web', 'baseurl'),
                    root=self.repo.root,
                    webroot=self.root)

    def send(self, node, count):
        '''send message.'''
        # re-parse the buffered template output as an email message so
        # headers emitted by the template are honored
        p = email.Parser.Parser()
        self.sio.seek(0)
        msg = p.parse(self.sio)

        def fix_subject():
            '''try to make subject line exist and be useful.'''
            subject = msg['Subject']
            if not subject:
                if count > 1:
                    subject = _('%s: %d new changesets') % (self.root, count)
                else:
                    changes = self.repo.changelog.read(node)
                    # first line of the changeset description
                    s = changes[4].lstrip().split('\n', 1)[0].rstrip()
                    subject = '%s: %s' % (self.root, s)
            maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
            if maxsubject and len(subject) > maxsubject:
                subject = subject[:maxsubject - 3] + '...'
            del msg['Subject']
            msg['Subject'] = subject

        def fix_sender():
            '''try to make message have proper sender.'''
            sender = msg['From']
            if not sender:
                sender = self.ui.config('email', 'from') or self.ui.username()
            if '@' not in sender or '@localhost' in sender:
                sender = self.fixmail(sender)
            del msg['From']
            msg['From'] = sender

        fix_subject()
        fix_sender()

        msg['X-Hg-Notification'] = 'changeset ' + short(node)
        if not msg['Message-Id']:
            msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
                                 (short(node), int(time.time()),
                                  hash(self.repo.root), socket.getfqdn()))

        msgtext = msg.as_string(0)
        if self.ui.configbool('notify', 'test', True):
            # test mode: print instead of mailing
            self.ui.write(msgtext)
            if not msgtext.endswith('\n'):
                self.ui.write('\n')
        else:
            mail = self.ui.sendmail()
            mail.sendmail(templater.email(msg['From']), self.subs, msgtext)

    def diff(self, node):
        '''append (possibly truncated) diffs of changeset to message buffer.

        maxdiff config: 0 means no diffs, -1 means unlimited.'''
        maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
        if maxdiff == 0:
            return
        fp = templater.stringio()
        commands.dodiff(fp, self.ui, self.repo, node,
                        self.repo.changelog.tip())
        difflines = fp.getvalue().splitlines(1)
        if maxdiff > 0 and len(difflines) > maxdiff:
            self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
                           (len(difflines), maxdiff))
            difflines = difflines[:maxdiff]
        elif difflines:
            self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
        # NOTE(review): assumes templater.stringio.write accepts multiple
        # arguments (plain StringIO.write does not) -- confirm.
        self.sio.write(*difflines)
238
238
def hook(ui, repo, hooktype, node=None, **kwargs):
    '''send email notifications to interested subscribers.

    if used as changegroup hook, send one email for all changesets in
    changegroup. else send one email per changeset.

    returns True on success, as the hook machinery treats a true
    return value from a python hook as success.'''
    n = notifier(ui, repo, hooktype)
    if not n.subs:
        # nobody subscribed to this repo: nothing to do, still a success
        return True
    node = bin(node)
    if hooktype == 'changegroup':
        # changegroup hooks receive the first new changeset; everything
        # from there to the end of the changelog is part of the group
        start = repo.changelog.rev(node)
        end = repo.changelog.count()
        count = end - start
        for rev in range(start, end):  # fix: range, py3 has no xrange
            n.node(repo.changelog.node(rev))
    else:
        count = 1
        n.node(node)
    n.diff(node)
    n.send(node, count)
    return True
@@ -1,2069 +1,2072
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "appendfile changegroup")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "revlog traceback")
15 demandload(globals(), "revlog traceback")
16
16
17 class localrepository(object):
17 class localrepository(object):
18 def __del__(self):
18 def __del__(self):
19 self.transhandle = None
19 self.transhandle = None
20 def __init__(self, parentui, path=None, create=0):
20 def __init__(self, parentui, path=None, create=0):
21 if not path:
21 if not path:
22 p = os.getcwd()
22 p = os.getcwd()
23 while not os.path.isdir(os.path.join(p, ".hg")):
23 while not os.path.isdir(os.path.join(p, ".hg")):
24 oldp = p
24 oldp = p
25 p = os.path.dirname(p)
25 p = os.path.dirname(p)
26 if p == oldp:
26 if p == oldp:
27 raise repo.RepoError(_("no repo found"))
27 raise repo.RepoError(_("no repo found"))
28 path = p
28 path = p
29 self.path = os.path.join(path, ".hg")
29 self.path = os.path.join(path, ".hg")
30
30
31 if not create and not os.path.isdir(self.path):
31 if not create and not os.path.isdir(self.path):
32 raise repo.RepoError(_("repository %s not found") % path)
32 raise repo.RepoError(_("repository %s not found") % path)
33
33
34 self.root = os.path.abspath(path)
34 self.root = os.path.abspath(path)
35 self.origroot = path
35 self.origroot = path
36 self.ui = ui.ui(parentui=parentui)
36 self.ui = ui.ui(parentui=parentui)
37 self.opener = util.opener(self.path)
37 self.opener = util.opener(self.path)
38 self.wopener = util.opener(self.root)
38 self.wopener = util.opener(self.root)
39
39
40 try:
40 try:
41 self.ui.readconfig(self.join("hgrc"), self.root)
41 self.ui.readconfig(self.join("hgrc"), self.root)
42 except IOError:
42 except IOError:
43 pass
43 pass
44
44
45 v = self.ui.revlogopts
45 v = self.ui.revlogopts
46 self.revlogversion = int(v.get('format', revlog.REVLOGV0))
46 self.revlogversion = int(v.get('format', revlog.REVLOGV0))
47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 flags = 0
48 flags = 0
49 for x in v.get('flags', "").split():
49 for x in v.get('flags', "").split():
50 flags |= revlog.flagstr(x)
50 flags |= revlog.flagstr(x)
51
51
52 v = self.revlogversion | flags
52 v = self.revlogversion | flags
53 self.manifest = manifest.manifest(self.opener, v)
53 self.manifest = manifest.manifest(self.opener, v)
54 self.changelog = changelog.changelog(self.opener, v)
54 self.changelog = changelog.changelog(self.opener, v)
55
55
56 # the changelog might not have the inline index flag
56 # the changelog might not have the inline index flag
57 # on. If the format of the changelog is the same as found in
57 # on. If the format of the changelog is the same as found in
58 # .hgrc, apply any flags found in the .hgrc as well.
58 # .hgrc, apply any flags found in the .hgrc as well.
59 # Otherwise, just version from the changelog
59 # Otherwise, just version from the changelog
60 v = self.changelog.version
60 v = self.changelog.version
61 if v == self.revlogversion:
61 if v == self.revlogversion:
62 v |= flags
62 v |= flags
63 self.revlogversion = v
63 self.revlogversion = v
64
64
65 self.tagscache = None
65 self.tagscache = None
66 self.nodetagscache = None
66 self.nodetagscache = None
67 self.encodepats = None
67 self.encodepats = None
68 self.decodepats = None
68 self.decodepats = None
69 self.transhandle = None
69 self.transhandle = None
70
70
71 if create:
71 if create:
72 os.mkdir(self.path)
72 os.mkdir(self.path)
73 os.mkdir(self.join("data"))
73 os.mkdir(self.join("data"))
74
74
75 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
75 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
76
76
77 def hook(self, name, throw=False, **args):
77 def hook(self, name, throw=False, **args):
78 def callhook(hname, funcname):
78 def callhook(hname, funcname):
79 '''call python hook. hook is callable object, looked up as
79 '''call python hook. hook is callable object, looked up as
80 name in python module. if callable returns "true", hook
80 name in python module. if callable returns "true", hook
81 passes, else fails. if hook raises exception, treated as
81 fails, else passes. if hook raises exception, treated as
82 hook failure. exception propagates if throw is "true".'''
82 hook failure. exception propagates if throw is "true".
83
84 reason for "true" meaning "hook failed" is so that
85 unmodified commands (e.g. mercurial.commands.update) can
86 be run as hooks without wrappers to convert return values.'''
83
87
84 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
88 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
85 d = funcname.rfind('.')
89 d = funcname.rfind('.')
86 if d == -1:
90 if d == -1:
87 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
91 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
88 % (hname, funcname))
92 % (hname, funcname))
89 modname = funcname[:d]
93 modname = funcname[:d]
90 try:
94 try:
91 obj = __import__(modname)
95 obj = __import__(modname)
92 except ImportError:
96 except ImportError:
93 raise util.Abort(_('%s hook is invalid '
97 raise util.Abort(_('%s hook is invalid '
94 '(import of "%s" failed)') %
98 '(import of "%s" failed)') %
95 (hname, modname))
99 (hname, modname))
96 try:
100 try:
97 for p in funcname.split('.')[1:]:
101 for p in funcname.split('.')[1:]:
98 obj = getattr(obj, p)
102 obj = getattr(obj, p)
99 except AttributeError, err:
103 except AttributeError, err:
100 raise util.Abort(_('%s hook is invalid '
104 raise util.Abort(_('%s hook is invalid '
101 '("%s" is not defined)') %
105 '("%s" is not defined)') %
102 (hname, funcname))
106 (hname, funcname))
103 if not callable(obj):
107 if not callable(obj):
104 raise util.Abort(_('%s hook is invalid '
108 raise util.Abort(_('%s hook is invalid '
105 '("%s" is not callable)') %
109 '("%s" is not callable)') %
106 (hname, funcname))
110 (hname, funcname))
107 try:
111 try:
108 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
112 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
109 except (KeyboardInterrupt, util.SignalInterrupt):
113 except (KeyboardInterrupt, util.SignalInterrupt):
110 raise
114 raise
111 except Exception, exc:
115 except Exception, exc:
112 if isinstance(exc, util.Abort):
116 if isinstance(exc, util.Abort):
113 self.ui.warn(_('error: %s hook failed: %s\n') %
117 self.ui.warn(_('error: %s hook failed: %s\n') %
114 (hname, exc.args[0] % exc.args[1:]))
118 (hname, exc.args[0] % exc.args[1:]))
115 else:
119 else:
116 self.ui.warn(_('error: %s hook raised an exception: '
120 self.ui.warn(_('error: %s hook raised an exception: '
117 '%s\n') % (hname, exc))
121 '%s\n') % (hname, exc))
118 if throw:
122 if throw:
119 raise
123 raise
120 if self.ui.traceback:
124 if self.ui.traceback:
121 traceback.print_exc()
125 traceback.print_exc()
122 return False
126 return True
123 if not r:
127 if r:
124 if throw:
128 if throw:
125 raise util.Abort(_('%s hook failed') % hname)
129 raise util.Abort(_('%s hook failed') % hname)
126 self.ui.warn(_('error: %s hook failed\n') % hname)
130 self.ui.warn(_('warning: %s hook failed\n') % hname)
127 return r
131 return r
128
132
129 def runhook(name, cmd):
133 def runhook(name, cmd):
130 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
134 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
131 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
135 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
132 [(k.upper(), v) for k, v in args.iteritems()])
136 [(k.upper(), v) for k, v in args.iteritems()])
133 r = util.system(cmd, environ=env, cwd=self.root)
137 r = util.system(cmd, environ=env, cwd=self.root)
134 if r:
138 if r:
135 desc, r = util.explain_exit(r)
139 desc, r = util.explain_exit(r)
136 if throw:
140 if throw:
137 raise util.Abort(_('%s hook %s') % (name, desc))
141 raise util.Abort(_('%s hook %s') % (name, desc))
138 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
142 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
139 return False
143 return r
140 return True
141
144
142 r = True
145 r = False
143 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
146 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
144 if hname.split(".", 1)[0] == name and cmd]
147 if hname.split(".", 1)[0] == name and cmd]
145 hooks.sort()
148 hooks.sort()
146 for hname, cmd in hooks:
149 for hname, cmd in hooks:
147 if cmd.startswith('python:'):
150 if cmd.startswith('python:'):
148 r = callhook(hname, cmd[7:].strip()) and r
151 r = callhook(hname, cmd[7:].strip()) or r
149 else:
152 else:
150 r = runhook(hname, cmd) and r
153 r = runhook(hname, cmd) or r
151 return r
154 return r
152
155
153 def tags(self):
156 def tags(self):
154 '''return a mapping of tag to node'''
157 '''return a mapping of tag to node'''
155 if not self.tagscache:
158 if not self.tagscache:
156 self.tagscache = {}
159 self.tagscache = {}
157
160
158 def parsetag(line, context):
161 def parsetag(line, context):
159 if not line:
162 if not line:
160 return
163 return
161 s = l.split(" ", 1)
164 s = l.split(" ", 1)
162 if len(s) != 2:
165 if len(s) != 2:
163 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
166 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
164 return
167 return
165 node, key = s
168 node, key = s
166 try:
169 try:
167 bin_n = bin(node)
170 bin_n = bin(node)
168 except TypeError:
171 except TypeError:
169 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
172 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
170 return
173 return
171 if bin_n not in self.changelog.nodemap:
174 if bin_n not in self.changelog.nodemap:
172 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
175 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
173 return
176 return
174 self.tagscache[key.strip()] = bin_n
177 self.tagscache[key.strip()] = bin_n
175
178
176 # read each head of the tags file, ending with the tip
179 # read each head of the tags file, ending with the tip
177 # and add each tag found to the map, with "newer" ones
180 # and add each tag found to the map, with "newer" ones
178 # taking precedence
181 # taking precedence
179 fl = self.file(".hgtags")
182 fl = self.file(".hgtags")
180 h = fl.heads()
183 h = fl.heads()
181 h.reverse()
184 h.reverse()
182 for r in h:
185 for r in h:
183 count = 0
186 count = 0
184 for l in fl.read(r).splitlines():
187 for l in fl.read(r).splitlines():
185 count += 1
188 count += 1
186 parsetag(l, ".hgtags:%d" % count)
189 parsetag(l, ".hgtags:%d" % count)
187
190
188 try:
191 try:
189 f = self.opener("localtags")
192 f = self.opener("localtags")
190 count = 0
193 count = 0
191 for l in f:
194 for l in f:
192 count += 1
195 count += 1
193 parsetag(l, "localtags:%d" % count)
196 parsetag(l, "localtags:%d" % count)
194 except IOError:
197 except IOError:
195 pass
198 pass
196
199
197 self.tagscache['tip'] = self.changelog.tip()
200 self.tagscache['tip'] = self.changelog.tip()
198
201
199 return self.tagscache
202 return self.tagscache
200
203
201 def tagslist(self):
204 def tagslist(self):
202 '''return a list of tags ordered by revision'''
205 '''return a list of tags ordered by revision'''
203 l = []
206 l = []
204 for t, n in self.tags().items():
207 for t, n in self.tags().items():
205 try:
208 try:
206 r = self.changelog.rev(n)
209 r = self.changelog.rev(n)
207 except:
210 except:
208 r = -2 # sort to the beginning of the list if unknown
211 r = -2 # sort to the beginning of the list if unknown
209 l.append((r, t, n))
212 l.append((r, t, n))
210 l.sort()
213 l.sort()
211 return [(t, n) for r, t, n in l]
214 return [(t, n) for r, t, n in l]
212
215
213 def nodetags(self, node):
216 def nodetags(self, node):
214 '''return the tags associated with a node'''
217 '''return the tags associated with a node'''
215 if not self.nodetagscache:
218 if not self.nodetagscache:
216 self.nodetagscache = {}
219 self.nodetagscache = {}
217 for t, n in self.tags().items():
220 for t, n in self.tags().items():
218 self.nodetagscache.setdefault(n, []).append(t)
221 self.nodetagscache.setdefault(n, []).append(t)
219 return self.nodetagscache.get(node, [])
222 return self.nodetagscache.get(node, [])
220
223
221 def lookup(self, key):
224 def lookup(self, key):
222 try:
225 try:
223 return self.tags()[key]
226 return self.tags()[key]
224 except KeyError:
227 except KeyError:
225 try:
228 try:
226 return self.changelog.lookup(key)
229 return self.changelog.lookup(key)
227 except:
230 except:
228 raise repo.RepoError(_("unknown revision '%s'") % key)
231 raise repo.RepoError(_("unknown revision '%s'") % key)
229
232
230 def dev(self):
233 def dev(self):
231 return os.stat(self.path).st_dev
234 return os.stat(self.path).st_dev
232
235
233 def local(self):
236 def local(self):
234 return True
237 return True
235
238
236 def join(self, f):
239 def join(self, f):
237 return os.path.join(self.path, f)
240 return os.path.join(self.path, f)
238
241
239 def wjoin(self, f):
242 def wjoin(self, f):
240 return os.path.join(self.root, f)
243 return os.path.join(self.root, f)
241
244
242 def file(self, f):
245 def file(self, f):
243 if f[0] == '/':
246 if f[0] == '/':
244 f = f[1:]
247 f = f[1:]
245 return filelog.filelog(self.opener, f, self.revlogversion)
248 return filelog.filelog(self.opener, f, self.revlogversion)
246
249
247 def getcwd(self):
250 def getcwd(self):
248 return self.dirstate.getcwd()
251 return self.dirstate.getcwd()
249
252
250 def wfile(self, f, mode='r'):
253 def wfile(self, f, mode='r'):
251 return self.wopener(f, mode)
254 return self.wopener(f, mode)
252
255
253 def wread(self, filename):
256 def wread(self, filename):
254 if self.encodepats == None:
257 if self.encodepats == None:
255 l = []
258 l = []
256 for pat, cmd in self.ui.configitems("encode"):
259 for pat, cmd in self.ui.configitems("encode"):
257 mf = util.matcher(self.root, "", [pat], [], [])[1]
260 mf = util.matcher(self.root, "", [pat], [], [])[1]
258 l.append((mf, cmd))
261 l.append((mf, cmd))
259 self.encodepats = l
262 self.encodepats = l
260
263
261 data = self.wopener(filename, 'r').read()
264 data = self.wopener(filename, 'r').read()
262
265
263 for mf, cmd in self.encodepats:
266 for mf, cmd in self.encodepats:
264 if mf(filename):
267 if mf(filename):
265 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
268 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
266 data = util.filter(data, cmd)
269 data = util.filter(data, cmd)
267 break
270 break
268
271
269 return data
272 return data
270
273
271 def wwrite(self, filename, data, fd=None):
274 def wwrite(self, filename, data, fd=None):
272 if self.decodepats == None:
275 if self.decodepats == None:
273 l = []
276 l = []
274 for pat, cmd in self.ui.configitems("decode"):
277 for pat, cmd in self.ui.configitems("decode"):
275 mf = util.matcher(self.root, "", [pat], [], [])[1]
278 mf = util.matcher(self.root, "", [pat], [], [])[1]
276 l.append((mf, cmd))
279 l.append((mf, cmd))
277 self.decodepats = l
280 self.decodepats = l
278
281
279 for mf, cmd in self.decodepats:
282 for mf, cmd in self.decodepats:
280 if mf(filename):
283 if mf(filename):
281 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
284 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
282 data = util.filter(data, cmd)
285 data = util.filter(data, cmd)
283 break
286 break
284
287
285 if fd:
288 if fd:
286 return fd.write(data)
289 return fd.write(data)
287 return self.wopener(filename, 'w').write(data)
290 return self.wopener(filename, 'w').write(data)
288
291
289 def transaction(self):
292 def transaction(self):
290 tr = self.transhandle
293 tr = self.transhandle
291 if tr != None and tr.running():
294 if tr != None and tr.running():
292 return tr.nest()
295 return tr.nest()
293
296
294 # save dirstate for undo
297 # save dirstate for undo
295 try:
298 try:
296 ds = self.opener("dirstate").read()
299 ds = self.opener("dirstate").read()
297 except IOError:
300 except IOError:
298 ds = ""
301 ds = ""
299 self.opener("journal.dirstate", "w").write(ds)
302 self.opener("journal.dirstate", "w").write(ds)
300
303
301 tr = transaction.transaction(self.ui.warn, self.opener,
304 tr = transaction.transaction(self.ui.warn, self.opener,
302 self.join("journal"),
305 self.join("journal"),
303 aftertrans(self.path))
306 aftertrans(self.path))
304 self.transhandle = tr
307 self.transhandle = tr
305 return tr
308 return tr
306
309
307 def recover(self):
310 def recover(self):
308 l = self.lock()
311 l = self.lock()
309 if os.path.exists(self.join("journal")):
312 if os.path.exists(self.join("journal")):
310 self.ui.status(_("rolling back interrupted transaction\n"))
313 self.ui.status(_("rolling back interrupted transaction\n"))
311 transaction.rollback(self.opener, self.join("journal"))
314 transaction.rollback(self.opener, self.join("journal"))
312 self.reload()
315 self.reload()
313 return True
316 return True
314 else:
317 else:
315 self.ui.warn(_("no interrupted transaction available\n"))
318 self.ui.warn(_("no interrupted transaction available\n"))
316 return False
319 return False
317
320
318 def undo(self, wlock=None):
321 def undo(self, wlock=None):
319 if not wlock:
322 if not wlock:
320 wlock = self.wlock()
323 wlock = self.wlock()
321 l = self.lock()
324 l = self.lock()
322 if os.path.exists(self.join("undo")):
325 if os.path.exists(self.join("undo")):
323 self.ui.status(_("rolling back last transaction\n"))
326 self.ui.status(_("rolling back last transaction\n"))
324 transaction.rollback(self.opener, self.join("undo"))
327 transaction.rollback(self.opener, self.join("undo"))
325 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
328 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
326 self.reload()
329 self.reload()
327 self.wreload()
330 self.wreload()
328 else:
331 else:
329 self.ui.warn(_("no undo information available\n"))
332 self.ui.warn(_("no undo information available\n"))
330
333
331 def wreload(self):
334 def wreload(self):
332 self.dirstate.read()
335 self.dirstate.read()
333
336
334 def reload(self):
337 def reload(self):
335 self.changelog.load()
338 self.changelog.load()
336 self.manifest.load()
339 self.manifest.load()
337 self.tagscache = None
340 self.tagscache = None
338 self.nodetagscache = None
341 self.nodetagscache = None
339
342
340 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
343 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
341 desc=None):
344 desc=None):
342 try:
345 try:
343 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
346 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
344 except lock.LockHeld, inst:
347 except lock.LockHeld, inst:
345 if not wait:
348 if not wait:
346 raise
349 raise
347 self.ui.warn(_("waiting for lock on %s held by %s\n") %
350 self.ui.warn(_("waiting for lock on %s held by %s\n") %
348 (desc, inst.args[0]))
351 (desc, inst.args[0]))
349 # default to 600 seconds timeout
352 # default to 600 seconds timeout
350 l = lock.lock(self.join(lockname),
353 l = lock.lock(self.join(lockname),
351 int(self.ui.config("ui", "timeout") or 600),
354 int(self.ui.config("ui", "timeout") or 600),
352 releasefn, desc=desc)
355 releasefn, desc=desc)
353 if acquirefn:
356 if acquirefn:
354 acquirefn()
357 acquirefn()
355 return l
358 return l
356
359
357 def lock(self, wait=1):
360 def lock(self, wait=1):
358 return self.do_lock("lock", wait, acquirefn=self.reload,
361 return self.do_lock("lock", wait, acquirefn=self.reload,
359 desc=_('repository %s') % self.origroot)
362 desc=_('repository %s') % self.origroot)
360
363
361 def wlock(self, wait=1):
364 def wlock(self, wait=1):
362 return self.do_lock("wlock", wait, self.dirstate.write,
365 return self.do_lock("wlock", wait, self.dirstate.write,
363 self.wreload,
366 self.wreload,
364 desc=_('working directory of %s') % self.origroot)
367 desc=_('working directory of %s') % self.origroot)
365
368
366 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
369 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
367 "determine whether a new filenode is needed"
370 "determine whether a new filenode is needed"
368 fp1 = manifest1.get(filename, nullid)
371 fp1 = manifest1.get(filename, nullid)
369 fp2 = manifest2.get(filename, nullid)
372 fp2 = manifest2.get(filename, nullid)
370
373
371 if fp2 != nullid:
374 if fp2 != nullid:
372 # is one parent an ancestor of the other?
375 # is one parent an ancestor of the other?
373 fpa = filelog.ancestor(fp1, fp2)
376 fpa = filelog.ancestor(fp1, fp2)
374 if fpa == fp1:
377 if fpa == fp1:
375 fp1, fp2 = fp2, nullid
378 fp1, fp2 = fp2, nullid
376 elif fpa == fp2:
379 elif fpa == fp2:
377 fp2 = nullid
380 fp2 = nullid
378
381
379 # is the file unmodified from the parent? report existing entry
382 # is the file unmodified from the parent? report existing entry
380 if fp2 == nullid and text == filelog.read(fp1):
383 if fp2 == nullid and text == filelog.read(fp1):
381 return (fp1, None, None)
384 return (fp1, None, None)
382
385
383 return (None, fp1, fp2)
386 return (None, fp1, fp2)
384
387
385 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
388 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
386 orig_parent = self.dirstate.parents()[0] or nullid
389 orig_parent = self.dirstate.parents()[0] or nullid
387 p1 = p1 or self.dirstate.parents()[0] or nullid
390 p1 = p1 or self.dirstate.parents()[0] or nullid
388 p2 = p2 or self.dirstate.parents()[1] or nullid
391 p2 = p2 or self.dirstate.parents()[1] or nullid
389 c1 = self.changelog.read(p1)
392 c1 = self.changelog.read(p1)
390 c2 = self.changelog.read(p2)
393 c2 = self.changelog.read(p2)
391 m1 = self.manifest.read(c1[0])
394 m1 = self.manifest.read(c1[0])
392 mf1 = self.manifest.readflags(c1[0])
395 mf1 = self.manifest.readflags(c1[0])
393 m2 = self.manifest.read(c2[0])
396 m2 = self.manifest.read(c2[0])
394 changed = []
397 changed = []
395
398
396 if orig_parent == p1:
399 if orig_parent == p1:
397 update_dirstate = 1
400 update_dirstate = 1
398 else:
401 else:
399 update_dirstate = 0
402 update_dirstate = 0
400
403
401 if not wlock:
404 if not wlock:
402 wlock = self.wlock()
405 wlock = self.wlock()
403 l = self.lock()
406 l = self.lock()
404 tr = self.transaction()
407 tr = self.transaction()
405 mm = m1.copy()
408 mm = m1.copy()
406 mfm = mf1.copy()
409 mfm = mf1.copy()
407 linkrev = self.changelog.count()
410 linkrev = self.changelog.count()
408 for f in files:
411 for f in files:
409 try:
412 try:
410 t = self.wread(f)
413 t = self.wread(f)
411 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
414 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
412 r = self.file(f)
415 r = self.file(f)
413 mfm[f] = tm
416 mfm[f] = tm
414
417
415 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
418 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
416 if entry:
419 if entry:
417 mm[f] = entry
420 mm[f] = entry
418 continue
421 continue
419
422
420 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
423 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
421 changed.append(f)
424 changed.append(f)
422 if update_dirstate:
425 if update_dirstate:
423 self.dirstate.update([f], "n")
426 self.dirstate.update([f], "n")
424 except IOError:
427 except IOError:
425 try:
428 try:
426 del mm[f]
429 del mm[f]
427 del mfm[f]
430 del mfm[f]
428 if update_dirstate:
431 if update_dirstate:
429 self.dirstate.forget([f])
432 self.dirstate.forget([f])
430 except:
433 except:
431 # deleted from p2?
434 # deleted from p2?
432 pass
435 pass
433
436
434 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
437 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
435 user = user or self.ui.username()
438 user = user or self.ui.username()
436 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
439 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
437 tr.close()
440 tr.close()
438 if update_dirstate:
441 if update_dirstate:
439 self.dirstate.setparents(n, nullid)
442 self.dirstate.setparents(n, nullid)
440
443
441 def commit(self, files=None, text="", user=None, date=None,
444 def commit(self, files=None, text="", user=None, date=None,
442 match=util.always, force=False, lock=None, wlock=None):
445 match=util.always, force=False, lock=None, wlock=None):
443 commit = []
446 commit = []
444 remove = []
447 remove = []
445 changed = []
448 changed = []
446
449
447 if files:
450 if files:
448 for f in files:
451 for f in files:
449 s = self.dirstate.state(f)
452 s = self.dirstate.state(f)
450 if s in 'nmai':
453 if s in 'nmai':
451 commit.append(f)
454 commit.append(f)
452 elif s == 'r':
455 elif s == 'r':
453 remove.append(f)
456 remove.append(f)
454 else:
457 else:
455 self.ui.warn(_("%s not tracked!\n") % f)
458 self.ui.warn(_("%s not tracked!\n") % f)
456 else:
459 else:
457 modified, added, removed, deleted, unknown = self.changes(match=match)
460 modified, added, removed, deleted, unknown = self.changes(match=match)
458 commit = modified + added
461 commit = modified + added
459 remove = removed
462 remove = removed
460
463
461 p1, p2 = self.dirstate.parents()
464 p1, p2 = self.dirstate.parents()
462 c1 = self.changelog.read(p1)
465 c1 = self.changelog.read(p1)
463 c2 = self.changelog.read(p2)
466 c2 = self.changelog.read(p2)
464 m1 = self.manifest.read(c1[0])
467 m1 = self.manifest.read(c1[0])
465 mf1 = self.manifest.readflags(c1[0])
468 mf1 = self.manifest.readflags(c1[0])
466 m2 = self.manifest.read(c2[0])
469 m2 = self.manifest.read(c2[0])
467
470
468 if not commit and not remove and not force and p2 == nullid:
471 if not commit and not remove and not force and p2 == nullid:
469 self.ui.status(_("nothing changed\n"))
472 self.ui.status(_("nothing changed\n"))
470 return None
473 return None
471
474
472 xp1 = hex(p1)
475 xp1 = hex(p1)
473 if p2 == nullid: xp2 = ''
476 if p2 == nullid: xp2 = ''
474 else: xp2 = hex(p2)
477 else: xp2 = hex(p2)
475
478
476 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
479 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
477
480
478 if not wlock:
481 if not wlock:
479 wlock = self.wlock()
482 wlock = self.wlock()
480 if not lock:
483 if not lock:
481 lock = self.lock()
484 lock = self.lock()
482 tr = self.transaction()
485 tr = self.transaction()
483
486
484 # check in files
487 # check in files
485 new = {}
488 new = {}
486 linkrev = self.changelog.count()
489 linkrev = self.changelog.count()
487 commit.sort()
490 commit.sort()
488 for f in commit:
491 for f in commit:
489 self.ui.note(f + "\n")
492 self.ui.note(f + "\n")
490 try:
493 try:
491 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
494 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
492 t = self.wread(f)
495 t = self.wread(f)
493 except IOError:
496 except IOError:
494 self.ui.warn(_("trouble committing %s!\n") % f)
497 self.ui.warn(_("trouble committing %s!\n") % f)
495 raise
498 raise
496
499
497 r = self.file(f)
500 r = self.file(f)
498
501
499 meta = {}
502 meta = {}
500 cp = self.dirstate.copied(f)
503 cp = self.dirstate.copied(f)
501 if cp:
504 if cp:
502 meta["copy"] = cp
505 meta["copy"] = cp
503 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
506 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
504 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
507 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
505 fp1, fp2 = nullid, nullid
508 fp1, fp2 = nullid, nullid
506 else:
509 else:
507 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
510 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
508 if entry:
511 if entry:
509 new[f] = entry
512 new[f] = entry
510 continue
513 continue
511
514
512 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
515 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
513 # remember what we've added so that we can later calculate
516 # remember what we've added so that we can later calculate
514 # the files to pull from a set of changesets
517 # the files to pull from a set of changesets
515 changed.append(f)
518 changed.append(f)
516
519
517 # update manifest
520 # update manifest
518 m1 = m1.copy()
521 m1 = m1.copy()
519 m1.update(new)
522 m1.update(new)
520 for f in remove:
523 for f in remove:
521 if f in m1:
524 if f in m1:
522 del m1[f]
525 del m1[f]
523 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
526 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
524 (new, remove))
527 (new, remove))
525
528
526 # add changeset
529 # add changeset
527 new = new.keys()
530 new = new.keys()
528 new.sort()
531 new.sort()
529
532
530 user = user or self.ui.username()
533 user = user or self.ui.username()
531 if not text:
534 if not text:
532 edittext = [""]
535 edittext = [""]
533 if p2 != nullid:
536 if p2 != nullid:
534 edittext.append("HG: branch merge")
537 edittext.append("HG: branch merge")
535 edittext.extend(["HG: changed %s" % f for f in changed])
538 edittext.extend(["HG: changed %s" % f for f in changed])
536 edittext.extend(["HG: removed %s" % f for f in remove])
539 edittext.extend(["HG: removed %s" % f for f in remove])
537 if not changed and not remove:
540 if not changed and not remove:
538 edittext.append("HG: no files changed")
541 edittext.append("HG: no files changed")
539 edittext.append("")
542 edittext.append("")
540 # run editor in the repository root
543 # run editor in the repository root
541 olddir = os.getcwd()
544 olddir = os.getcwd()
542 os.chdir(self.root)
545 os.chdir(self.root)
543 edittext = self.ui.edit("\n".join(edittext), user)
546 edittext = self.ui.edit("\n".join(edittext), user)
544 os.chdir(olddir)
547 os.chdir(olddir)
545 if not edittext.rstrip():
548 if not edittext.rstrip():
546 return None
549 return None
547 text = edittext
550 text = edittext
548
551
549 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
552 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
550 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
553 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
551 parent2=xp2)
554 parent2=xp2)
552 tr.close()
555 tr.close()
553
556
554 self.dirstate.setparents(n)
557 self.dirstate.setparents(n)
555 self.dirstate.update(new, "n")
558 self.dirstate.update(new, "n")
556 self.dirstate.forget(remove)
559 self.dirstate.forget(remove)
557
560
558 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
561 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
559 return n
562 return n
560
563
def walk(self, node=None, files=[], match=util.always, badmatch=None):
    """Walk a committed revision or the working directory.

    Yields (src, fn) pairs:
      'm'  - file from the manifest of `node` that satisfies `match`
      'b'  - requested file missing from `node` but accepted by `badmatch`
      otherwise whatever src codes dirstate.walk yields for the
      working directory.

    node:     changeset node to walk, or None for the working directory.
    files:    restrict to these files; empty means no restriction.
              (Never mutated here, so the shared default list is safe.)
    match:    filename predicate.
    badmatch: optional predicate for requested files absent from the
              revision; unmatched absentees produce a ui warning instead.
    """
    if node:
        # Walking a fixed revision: stream matching manifest entries,
        # crossing requested files off fdict as they are seen.
        fdict = dict.fromkeys(files)
        for fn in self.manifest.read(self.changelog.read(node)[0]):
            fdict.pop(fn, None)  # present in the revision: not missing
            if match(fn):
                yield 'm', fn
        # Whatever is left in fdict was asked for but does not exist
        # in this revision.
        for fn in fdict:
            if badmatch and badmatch(fn):
                if match(fn):
                    yield 'b', fn
            else:
                self.ui.warn(_('%s: No such file in rev %s\n') % (
                    util.pathto(self.getcwd(), fn), short(node)))
    else:
        # Working directory: delegate entirely to the dirstate.
        for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
            yield src, fn
578
581
def changes(self, node1=None, node2=None, files=[], match=util.always,
            wlock=None, show_ignored=None):
    """Return changes between two nodes or a node and the working dir.

    If node1 is None, use the first dirstate parent instead.
    If node2 is None, compare node1 with the working directory.

    Returns (modified, added, removed, deleted, unknown) as sorted
    lists, plus a trailing `ignored` list when show_ignored is given.
    `files` is never mutated, so the shared default list is safe.
    """

    def fcmp(fn, mf):
        # True when the working copy of fn differs from its stored
        # revision in manifest mf.  (Only ever used in boolean context.)
        t1 = self.wread(fn)
        t2 = self.file(fn).read(mf.get(fn, nullid))
        return t1 != t2

    def mfmatches(node):
        # Manifest of `node`, restricted to names accepted by `match`.
        change = self.changelog.read(node)
        mf = dict(self.manifest.read(change[0]))
        for fn in list(mf.keys()):  # materialize: we delete while iterating
            if not match(fn):
                del mf[fn]
        return mf

    if node1:
        # read the manifest from node1 before the manifest from node2,
        # so that we'll hit the manifest cache if we're going through
        # all the revisions in parent->child order.
        mf1 = mfmatches(node1)

    # are we comparing the working directory?
    if not node2:
        if not wlock:
            # Best-effort lock: without it we simply skip the dirstate
            # freshening below rather than fail.
            try:
                wlock = self.wlock(wait=0)
            except lock.LockException:
                wlock = None
        lookup, modified, added, removed, deleted, unknown, ignored = (
            self.dirstate.changes(files, match, show_ignored))

        # are we comparing working dir against its parent?
        if not node1:
            if lookup:
                # do a full compare of any files that might have changed
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup:
                    if fcmp(f, mf2):
                        modified.append(f)
                    elif wlock is not None:
                        # proved clean: record as normal while we hold
                        # the working-dir lock
                        self.dirstate.update([f], "n")
        else:
            # we are comparing working dir against non-parent
            # generate a pseudo-manifest for the working dir
            mf2 = mfmatches(self.dirstate.parents()[0])
            for f in lookup + modified + added:
                mf2[f] = ""  # empty hash forces fcmp below
            for f in removed:
                if f in mf2:
                    del mf2[f]
    else:
        # we are comparing two revisions
        deleted, unknown, ignored = [], [], []
        mf2 = mfmatches(node2)

    if node1:
        # flush lists from dirstate before comparing manifests
        modified, added = [], []

        for fn in mf2:
            if fn in mf1:
                if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
                    modified.append(fn)
                del mf1[fn]
            else:
                added.append(fn)

        # anything remaining in mf1 vanished between node1 and node2
        removed = list(mf1.keys())

    # sort and return results:
    for l in modified, added, removed, deleted, unknown, ignored:
        l.sort()
    if show_ignored is None:
        return (modified, added, removed, deleted, unknown)
    else:
        return (modified, added, removed, deleted, unknown, ignored)
661
664
def add(self, list, wlock=None):
    """Schedule the given files for addition at the next commit.

    list:  filenames relative to the repository root (the parameter
           name shadows the builtin but is kept for interface
           compatibility).
    wlock: optional pre-acquired working-dir lock; taken here if absent.

    Files that do not exist, are not regular files, or are already
    tracked only produce a warning and are skipped.
    """
    if not wlock:
        wlock = self.wlock()
    for f in list:
        p = self.wjoin(f)
        if not os.path.exists(p):
            self.ui.warn(_("%s does not exist!\n") % f)
        elif not os.path.isfile(p):
            # directories, symlinks etc. are rejected here
            self.ui.warn(_("%s not added: only files supported currently\n")
                         % f)
        elif self.dirstate.state(f) in 'an':
            # already added ('a') or tracked normal ('n')
            self.ui.warn(_("%s already tracked!\n") % f)
        else:
            self.dirstate.update([f], "a")
676
679
def forget(self, list, wlock=None):
    """Undo a pending add for each file in `list`.

    Only files in state 'a' (added) or 'i' are forgotten; anything
    else produces a warning.  `wlock` may be a pre-acquired
    working-dir lock, otherwise one is taken here.
    """
    if not wlock:
        wlock = self.wlock()
    for f in list:
        if self.dirstate.state(f) not in 'ai':
            self.ui.warn(_("%s not added!\n") % f)
        else:
            self.dirstate.forget([f])
685
688
def remove(self, list, unlink=False, wlock=None):
    """Schedule files for removal at the next commit.

    list:   filenames relative to the repository root.
    unlink: when True, also delete the working copies first
            (a missing file is not an error).
    wlock:  optional pre-acquired working-dir lock.

    A file that still exists in the working dir (and was not just
    unlinked) is only warned about; a pending add is simply forgotten;
    an untracked file is warned about; otherwise the file is marked
    'r' (removed) in the dirstate.
    """
    if unlink:
        for f in list:
            try:
                util.unlink(self.wjoin(f))
            except OSError as inst:
                # already gone is fine; anything else propagates
                if inst.errno != errno.ENOENT:
                    raise
    if not wlock:
        wlock = self.wlock()
    for f in list:
        p = self.wjoin(f)
        if os.path.exists(p):
            self.ui.warn(_("%s still exists!\n") % f)
        elif self.dirstate.state(f) == 'a':
            # removing a file that was only scheduled for add:
            # just drop the add
            self.dirstate.forget([f])
        elif f not in self.dirstate:
            self.ui.warn(_("%s not tracked!\n") % f)
        else:
            self.dirstate.update([f], "r")
706
709
def undelete(self, list, wlock=None):
    """Restore files scheduled for removal from the first parent.

    For each file in `list` whose dirstate state is 'r' (removed),
    write back the contents recorded in the first dirstate parent,
    restore its exec bit from that manifest, and mark it 'n' again.
    Files not in state 'r' only produce a warning.
    """
    p = self.dirstate.parents()[0]
    mn = self.changelog.read(p)[0]
    mf = self.manifest.readflags(mn)  # exec-flag info per file
    m = self.manifest.read(mn)
    if not wlock:
        wlock = self.wlock()
    for f in list:
        if self.dirstate.state(f) not in "r":
            # wrapped in _() for consistency with the other warnings
            self.ui.warn(_("%s not removed!\n") % f)
        else:
            t = self.file(f).read(m[f])
            self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf[f])
            self.dirstate.update([f], "n")
722
725
def copy(self, source, dest, wlock=None):
    """Record that `dest` is a copy of `source`.

    `dest` must already exist in the working directory as a regular
    file; otherwise only a warning is emitted.  An untracked `dest`
    is additionally scheduled for add.  `wlock` may be a
    pre-acquired working-dir lock.
    """
    p = self.wjoin(dest)
    if not os.path.exists(p):
        self.ui.warn(_("%s does not exist!\n") % dest)
    elif not os.path.isfile(p):
        self.ui.warn(_("copy failed: %s is not a file\n") % dest)
    else:
        if not wlock:
            wlock = self.wlock()
        if self.dirstate.state(dest) == '?':
            # untracked destination: schedule the add too
            self.dirstate.update([dest], "a")
        self.dirstate.copy(source, dest)
735
738
def heads(self, start=None):
    """Return repository heads, sorted by descending revision number.

    start: optional node limiting which heads the changelog reports.
    """
    heads = self.changelog.heads(start)
    # sort the output in rev descending order by decorating each head
    # with its negated revision number
    heads = [(-self.changelog.rev(h), h) for h in heads]
    heads.sort()
    return [n for (r, n) in heads]
742
745
# branchlookup returns a dict giving a list of branches for
# each head. A branch is defined as the tag of a node or
# the branch of the node's parents. If a node has multiple
# branch tags, tags are eliminated if they are visible from other
# branch tags.
#
# So, for this graph:  a->b->c->d->e
#                       \         /
#                        aa -----/
# a has tag 2.6.12
# d has tag 2.6.13
# e would have branch tags for 2.6.12 and 2.6.13.  Because the node
# for 2.6.12 can be reached from the node 2.6.13, that is eliminated
# from the list.
#
# It is possible that more than one head will have the same branch tag.
# callers need to check the result for multiple heads under the same
# branch tag if that is a problem for them (ie checkout of a specific
# branch).
#
# passing in a specific branch will limit the depth of the search
# through the parents.  It won't limit the branches returned in the
# result though.
def branchlookup(self, heads=None, branch=None):
    """Map each head to the list of branch tags visible from it.

    heads:  heads to examine (defaults to self.heads()).
    branch: optional tag name; when found on a node the ancestor walk
            from that node stops early (limits search depth only, not
            which branches appear in the result).

    Returns {head: [tag, ...]} where tags reachable from another tag
    of the same head have been eliminated (see the comment above this
    method for a worked example).
    """
    if not heads:
        heads = self.heads()
    headt = [h for h in heads]
    chlog = self.changelog
    branches = {}   # node -> {tagged node visible from it: 1}
    merges = []     # pending (second-parent node, found-so-far) pairs
    seenmerge = {}

    # traverse the tree once for each head, recording in the branches
    # dict which tags are visible from this head.  The branches
    # dict also records which tags are visible from each tag
    # while we traverse.
    while headt or merges:
        if merges:
            n, found = merges.pop()
            visit = [n]
        else:
            h = headt.pop()
            visit = [h]
            found = [h]
            seen = {}
        while visit:
            n = visit.pop()
            if n in seen:
                continue
            pp = chlog.parents(n)
            tags = self.nodetags(n)
            if tags:
                for x in tags:
                    if x == 'tip':
                        continue
                    # first real tag wins; record visibility for every
                    # node on the path so far, and for the node itself
                    for f in found:
                        branches.setdefault(f, {})[n] = 1
                    branches.setdefault(n, {})[n] = 1
                    break
                if n not in found:
                    found.append(n)
                if branch in tags:
                    # depth limit: stop walking past the requested branch
                    continue
            seen[n] = 1
            if pp[1] != nullid and n not in seenmerge:
                # queue the second parent of a merge for a later pass
                merges.append((pp[1], [x for x in found]))
                seenmerge[n] = 1
            if pp[0] != nullid:
                visit.append(pp[0])

    # traverse the branches dict, eliminating branch tags from each
    # head that are visible from another branch tag for that head.
    out = {}
    viscache = {}
    for h in heads:
        def visible(node):
            # set (as dict) of all nodes reachable from `node` through
            # the branches visibility graph, memoized in viscache
            if node in viscache:
                return viscache[node]
            ret = {}
            visit = [node]
            while visit:
                x = visit.pop()
                if x in viscache:
                    ret.update(viscache[x])
                elif x not in ret:
                    ret[x] = 1
                    if x in branches:
                        visit[len(visit):] = branches[x].keys()
            viscache[node] = ret
            return ret
        if h not in branches:
            continue
        # O(n^2), but somewhat limited.  This only searches the
        # tags visible from a specific head, not all the tags in the
        # whole repo.
        for b in branches[h]:
            vis = False
            for bb in branches[h].keys():
                if b != bb:
                    if b in visible(bb):
                        vis = True
                        break
            if not vis:
                l = out.setdefault(h, [])
                l[len(l):] = self.nodetags(b)
    return out
848
851
def branches(self, nodes):
    """For each node, follow first parents to the root of its branch.

    A 'branch' here is a linear run of single-parent changesets.  For
    every starting node t the walk stops at the first changeset n that
    is a merge (second parent != nullid) or a root (first parent ==
    nullid), and records (t, n, p1, p2).

    nodes: starting nodes; empty/None means [changelog tip].
    Returns a list of (start, branch-root, parent1, parent2) tuples.
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    b = []
    for n in nodes:
        t = n  # remember where this walk started
        while n:
            p = self.changelog.parents(n)
            if p[1] != nullid or p[0] == nullid:
                b.append((t, n, p[0], p[1]))
                break
            n = p[0]
    return b
862
865
def between(self, pairs):
    """Sample the first-parent chain between each (top, bottom) pair.

    For every pair, walks first parents from top towards bottom and
    collects the nodes at exponentially spaced steps (1, 2, 4, ...)
    along the way — the skip-list used by the discovery protocol.
    NOTE(review): assumes bottom is reachable from top via first
    parents; otherwise the walk would not terminate — confirm callers
    guarantee this.

    Returns a list with one node-list per input pair.
    """
    r = []

    for top, bottom in pairs:
        n, l, i = top, [], 0
        f = 1  # next step index to record, doubles each time

        while n != bottom:
            p = self.changelog.parents(n)[0]
            if i == f:
                l.append(n)
                f = f * 2
            n = p
            i += 1

        r.append(l)

    return r
881
884
def findincoming(self, remote, base=None, heads=None, force=False):
    """Find the roots of the changesets we are missing from `remote`.

    base:  optional dict, filled in with nodes known on both sides
           (mutated in place so callers can reuse it).
    heads: remote heads to consider (fetched from remote if absent).
    force: tolerate an unrelated remote repository (warn instead of
           aborting when no common node except nullid is found).

    Returns the list of earliest-unknown changeset nodes; [nullid]
    when the local repo is empty and the remote is not; [] when there
    is nothing to fetch.
    """
    m = self.changelog.nodemap
    search = []      # branch ranges queued for binary search
    fetch = {}       # earliest unknown changesets found so far
    seen = {}
    seenbranch = {}
    if base is None:
        base = {}

    if not heads:
        heads = remote.heads()

    if self.changelog.tip() == nullid:
        # empty local repo: everything remote is new
        if heads != [nullid]:
            return [nullid]
        return []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return []

    rep = {}     # parents already requested from the remote
    reqcnt = 0   # number of wire requests, for the debug summary

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid:
                break
            if n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            if n[1] and n[1] in m:  # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n)  # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1  # earliest unknown
                        base[n[2]] = 1   # latest known
                        continue

                for a in n[2:4]:
                    if a not in rep:
                        r.append(a)
                        rep[a] = 1

            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # batch the branch queries ten parents at a time
            for p in range(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    if b[0] in m:
                        self.ui.debug(_("found base node %s\n")
                                      % short(b[0]))
                        base[b[0]] = 1
                    elif b[0] not in seen:
                        unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            raise repo.RepoError(_("already have changeset ") + short(f[:4]))

    if list(base.keys()) == [nullid]:
        if force:
            self.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    self.ui.note(_("found new changesets starting at ") +
                 " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    # materialized so callers can compare it against a plain list
    return list(fetch.keys())
1011
1014
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    if base is None:
        # no common-node info supplied: discover it ourselves
        # (findincoming fills `base` in place)
        base = {}
        self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    remove = list(base.keys())
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, list(updated_heads.keys())
    else:
        return subset
1059
1062
def pull(self, remote, heads=None, force=False):
    """Pull changes from `remote` into this repository.

    heads: optional remote heads to limit the pull to.
    force: passed through to discovery; allows pulling from an
           unrelated repository.

    Returns 0 when there is nothing to fetch, otherwise the result of
    addchangegroup.  The repository lock is held (via the local
    reference) for the duration of the call.
    """
    l = self.lock()  # held until this frame exits

    fetch = self.findincoming(remote, force=force)
    if fetch == [nullid]:
        # local repo is empty: we are asking for everything
        self.ui.status(_("requesting all changes\n"))

    if not fetch:
        self.ui.status(_("no changes found\n"))
        return 0

    if heads is None:
        cg = remote.changegroup(fetch, 'pull')
    else:
        cg = remote.changegroupsubset(fetch, heads, 'pull')
    return self.addchangegroup(cg)
1076
1079
def push(self, remote, force=False, revs=None):
    """Push local changes to `remote`.

    force: push even with unsynced remote changes or new remote heads.
    revs:  optional list of revisions to push (push -r); otherwise
           everything outgoing is pushed.

    Returns 1 on the warned-and-aborted paths (unsynced remote, no
    changes, would create remote heads); otherwise the result of the
    remote addchangegroup call.
    """
    # renamed from `lock` to avoid shadowing the file-level lock module;
    # the reference keeps the remote lock held until this frame exits
    remotelock = remote.lock()

    base = {}
    remote_heads = remote.heads()
    inc = self.findincoming(remote, base, remote_heads, force=force)
    if not force and inc:
        self.ui.warn(_("abort: unsynced remote changes!\n"))
        self.ui.status(_("(did you forget to sync?"
                         " use push -f to force)\n"))
        return 1

    update, updated_heads = self.findoutgoing(remote, base, remote_heads)
    if revs is not None:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
    else:
        bases, heads = update, self.changelog.heads()

    if not bases:
        self.ui.status(_("no changes found\n"))
        return 1
    elif not force:
        # FIXME we don't properly detect creation of new heads
        # in the push -r case, assume the user knows what he's doing
        if not revs and len(remote_heads) < len(heads) \
           and remote_heads != [nullid]:
            self.ui.warn(_("abort: push creates new remote branches!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return 1

    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return remote.addchangegroup(cg)
1113
1116
    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        Returns a util.chunkbuffer wrapping the generated chunk stream."""

        # Give hooks a chance to veto the outgoing transfer.
        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history. Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            # (Python 2 cmp-style comparator, used via list.sort(cmp).)
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # the inner function.
        def filenode_collector(changedfiles):
            # next_rev is a one-element list so the closure can mutate it
            # (no 'nonlocal' in Python 2).
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming the a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file in we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we now the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up the a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all theses utility functions to help out and
        # logically divide up the task, generate the group.
        # NOTE: the yield order below (changesets, manifests, then per-file
        # groups, closed by closechunk) is the changegroup wire format;
        # it must not be reordered.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1384
1387
1385 def changegroup(self, basenodes, source):
1388 def changegroup(self, basenodes, source):
1386 """Generate a changegroup of all nodes that we have that a recipient
1389 """Generate a changegroup of all nodes that we have that a recipient
1387 doesn't.
1390 doesn't.
1388
1391
1389 This is much easier than the previous function as we can assume that
1392 This is much easier than the previous function as we can assume that
1390 the recipient has any changenode we aren't sending them."""
1393 the recipient has any changenode we aren't sending them."""
1391
1394
1392 self.hook('preoutgoing', throw=True, source=source)
1395 self.hook('preoutgoing', throw=True, source=source)
1393
1396
1394 cl = self.changelog
1397 cl = self.changelog
1395 nodes = cl.nodesbetween(basenodes, None)[0]
1398 nodes = cl.nodesbetween(basenodes, None)[0]
1396 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1399 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1397
1400
1398 def identity(x):
1401 def identity(x):
1399 return x
1402 return x
1400
1403
1401 def gennodelst(revlog):
1404 def gennodelst(revlog):
1402 for r in xrange(0, revlog.count()):
1405 for r in xrange(0, revlog.count()):
1403 n = revlog.node(r)
1406 n = revlog.node(r)
1404 if revlog.linkrev(n) in revset:
1407 if revlog.linkrev(n) in revset:
1405 yield n
1408 yield n
1406
1409
1407 def changed_file_collector(changedfileset):
1410 def changed_file_collector(changedfileset):
1408 def collect_changed_files(clnode):
1411 def collect_changed_files(clnode):
1409 c = cl.read(clnode)
1412 c = cl.read(clnode)
1410 for fname in c[3]:
1413 for fname in c[3]:
1411 changedfileset[fname] = 1
1414 changedfileset[fname] = 1
1412 return collect_changed_files
1415 return collect_changed_files
1413
1416
1414 def lookuprevlink_func(revlog):
1417 def lookuprevlink_func(revlog):
1415 def lookuprevlink(n):
1418 def lookuprevlink(n):
1416 return cl.node(revlog.linkrev(n))
1419 return cl.node(revlog.linkrev(n))
1417 return lookuprevlink
1420 return lookuprevlink
1418
1421
1419 def gengroup():
1422 def gengroup():
1420 # construct a list of all changed files
1423 # construct a list of all changed files
1421 changedfiles = {}
1424 changedfiles = {}
1422
1425
1423 for chnk in cl.group(nodes, identity,
1426 for chnk in cl.group(nodes, identity,
1424 changed_file_collector(changedfiles)):
1427 changed_file_collector(changedfiles)):
1425 yield chnk
1428 yield chnk
1426 changedfiles = changedfiles.keys()
1429 changedfiles = changedfiles.keys()
1427 changedfiles.sort()
1430 changedfiles.sort()
1428
1431
1429 mnfst = self.manifest
1432 mnfst = self.manifest
1430 nodeiter = gennodelst(mnfst)
1433 nodeiter = gennodelst(mnfst)
1431 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1434 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1432 yield chnk
1435 yield chnk
1433
1436
1434 for fname in changedfiles:
1437 for fname in changedfiles:
1435 filerevlog = self.file(fname)
1438 filerevlog = self.file(fname)
1436 nodeiter = gennodelst(filerevlog)
1439 nodeiter = gennodelst(filerevlog)
1437 nodeiter = list(nodeiter)
1440 nodeiter = list(nodeiter)
1438 if nodeiter:
1441 if nodeiter:
1439 yield changegroup.genchunk(fname)
1442 yield changegroup.genchunk(fname)
1440 lookup = lookuprevlink_func(filerevlog)
1443 lookup = lookuprevlink_func(filerevlog)
1441 for chnk in filerevlog.group(nodeiter, lookup):
1444 for chnk in filerevlog.group(nodeiter, lookup):
1442 yield chnk
1445 yield chnk
1443
1446
1444 yield changegroup.closechunk()
1447 yield changegroup.closechunk()
1445
1448
1446 if nodes:
1449 if nodes:
1447 self.hook('outgoing', node=hex(nodes[0]), source=source)
1450 self.hook('outgoing', node=hex(nodes[0]), source=source)
1448
1451
1449 return util.chunkbuffer(gengroup())
1452 return util.chunkbuffer(gengroup())
1450
1453
1451 def addchangegroup(self, source):
1454 def addchangegroup(self, source):
1452 """add changegroup to repo.
1455 """add changegroup to repo.
1453 returns number of heads modified or added + 1."""
1456 returns number of heads modified or added + 1."""
1454
1457
1455 def csmap(x):
1458 def csmap(x):
1456 self.ui.debug(_("add changeset %s\n") % short(x))
1459 self.ui.debug(_("add changeset %s\n") % short(x))
1457 return cl.count()
1460 return cl.count()
1458
1461
1459 def revmap(x):
1462 def revmap(x):
1460 return cl.rev(x)
1463 return cl.rev(x)
1461
1464
1462 if not source:
1465 if not source:
1463 return 0
1466 return 0
1464
1467
1465 self.hook('prechangegroup', throw=True)
1468 self.hook('prechangegroup', throw=True)
1466
1469
1467 changesets = files = revisions = 0
1470 changesets = files = revisions = 0
1468
1471
1469 tr = self.transaction()
1472 tr = self.transaction()
1470
1473
1471 # write changelog and manifest data to temp files so
1474 # write changelog and manifest data to temp files so
1472 # concurrent readers will not see inconsistent view
1475 # concurrent readers will not see inconsistent view
1473 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1476 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1474
1477
1475 oldheads = len(cl.heads())
1478 oldheads = len(cl.heads())
1476
1479
1477 # pull off the changeset group
1480 # pull off the changeset group
1478 self.ui.status(_("adding changesets\n"))
1481 self.ui.status(_("adding changesets\n"))
1479 co = cl.tip()
1482 co = cl.tip()
1480 chunkiter = changegroup.chunkiter(source)
1483 chunkiter = changegroup.chunkiter(source)
1481 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1484 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1482 cnr, cor = map(cl.rev, (cn, co))
1485 cnr, cor = map(cl.rev, (cn, co))
1483 if cn == nullid:
1486 if cn == nullid:
1484 cnr = cor
1487 cnr = cor
1485 changesets = cnr - cor
1488 changesets = cnr - cor
1486
1489
1487 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1490 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1488
1491
1489 # pull off the manifest group
1492 # pull off the manifest group
1490 self.ui.status(_("adding manifests\n"))
1493 self.ui.status(_("adding manifests\n"))
1491 mm = mf.tip()
1494 mm = mf.tip()
1492 chunkiter = changegroup.chunkiter(source)
1495 chunkiter = changegroup.chunkiter(source)
1493 mo = mf.addgroup(chunkiter, revmap, tr)
1496 mo = mf.addgroup(chunkiter, revmap, tr)
1494
1497
1495 # process the files
1498 # process the files
1496 self.ui.status(_("adding file changes\n"))
1499 self.ui.status(_("adding file changes\n"))
1497 while 1:
1500 while 1:
1498 f = changegroup.getchunk(source)
1501 f = changegroup.getchunk(source)
1499 if not f:
1502 if not f:
1500 break
1503 break
1501 self.ui.debug(_("adding %s revisions\n") % f)
1504 self.ui.debug(_("adding %s revisions\n") % f)
1502 fl = self.file(f)
1505 fl = self.file(f)
1503 o = fl.count()
1506 o = fl.count()
1504 chunkiter = changegroup.chunkiter(source)
1507 chunkiter = changegroup.chunkiter(source)
1505 n = fl.addgroup(chunkiter, revmap, tr)
1508 n = fl.addgroup(chunkiter, revmap, tr)
1506 revisions += fl.count() - o
1509 revisions += fl.count() - o
1507 files += 1
1510 files += 1
1508
1511
1509 # write order here is important so concurrent readers will see
1512 # write order here is important so concurrent readers will see
1510 # consistent view of repo
1513 # consistent view of repo
1511 mf.writedata()
1514 mf.writedata()
1512 cl.writedata()
1515 cl.writedata()
1513
1516
1514 # make changelog and manifest see real files again
1517 # make changelog and manifest see real files again
1515 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1518 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1516 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1519 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1517 self.changelog.checkinlinesize(tr)
1520 self.changelog.checkinlinesize(tr)
1518 self.manifest.checkinlinesize(tr)
1521 self.manifest.checkinlinesize(tr)
1519
1522
1520 newheads = len(self.changelog.heads())
1523 newheads = len(self.changelog.heads())
1521 heads = ""
1524 heads = ""
1522 if oldheads and newheads > oldheads:
1525 if oldheads and newheads > oldheads:
1523 heads = _(" (+%d heads)") % (newheads - oldheads)
1526 heads = _(" (+%d heads)") % (newheads - oldheads)
1524
1527
1525 self.ui.status(_("added %d changesets"
1528 self.ui.status(_("added %d changesets"
1526 " with %d changes to %d files%s\n")
1529 " with %d changes to %d files%s\n")
1527 % (changesets, revisions, files, heads))
1530 % (changesets, revisions, files, heads))
1528
1531
1529 self.hook('pretxnchangegroup', throw=True,
1532 self.hook('pretxnchangegroup', throw=True,
1530 node=hex(self.changelog.node(cor+1)))
1533 node=hex(self.changelog.node(cor+1)))
1531
1534
1532 tr.close()
1535 tr.close()
1533
1536
1534 if changesets > 0:
1537 if changesets > 0:
1535 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1538 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1536
1539
1537 for i in range(cor + 1, cnr + 1):
1540 for i in range(cor + 1, cnr + 1):
1538 self.hook("incoming", node=hex(self.changelog.node(i)))
1541 self.hook("incoming", node=hex(self.changelog.node(i)))
1539
1542
1540 return newheads - oldheads + 1
1543 return newheads - oldheads + 1
1541
1544
1542 def update(self, node, allow=False, force=False, choose=None,
1545 def update(self, node, allow=False, force=False, choose=None,
1543 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1546 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1544 pl = self.dirstate.parents()
1547 pl = self.dirstate.parents()
1545 if not force and pl[1] != nullid:
1548 if not force and pl[1] != nullid:
1546 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1549 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1547 return 1
1550 return 1
1548
1551
1549 err = False
1552 err = False
1550
1553
1551 p1, p2 = pl[0], node
1554 p1, p2 = pl[0], node
1552 pa = self.changelog.ancestor(p1, p2)
1555 pa = self.changelog.ancestor(p1, p2)
1553 m1n = self.changelog.read(p1)[0]
1556 m1n = self.changelog.read(p1)[0]
1554 m2n = self.changelog.read(p2)[0]
1557 m2n = self.changelog.read(p2)[0]
1555 man = self.manifest.ancestor(m1n, m2n)
1558 man = self.manifest.ancestor(m1n, m2n)
1556 m1 = self.manifest.read(m1n)
1559 m1 = self.manifest.read(m1n)
1557 mf1 = self.manifest.readflags(m1n)
1560 mf1 = self.manifest.readflags(m1n)
1558 m2 = self.manifest.read(m2n).copy()
1561 m2 = self.manifest.read(m2n).copy()
1559 mf2 = self.manifest.readflags(m2n)
1562 mf2 = self.manifest.readflags(m2n)
1560 ma = self.manifest.read(man)
1563 ma = self.manifest.read(man)
1561 mfa = self.manifest.readflags(man)
1564 mfa = self.manifest.readflags(man)
1562
1565
1563 modified, added, removed, deleted, unknown = self.changes()
1566 modified, added, removed, deleted, unknown = self.changes()
1564
1567
1565 # is this a jump, or a merge? i.e. is there a linear path
1568 # is this a jump, or a merge? i.e. is there a linear path
1566 # from p1 to p2?
1569 # from p1 to p2?
1567 linear_path = (pa == p1 or pa == p2)
1570 linear_path = (pa == p1 or pa == p2)
1568
1571
1569 if allow and linear_path:
1572 if allow and linear_path:
1570 raise util.Abort(_("there is nothing to merge, "
1573 raise util.Abort(_("there is nothing to merge, "
1571 "just use 'hg update'"))
1574 "just use 'hg update'"))
1572 if allow and not forcemerge:
1575 if allow and not forcemerge:
1573 if modified or added or removed:
1576 if modified or added or removed:
1574 raise util.Abort(_("outstanding uncommitted changes"))
1577 raise util.Abort(_("outstanding uncommitted changes"))
1575 if not forcemerge and not force:
1578 if not forcemerge and not force:
1576 for f in unknown:
1579 for f in unknown:
1577 if f in m2:
1580 if f in m2:
1578 t1 = self.wread(f)
1581 t1 = self.wread(f)
1579 t2 = self.file(f).read(m2[f])
1582 t2 = self.file(f).read(m2[f])
1580 if cmp(t1, t2) != 0:
1583 if cmp(t1, t2) != 0:
1581 raise util.Abort(_("'%s' already exists in the working"
1584 raise util.Abort(_("'%s' already exists in the working"
1582 " dir and differs from remote") % f)
1585 " dir and differs from remote") % f)
1583
1586
1584 # resolve the manifest to determine which files
1587 # resolve the manifest to determine which files
1585 # we care about merging
1588 # we care about merging
1586 self.ui.note(_("resolving manifests\n"))
1589 self.ui.note(_("resolving manifests\n"))
1587 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1590 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1588 (force, allow, moddirstate, linear_path))
1591 (force, allow, moddirstate, linear_path))
1589 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1592 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1590 (short(man), short(m1n), short(m2n)))
1593 (short(man), short(m1n), short(m2n)))
1591
1594
1592 merge = {}
1595 merge = {}
1593 get = {}
1596 get = {}
1594 remove = []
1597 remove = []
1595
1598
1596 # construct a working dir manifest
1599 # construct a working dir manifest
1597 mw = m1.copy()
1600 mw = m1.copy()
1598 mfw = mf1.copy()
1601 mfw = mf1.copy()
1599 umap = dict.fromkeys(unknown)
1602 umap = dict.fromkeys(unknown)
1600
1603
1601 for f in added + modified + unknown:
1604 for f in added + modified + unknown:
1602 mw[f] = ""
1605 mw[f] = ""
1603 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1606 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1604
1607
1605 if moddirstate and not wlock:
1608 if moddirstate and not wlock:
1606 wlock = self.wlock()
1609 wlock = self.wlock()
1607
1610
1608 for f in deleted + removed:
1611 for f in deleted + removed:
1609 if f in mw:
1612 if f in mw:
1610 del mw[f]
1613 del mw[f]
1611
1614
1612 # If we're jumping between revisions (as opposed to merging),
1615 # If we're jumping between revisions (as opposed to merging),
1613 # and if neither the working directory nor the target rev has
1616 # and if neither the working directory nor the target rev has
1614 # the file, then we need to remove it from the dirstate, to
1617 # the file, then we need to remove it from the dirstate, to
1615 # prevent the dirstate from listing the file when it is no
1618 # prevent the dirstate from listing the file when it is no
1616 # longer in the manifest.
1619 # longer in the manifest.
1617 if moddirstate and linear_path and f not in m2:
1620 if moddirstate and linear_path and f not in m2:
1618 self.dirstate.forget((f,))
1621 self.dirstate.forget((f,))
1619
1622
1620 # Compare manifests
1623 # Compare manifests
1621 for f, n in mw.iteritems():
1624 for f, n in mw.iteritems():
1622 if choose and not choose(f):
1625 if choose and not choose(f):
1623 continue
1626 continue
1624 if f in m2:
1627 if f in m2:
1625 s = 0
1628 s = 0
1626
1629
1627 # is the wfile new since m1, and match m2?
1630 # is the wfile new since m1, and match m2?
1628 if f not in m1:
1631 if f not in m1:
1629 t1 = self.wread(f)
1632 t1 = self.wread(f)
1630 t2 = self.file(f).read(m2[f])
1633 t2 = self.file(f).read(m2[f])
1631 if cmp(t1, t2) == 0:
1634 if cmp(t1, t2) == 0:
1632 n = m2[f]
1635 n = m2[f]
1633 del t1, t2
1636 del t1, t2
1634
1637
1635 # are files different?
1638 # are files different?
1636 if n != m2[f]:
1639 if n != m2[f]:
1637 a = ma.get(f, nullid)
1640 a = ma.get(f, nullid)
1638 # are both different from the ancestor?
1641 # are both different from the ancestor?
1639 if n != a and m2[f] != a:
1642 if n != a and m2[f] != a:
1640 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1643 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1641 # merge executable bits
1644 # merge executable bits
1642 # "if we changed or they changed, change in merge"
1645 # "if we changed or they changed, change in merge"
1643 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1646 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1644 mode = ((a^b) | (a^c)) ^ a
1647 mode = ((a^b) | (a^c)) ^ a
1645 merge[f] = (m1.get(f, nullid), m2[f], mode)
1648 merge[f] = (m1.get(f, nullid), m2[f], mode)
1646 s = 1
1649 s = 1
1647 # are we clobbering?
1650 # are we clobbering?
1648 # is remote's version newer?
1651 # is remote's version newer?
1649 # or are we going back in time?
1652 # or are we going back in time?
1650 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1653 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1651 self.ui.debug(_(" remote %s is newer, get\n") % f)
1654 self.ui.debug(_(" remote %s is newer, get\n") % f)
1652 get[f] = m2[f]
1655 get[f] = m2[f]
1653 s = 1
1656 s = 1
1654 elif f in umap or f in added:
1657 elif f in umap or f in added:
1655 # this unknown file is the same as the checkout
1658 # this unknown file is the same as the checkout
1656 # we need to reset the dirstate if the file was added
1659 # we need to reset the dirstate if the file was added
1657 get[f] = m2[f]
1660 get[f] = m2[f]
1658
1661
1659 if not s and mfw[f] != mf2[f]:
1662 if not s and mfw[f] != mf2[f]:
1660 if force:
1663 if force:
1661 self.ui.debug(_(" updating permissions for %s\n") % f)
1664 self.ui.debug(_(" updating permissions for %s\n") % f)
1662 util.set_exec(self.wjoin(f), mf2[f])
1665 util.set_exec(self.wjoin(f), mf2[f])
1663 else:
1666 else:
1664 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1667 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1665 mode = ((a^b) | (a^c)) ^ a
1668 mode = ((a^b) | (a^c)) ^ a
1666 if mode != b:
1669 if mode != b:
1667 self.ui.debug(_(" updating permissions for %s\n")
1670 self.ui.debug(_(" updating permissions for %s\n")
1668 % f)
1671 % f)
1669 util.set_exec(self.wjoin(f), mode)
1672 util.set_exec(self.wjoin(f), mode)
1670 del m2[f]
1673 del m2[f]
1671 elif f in ma:
1674 elif f in ma:
1672 if n != ma[f]:
1675 if n != ma[f]:
1673 r = _("d")
1676 r = _("d")
1674 if not force and (linear_path or allow):
1677 if not force and (linear_path or allow):
1675 r = self.ui.prompt(
1678 r = self.ui.prompt(
1676 (_(" local changed %s which remote deleted\n") % f) +
1679 (_(" local changed %s which remote deleted\n") % f) +
1677 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1680 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1678 if r == _("d"):
1681 if r == _("d"):
1679 remove.append(f)
1682 remove.append(f)
1680 else:
1683 else:
1681 self.ui.debug(_("other deleted %s\n") % f)
1684 self.ui.debug(_("other deleted %s\n") % f)
1682 remove.append(f) # other deleted it
1685 remove.append(f) # other deleted it
1683 else:
1686 else:
1684 # file is created on branch or in working directory
1687 # file is created on branch or in working directory
1685 if force and f not in umap:
1688 if force and f not in umap:
1686 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1689 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1687 remove.append(f)
1690 remove.append(f)
1688 elif n == m1.get(f, nullid): # same as parent
1691 elif n == m1.get(f, nullid): # same as parent
1689 if p2 == pa: # going backwards?
1692 if p2 == pa: # going backwards?
1690 self.ui.debug(_("remote deleted %s\n") % f)
1693 self.ui.debug(_("remote deleted %s\n") % f)
1691 remove.append(f)
1694 remove.append(f)
1692 else:
1695 else:
1693 self.ui.debug(_("local modified %s, keeping\n") % f)
1696 self.ui.debug(_("local modified %s, keeping\n") % f)
1694 else:
1697 else:
1695 self.ui.debug(_("working dir created %s, keeping\n") % f)
1698 self.ui.debug(_("working dir created %s, keeping\n") % f)
1696
1699
1697 for f, n in m2.iteritems():
1700 for f, n in m2.iteritems():
1698 if choose and not choose(f):
1701 if choose and not choose(f):
1699 continue
1702 continue
1700 if f[0] == "/":
1703 if f[0] == "/":
1701 continue
1704 continue
1702 if f in ma and n != ma[f]:
1705 if f in ma and n != ma[f]:
1703 r = _("k")
1706 r = _("k")
1704 if not force and (linear_path or allow):
1707 if not force and (linear_path or allow):
1705 r = self.ui.prompt(
1708 r = self.ui.prompt(
1706 (_("remote changed %s which local deleted\n") % f) +
1709 (_("remote changed %s which local deleted\n") % f) +
1707 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1710 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1708 if r == _("k"):
1711 if r == _("k"):
1709 get[f] = n
1712 get[f] = n
1710 elif f not in ma:
1713 elif f not in ma:
1711 self.ui.debug(_("remote created %s\n") % f)
1714 self.ui.debug(_("remote created %s\n") % f)
1712 get[f] = n
1715 get[f] = n
1713 else:
1716 else:
1714 if force or p2 == pa: # going backwards?
1717 if force or p2 == pa: # going backwards?
1715 self.ui.debug(_("local deleted %s, recreating\n") % f)
1718 self.ui.debug(_("local deleted %s, recreating\n") % f)
1716 get[f] = n
1719 get[f] = n
1717 else:
1720 else:
1718 self.ui.debug(_("local deleted %s\n") % f)
1721 self.ui.debug(_("local deleted %s\n") % f)
1719
1722
1720 del mw, m1, m2, ma
1723 del mw, m1, m2, ma
1721
1724
1722 if force:
1725 if force:
1723 for f in merge:
1726 for f in merge:
1724 get[f] = merge[f][1]
1727 get[f] = merge[f][1]
1725 merge = {}
1728 merge = {}
1726
1729
1727 if linear_path or force:
1730 if linear_path or force:
1728 # we don't need to do any magic, just jump to the new rev
1731 # we don't need to do any magic, just jump to the new rev
1729 branch_merge = False
1732 branch_merge = False
1730 p1, p2 = p2, nullid
1733 p1, p2 = p2, nullid
1731 else:
1734 else:
1732 if not allow:
1735 if not allow:
1733 self.ui.status(_("this update spans a branch"
1736 self.ui.status(_("this update spans a branch"
1734 " affecting the following files:\n"))
1737 " affecting the following files:\n"))
1735 fl = merge.keys() + get.keys()
1738 fl = merge.keys() + get.keys()
1736 fl.sort()
1739 fl.sort()
1737 for f in fl:
1740 for f in fl:
1738 cf = ""
1741 cf = ""
1739 if f in merge:
1742 if f in merge:
1740 cf = _(" (resolve)")
1743 cf = _(" (resolve)")
1741 self.ui.status(" %s%s\n" % (f, cf))
1744 self.ui.status(" %s%s\n" % (f, cf))
1742 self.ui.warn(_("aborting update spanning branches!\n"))
1745 self.ui.warn(_("aborting update spanning branches!\n"))
1743 self.ui.status(_("(use 'hg merge' to merge across branches"
1746 self.ui.status(_("(use 'hg merge' to merge across branches"
1744 " or 'hg update -C' to lose changes)\n"))
1747 " or 'hg update -C' to lose changes)\n"))
1745 return 1
1748 return 1
1746 branch_merge = True
1749 branch_merge = True
1747
1750
1748 # get the files we don't need to change
1751 # get the files we don't need to change
1749 files = get.keys()
1752 files = get.keys()
1750 files.sort()
1753 files.sort()
1751 for f in files:
1754 for f in files:
1752 if f[0] == "/":
1755 if f[0] == "/":
1753 continue
1756 continue
1754 self.ui.note(_("getting %s\n") % f)
1757 self.ui.note(_("getting %s\n") % f)
1755 t = self.file(f).read(get[f])
1758 t = self.file(f).read(get[f])
1756 self.wwrite(f, t)
1759 self.wwrite(f, t)
1757 util.set_exec(self.wjoin(f), mf2[f])
1760 util.set_exec(self.wjoin(f), mf2[f])
1758 if moddirstate:
1761 if moddirstate:
1759 if branch_merge:
1762 if branch_merge:
1760 self.dirstate.update([f], 'n', st_mtime=-1)
1763 self.dirstate.update([f], 'n', st_mtime=-1)
1761 else:
1764 else:
1762 self.dirstate.update([f], 'n')
1765 self.dirstate.update([f], 'n')
1763
1766
1764 # merge the tricky bits
1767 # merge the tricky bits
1765 failedmerge = []
1768 failedmerge = []
1766 files = merge.keys()
1769 files = merge.keys()
1767 files.sort()
1770 files.sort()
1768 xp1 = hex(p1)
1771 xp1 = hex(p1)
1769 xp2 = hex(p2)
1772 xp2 = hex(p2)
1770 for f in files:
1773 for f in files:
1771 self.ui.status(_("merging %s\n") % f)
1774 self.ui.status(_("merging %s\n") % f)
1772 my, other, flag = merge[f]
1775 my, other, flag = merge[f]
1773 ret = self.merge3(f, my, other, xp1, xp2)
1776 ret = self.merge3(f, my, other, xp1, xp2)
1774 if ret:
1777 if ret:
1775 err = True
1778 err = True
1776 failedmerge.append(f)
1779 failedmerge.append(f)
1777 util.set_exec(self.wjoin(f), flag)
1780 util.set_exec(self.wjoin(f), flag)
1778 if moddirstate:
1781 if moddirstate:
1779 if branch_merge:
1782 if branch_merge:
1780 # We've done a branch merge, mark this file as merged
1783 # We've done a branch merge, mark this file as merged
1781 # so that we properly record the merger later
1784 # so that we properly record the merger later
1782 self.dirstate.update([f], 'm')
1785 self.dirstate.update([f], 'm')
1783 else:
1786 else:
1784 # We've update-merged a locally modified file, so
1787 # We've update-merged a locally modified file, so
1785 # we set the dirstate to emulate a normal checkout
1788 # we set the dirstate to emulate a normal checkout
1786 # of that file some time in the past. Thus our
1789 # of that file some time in the past. Thus our
1787 # merge will appear as a normal local file
1790 # merge will appear as a normal local file
1788 # modification.
1791 # modification.
1789 f_len = len(self.file(f).read(other))
1792 f_len = len(self.file(f).read(other))
1790 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1793 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1791
1794
1792 remove.sort()
1795 remove.sort()
1793 for f in remove:
1796 for f in remove:
1794 self.ui.note(_("removing %s\n") % f)
1797 self.ui.note(_("removing %s\n") % f)
1795 util.audit_path(f)
1798 util.audit_path(f)
1796 try:
1799 try:
1797 util.unlink(self.wjoin(f))
1800 util.unlink(self.wjoin(f))
1798 except OSError, inst:
1801 except OSError, inst:
1799 if inst.errno != errno.ENOENT:
1802 if inst.errno != errno.ENOENT:
1800 self.ui.warn(_("update failed to remove %s: %s!\n") %
1803 self.ui.warn(_("update failed to remove %s: %s!\n") %
1801 (f, inst.strerror))
1804 (f, inst.strerror))
1802 if moddirstate:
1805 if moddirstate:
1803 if branch_merge:
1806 if branch_merge:
1804 self.dirstate.update(remove, 'r')
1807 self.dirstate.update(remove, 'r')
1805 else:
1808 else:
1806 self.dirstate.forget(remove)
1809 self.dirstate.forget(remove)
1807
1810
1808 if moddirstate:
1811 if moddirstate:
1809 self.dirstate.setparents(p1, p2)
1812 self.dirstate.setparents(p1, p2)
1810
1813
1811 if show_stats:
1814 if show_stats:
1812 stats = ((len(get), _("updated")),
1815 stats = ((len(get), _("updated")),
1813 (len(merge) - len(failedmerge), _("merged")),
1816 (len(merge) - len(failedmerge), _("merged")),
1814 (len(remove), _("removed")),
1817 (len(remove), _("removed")),
1815 (len(failedmerge), _("unresolved")))
1818 (len(failedmerge), _("unresolved")))
1816 note = ", ".join([_("%d files %s") % s for s in stats])
1819 note = ", ".join([_("%d files %s") % s for s in stats])
1817 self.ui.status("%s\n" % note)
1820 self.ui.status("%s\n" % note)
1818 if moddirstate:
1821 if moddirstate:
1819 if branch_merge:
1822 if branch_merge:
1820 if failedmerge:
1823 if failedmerge:
1821 self.ui.status(_("There are unresolved merges,"
1824 self.ui.status(_("There are unresolved merges,"
1822 " you can redo the full merge using:\n"
1825 " you can redo the full merge using:\n"
1823 " hg update -C %s\n"
1826 " hg update -C %s\n"
1824 " hg merge %s\n"
1827 " hg merge %s\n"
1825 % (self.changelog.rev(p1),
1828 % (self.changelog.rev(p1),
1826 self.changelog.rev(p2))))
1829 self.changelog.rev(p2))))
1827 else:
1830 else:
1828 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1831 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1829 elif failedmerge:
1832 elif failedmerge:
1830 self.ui.status(_("There are unresolved merges with"
1833 self.ui.status(_("There are unresolved merges with"
1831 " locally modified files.\n"))
1834 " locally modified files.\n"))
1832
1835
1833 return err
1836 return err
1834
1837
1835 def merge3(self, fn, my, other, p1, p2):
1838 def merge3(self, fn, my, other, p1, p2):
1836 """perform a 3-way merge in the working directory"""
1839 """perform a 3-way merge in the working directory"""
1837
1840
1838 def temp(prefix, node):
1841 def temp(prefix, node):
1839 pre = "%s~%s." % (os.path.basename(fn), prefix)
1842 pre = "%s~%s." % (os.path.basename(fn), prefix)
1840 (fd, name) = tempfile.mkstemp(prefix=pre)
1843 (fd, name) = tempfile.mkstemp(prefix=pre)
1841 f = os.fdopen(fd, "wb")
1844 f = os.fdopen(fd, "wb")
1842 self.wwrite(fn, fl.read(node), f)
1845 self.wwrite(fn, fl.read(node), f)
1843 f.close()
1846 f.close()
1844 return name
1847 return name
1845
1848
1846 fl = self.file(fn)
1849 fl = self.file(fn)
1847 base = fl.ancestor(my, other)
1850 base = fl.ancestor(my, other)
1848 a = self.wjoin(fn)
1851 a = self.wjoin(fn)
1849 b = temp("base", base)
1852 b = temp("base", base)
1850 c = temp("other", other)
1853 c = temp("other", other)
1851
1854
1852 self.ui.note(_("resolving %s\n") % fn)
1855 self.ui.note(_("resolving %s\n") % fn)
1853 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1856 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1854 (fn, short(my), short(other), short(base)))
1857 (fn, short(my), short(other), short(base)))
1855
1858
1856 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1859 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1857 or "hgmerge")
1860 or "hgmerge")
1858 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1861 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1859 environ={'HG_FILE': fn,
1862 environ={'HG_FILE': fn,
1860 'HG_MY_NODE': p1,
1863 'HG_MY_NODE': p1,
1861 'HG_OTHER_NODE': p2,
1864 'HG_OTHER_NODE': p2,
1862 'HG_FILE_MY_NODE': hex(my),
1865 'HG_FILE_MY_NODE': hex(my),
1863 'HG_FILE_OTHER_NODE': hex(other),
1866 'HG_FILE_OTHER_NODE': hex(other),
1864 'HG_FILE_BASE_NODE': hex(base)})
1867 'HG_FILE_BASE_NODE': hex(base)})
1865 if r:
1868 if r:
1866 self.ui.warn(_("merging %s failed!\n") % fn)
1869 self.ui.warn(_("merging %s failed!\n") % fn)
1867
1870
1868 os.unlink(b)
1871 os.unlink(b)
1869 os.unlink(c)
1872 os.unlink(c)
1870 return r
1873 return r
1871
1874
1872 def verify(self):
1875 def verify(self):
1873 filelinkrevs = {}
1876 filelinkrevs = {}
1874 filenodes = {}
1877 filenodes = {}
1875 changesets = revisions = files = 0
1878 changesets = revisions = files = 0
1876 errors = [0]
1879 errors = [0]
1877 warnings = [0]
1880 warnings = [0]
1878 neededmanifests = {}
1881 neededmanifests = {}
1879
1882
1880 def err(msg):
1883 def err(msg):
1881 self.ui.warn(msg + "\n")
1884 self.ui.warn(msg + "\n")
1882 errors[0] += 1
1885 errors[0] += 1
1883
1886
1884 def warn(msg):
1887 def warn(msg):
1885 self.ui.warn(msg + "\n")
1888 self.ui.warn(msg + "\n")
1886 warnings[0] += 1
1889 warnings[0] += 1
1887
1890
1888 def checksize(obj, name):
1891 def checksize(obj, name):
1889 d = obj.checksize()
1892 d = obj.checksize()
1890 if d[0]:
1893 if d[0]:
1891 err(_("%s data length off by %d bytes") % (name, d[0]))
1894 err(_("%s data length off by %d bytes") % (name, d[0]))
1892 if d[1]:
1895 if d[1]:
1893 err(_("%s index contains %d extra bytes") % (name, d[1]))
1896 err(_("%s index contains %d extra bytes") % (name, d[1]))
1894
1897
1895 def checkversion(obj, name):
1898 def checkversion(obj, name):
1896 if obj.version != revlog.REVLOGV0:
1899 if obj.version != revlog.REVLOGV0:
1897 if not revlogv1:
1900 if not revlogv1:
1898 warn(_("warning: `%s' uses revlog format 1") % name)
1901 warn(_("warning: `%s' uses revlog format 1") % name)
1899 elif revlogv1:
1902 elif revlogv1:
1900 warn(_("warning: `%s' uses revlog format 0") % name)
1903 warn(_("warning: `%s' uses revlog format 0") % name)
1901
1904
1902 revlogv1 = self.revlogversion != revlog.REVLOGV0
1905 revlogv1 = self.revlogversion != revlog.REVLOGV0
1903 if self.ui.verbose or revlogv1 != self.revlogv1:
1906 if self.ui.verbose or revlogv1 != self.revlogv1:
1904 self.ui.status(_("repository uses revlog format %d\n") %
1907 self.ui.status(_("repository uses revlog format %d\n") %
1905 (revlogv1 and 1 or 0))
1908 (revlogv1 and 1 or 0))
1906
1909
1907 seen = {}
1910 seen = {}
1908 self.ui.status(_("checking changesets\n"))
1911 self.ui.status(_("checking changesets\n"))
1909 checksize(self.changelog, "changelog")
1912 checksize(self.changelog, "changelog")
1910
1913
1911 for i in range(self.changelog.count()):
1914 for i in range(self.changelog.count()):
1912 changesets += 1
1915 changesets += 1
1913 n = self.changelog.node(i)
1916 n = self.changelog.node(i)
1914 l = self.changelog.linkrev(n)
1917 l = self.changelog.linkrev(n)
1915 if l != i:
1918 if l != i:
1916 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1919 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1917 if n in seen:
1920 if n in seen:
1918 err(_("duplicate changeset at revision %d") % i)
1921 err(_("duplicate changeset at revision %d") % i)
1919 seen[n] = 1
1922 seen[n] = 1
1920
1923
1921 for p in self.changelog.parents(n):
1924 for p in self.changelog.parents(n):
1922 if p not in self.changelog.nodemap:
1925 if p not in self.changelog.nodemap:
1923 err(_("changeset %s has unknown parent %s") %
1926 err(_("changeset %s has unknown parent %s") %
1924 (short(n), short(p)))
1927 (short(n), short(p)))
1925 try:
1928 try:
1926 changes = self.changelog.read(n)
1929 changes = self.changelog.read(n)
1927 except KeyboardInterrupt:
1930 except KeyboardInterrupt:
1928 self.ui.warn(_("interrupted"))
1931 self.ui.warn(_("interrupted"))
1929 raise
1932 raise
1930 except Exception, inst:
1933 except Exception, inst:
1931 err(_("unpacking changeset %s: %s") % (short(n), inst))
1934 err(_("unpacking changeset %s: %s") % (short(n), inst))
1932 continue
1935 continue
1933
1936
1934 neededmanifests[changes[0]] = n
1937 neededmanifests[changes[0]] = n
1935
1938
1936 for f in changes[3]:
1939 for f in changes[3]:
1937 filelinkrevs.setdefault(f, []).append(i)
1940 filelinkrevs.setdefault(f, []).append(i)
1938
1941
1939 seen = {}
1942 seen = {}
1940 self.ui.status(_("checking manifests\n"))
1943 self.ui.status(_("checking manifests\n"))
1941 checkversion(self.manifest, "manifest")
1944 checkversion(self.manifest, "manifest")
1942 checksize(self.manifest, "manifest")
1945 checksize(self.manifest, "manifest")
1943
1946
1944 for i in range(self.manifest.count()):
1947 for i in range(self.manifest.count()):
1945 n = self.manifest.node(i)
1948 n = self.manifest.node(i)
1946 l = self.manifest.linkrev(n)
1949 l = self.manifest.linkrev(n)
1947
1950
1948 if l < 0 or l >= self.changelog.count():
1951 if l < 0 or l >= self.changelog.count():
1949 err(_("bad manifest link (%d) at revision %d") % (l, i))
1952 err(_("bad manifest link (%d) at revision %d") % (l, i))
1950
1953
1951 if n in neededmanifests:
1954 if n in neededmanifests:
1952 del neededmanifests[n]
1955 del neededmanifests[n]
1953
1956
1954 if n in seen:
1957 if n in seen:
1955 err(_("duplicate manifest at revision %d") % i)
1958 err(_("duplicate manifest at revision %d") % i)
1956
1959
1957 seen[n] = 1
1960 seen[n] = 1
1958
1961
1959 for p in self.manifest.parents(n):
1962 for p in self.manifest.parents(n):
1960 if p not in self.manifest.nodemap:
1963 if p not in self.manifest.nodemap:
1961 err(_("manifest %s has unknown parent %s") %
1964 err(_("manifest %s has unknown parent %s") %
1962 (short(n), short(p)))
1965 (short(n), short(p)))
1963
1966
1964 try:
1967 try:
1965 delta = mdiff.patchtext(self.manifest.delta(n))
1968 delta = mdiff.patchtext(self.manifest.delta(n))
1966 except KeyboardInterrupt:
1969 except KeyboardInterrupt:
1967 self.ui.warn(_("interrupted"))
1970 self.ui.warn(_("interrupted"))
1968 raise
1971 raise
1969 except Exception, inst:
1972 except Exception, inst:
1970 err(_("unpacking manifest %s: %s") % (short(n), inst))
1973 err(_("unpacking manifest %s: %s") % (short(n), inst))
1971 continue
1974 continue
1972
1975
1973 try:
1976 try:
1974 ff = [ l.split('\0') for l in delta.splitlines() ]
1977 ff = [ l.split('\0') for l in delta.splitlines() ]
1975 for f, fn in ff:
1978 for f, fn in ff:
1976 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1979 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1977 except (ValueError, TypeError), inst:
1980 except (ValueError, TypeError), inst:
1978 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1981 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1979
1982
1980 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1983 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1981
1984
1982 for m, c in neededmanifests.items():
1985 for m, c in neededmanifests.items():
1983 err(_("Changeset %s refers to unknown manifest %s") %
1986 err(_("Changeset %s refers to unknown manifest %s") %
1984 (short(m), short(c)))
1987 (short(m), short(c)))
1985 del neededmanifests
1988 del neededmanifests
1986
1989
1987 for f in filenodes:
1990 for f in filenodes:
1988 if f not in filelinkrevs:
1991 if f not in filelinkrevs:
1989 err(_("file %s in manifest but not in changesets") % f)
1992 err(_("file %s in manifest but not in changesets") % f)
1990
1993
1991 for f in filelinkrevs:
1994 for f in filelinkrevs:
1992 if f not in filenodes:
1995 if f not in filenodes:
1993 err(_("file %s in changeset but not in manifest") % f)
1996 err(_("file %s in changeset but not in manifest") % f)
1994
1997
1995 self.ui.status(_("checking files\n"))
1998 self.ui.status(_("checking files\n"))
1996 ff = filenodes.keys()
1999 ff = filenodes.keys()
1997 ff.sort()
2000 ff.sort()
1998 for f in ff:
2001 for f in ff:
1999 if f == "/dev/null":
2002 if f == "/dev/null":
2000 continue
2003 continue
2001 files += 1
2004 files += 1
2002 if not f:
2005 if not f:
2003 err(_("file without name in manifest %s") % short(n))
2006 err(_("file without name in manifest %s") % short(n))
2004 continue
2007 continue
2005 fl = self.file(f)
2008 fl = self.file(f)
2006 checkversion(fl, f)
2009 checkversion(fl, f)
2007 checksize(fl, f)
2010 checksize(fl, f)
2008
2011
2009 nodes = {nullid: 1}
2012 nodes = {nullid: 1}
2010 seen = {}
2013 seen = {}
2011 for i in range(fl.count()):
2014 for i in range(fl.count()):
2012 revisions += 1
2015 revisions += 1
2013 n = fl.node(i)
2016 n = fl.node(i)
2014
2017
2015 if n in seen:
2018 if n in seen:
2016 err(_("%s: duplicate revision %d") % (f, i))
2019 err(_("%s: duplicate revision %d") % (f, i))
2017 if n not in filenodes[f]:
2020 if n not in filenodes[f]:
2018 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2021 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2019 else:
2022 else:
2020 del filenodes[f][n]
2023 del filenodes[f][n]
2021
2024
2022 flr = fl.linkrev(n)
2025 flr = fl.linkrev(n)
2023 if flr not in filelinkrevs.get(f, []):
2026 if flr not in filelinkrevs.get(f, []):
2024 err(_("%s:%s points to unexpected changeset %d")
2027 err(_("%s:%s points to unexpected changeset %d")
2025 % (f, short(n), flr))
2028 % (f, short(n), flr))
2026 else:
2029 else:
2027 filelinkrevs[f].remove(flr)
2030 filelinkrevs[f].remove(flr)
2028
2031
2029 # verify contents
2032 # verify contents
2030 try:
2033 try:
2031 t = fl.read(n)
2034 t = fl.read(n)
2032 except KeyboardInterrupt:
2035 except KeyboardInterrupt:
2033 self.ui.warn(_("interrupted"))
2036 self.ui.warn(_("interrupted"))
2034 raise
2037 raise
2035 except Exception, inst:
2038 except Exception, inst:
2036 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2039 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2037
2040
2038 # verify parents
2041 # verify parents
2039 (p1, p2) = fl.parents(n)
2042 (p1, p2) = fl.parents(n)
2040 if p1 not in nodes:
2043 if p1 not in nodes:
2041 err(_("file %s:%s unknown parent 1 %s") %
2044 err(_("file %s:%s unknown parent 1 %s") %
2042 (f, short(n), short(p1)))
2045 (f, short(n), short(p1)))
2043 if p2 not in nodes:
2046 if p2 not in nodes:
2044 err(_("file %s:%s unknown parent 2 %s") %
2047 err(_("file %s:%s unknown parent 2 %s") %
2045 (f, short(n), short(p1)))
2048 (f, short(n), short(p1)))
2046 nodes[n] = 1
2049 nodes[n] = 1
2047
2050
2048 # cross-check
2051 # cross-check
2049 for node in filenodes[f]:
2052 for node in filenodes[f]:
2050 err(_("node %s in manifests not in %s") % (hex(node), f))
2053 err(_("node %s in manifests not in %s") % (hex(node), f))
2051
2054
2052 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2055 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2053 (files, changesets, revisions))
2056 (files, changesets, revisions))
2054
2057
2055 if warnings[0]:
2058 if warnings[0]:
2056 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2059 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2057 if errors[0]:
2060 if errors[0]:
2058 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2061 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2059 return 1
2062 return 1
2060
2063
2061 # used to avoid circular references so destructors work
2064 # used to avoid circular references so destructors work
2062 def aftertrans(base):
2065 def aftertrans(base):
2063 p = base
2066 p = base
2064 def a():
2067 def a():
2065 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2068 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2066 util.rename(os.path.join(p, "journal.dirstate"),
2069 util.rename(os.path.join(p, "journal.dirstate"),
2067 os.path.join(p, "undo.dirstate"))
2070 os.path.join(p, "undo.dirstate"))
2068 return a
2071 return a
2069
2072
@@ -1,179 +1,178
1 #!/bin/sh
1 #!/bin/sh
2
2
3 # commit hooks can see env vars
3 # commit hooks can see env vars
4 hg init a
4 hg init a
5 cd a
5 cd a
6 echo "[hooks]" > .hg/hgrc
6 echo "[hooks]" > .hg/hgrc
7 echo 'commit = echo commit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
7 echo 'commit = echo commit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
8 echo 'commit.b = echo commit hook b' >> .hg/hgrc
8 echo 'commit.b = echo commit hook b' >> .hg/hgrc
9 echo 'precommit = echo precommit hook: p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
9 echo 'precommit = echo precommit hook: p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
10 echo 'pretxncommit = echo pretxncommit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2; hg -q tip' >> .hg/hgrc
10 echo 'pretxncommit = echo pretxncommit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2; hg -q tip' >> .hg/hgrc
11 echo a > a
11 echo a > a
12 hg add a
12 hg add a
13 hg commit -m a -d "1000000 0"
13 hg commit -m a -d "1000000 0"
14
14
15 hg clone . ../b
15 hg clone . ../b
16 cd ../b
16 cd ../b
17
17
18 # changegroup hooks can see env vars
18 # changegroup hooks can see env vars
19 echo '[hooks]' > .hg/hgrc
19 echo '[hooks]' > .hg/hgrc
20 echo 'prechangegroup = echo prechangegroup hook' >> .hg/hgrc
20 echo 'prechangegroup = echo prechangegroup hook' >> .hg/hgrc
21 echo 'changegroup = echo changegroup hook: n=$HG_NODE' >> .hg/hgrc
21 echo 'changegroup = echo changegroup hook: n=$HG_NODE' >> .hg/hgrc
22 echo 'incoming = echo incoming hook: n=$HG_NODE' >> .hg/hgrc
22 echo 'incoming = echo incoming hook: n=$HG_NODE' >> .hg/hgrc
23
23
24 # pretxncommit and commit hooks can see both parents of merge
24 # pretxncommit and commit hooks can see both parents of merge
25 cd ../a
25 cd ../a
26 echo b >> a
26 echo b >> a
27 hg commit -m a1 -d "1 0"
27 hg commit -m a1 -d "1 0"
28 hg update -C 0
28 hg update -C 0
29 echo b > b
29 echo b > b
30 hg add b
30 hg add b
31 hg commit -m b -d '1 0'
31 hg commit -m b -d '1 0'
32 hg update -m 1
32 hg update -m 1
33 hg commit -m merge -d '2 0'
33 hg commit -m merge -d '2 0'
34
34
35 cd ../b
35 cd ../b
36 hg pull ../a
36 hg pull ../a
37
37
38 # tag hooks can see env vars
38 # tag hooks can see env vars
39 cd ../a
39 cd ../a
40 echo 'pretag = echo pretag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
40 echo 'pretag = echo pretag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
41 echo 'tag = echo tag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
41 echo 'tag = echo tag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
42 hg tag -d '3 0' a
42 hg tag -d '3 0' a
43 hg tag -l la
43 hg tag -l la
44
44
45 # pretag hook can forbid tagging
45 # pretag hook can forbid tagging
46 echo 'pretag.forbid = echo pretag.forbid hook; exit 1' >> .hg/hgrc
46 echo 'pretag.forbid = echo pretag.forbid hook; exit 1' >> .hg/hgrc
47 hg tag -d '4 0' fa
47 hg tag -d '4 0' fa
48 hg tag -l fla
48 hg tag -l fla
49
49
50 # pretxncommit hook can see changeset, can roll back txn, changeset
50 # pretxncommit hook can see changeset, can roll back txn, changeset
51 # no more there after
51 # no more there after
52 echo 'pretxncommit.forbid = echo pretxncommit.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
52 echo 'pretxncommit.forbid = echo pretxncommit.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
53 echo z > z
53 echo z > z
54 hg add z
54 hg add z
55 hg -q tip
55 hg -q tip
56 hg commit -m 'fail' -d '4 0'
56 hg commit -m 'fail' -d '4 0'
57 hg -q tip
57 hg -q tip
58
58
59 # precommit hook can prevent commit
59 # precommit hook can prevent commit
60 echo 'precommit.forbid = echo precommit.forbid hook; exit 1' >> .hg/hgrc
60 echo 'precommit.forbid = echo precommit.forbid hook; exit 1' >> .hg/hgrc
61 hg commit -m 'fail' -d '4 0'
61 hg commit -m 'fail' -d '4 0'
62 hg -q tip
62 hg -q tip
63
63
64 # prechangegroup hook can prevent incoming changes
64 # prechangegroup hook can prevent incoming changes
65 cd ../b
65 cd ../b
66 hg -q tip
66 hg -q tip
67 echo '[hooks]' > .hg/hgrc
67 echo '[hooks]' > .hg/hgrc
68 echo 'prechangegroup.forbid = echo prechangegroup.forbid hook; exit 1' >> .hg/hgrc
68 echo 'prechangegroup.forbid = echo prechangegroup.forbid hook; exit 1' >> .hg/hgrc
69 hg pull ../a
69 hg pull ../a
70
70
71 # pretxnchangegroup hook can see incoming changes, can roll back txn,
71 # pretxnchangegroup hook can see incoming changes, can roll back txn,
72 # incoming changes no longer there after
72 # incoming changes no longer there after
73 echo '[hooks]' > .hg/hgrc
73 echo '[hooks]' > .hg/hgrc
74 echo 'pretxnchangegroup.forbid = echo pretxnchangegroup.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
74 echo 'pretxnchangegroup.forbid = echo pretxnchangegroup.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
75 hg pull ../a
75 hg pull ../a
76 hg -q tip
76 hg -q tip
77
77
78 # outgoing hooks can see env vars
78 # outgoing hooks can see env vars
79 rm .hg/hgrc
79 rm .hg/hgrc
80 echo '[hooks]' > ../a/.hg/hgrc
80 echo '[hooks]' > ../a/.hg/hgrc
81 echo 'preoutgoing = echo preoutgoing hook: s=$HG_SOURCE' >> ../a/.hg/hgrc
81 echo 'preoutgoing = echo preoutgoing hook: s=$HG_SOURCE' >> ../a/.hg/hgrc
82 echo 'outgoing = echo outgoing hook: n=$HG_NODE s=$HG_SOURCE' >> ../a/.hg/hgrc
82 echo 'outgoing = echo outgoing hook: n=$HG_NODE s=$HG_SOURCE' >> ../a/.hg/hgrc
83 hg pull ../a
83 hg pull ../a
84 hg undo
84 hg undo
85
85
86 # preoutgoing hook can prevent outgoing changes
86 # preoutgoing hook can prevent outgoing changes
87 echo 'preoutgoing.forbid = echo preoutgoing.forbid hook; exit 1' >> ../a/.hg/hgrc
87 echo 'preoutgoing.forbid = echo preoutgoing.forbid hook; exit 1' >> ../a/.hg/hgrc
88 hg pull ../a
88 hg pull ../a
89
89
90 cat > hooktests.py <<EOF
90 cat > hooktests.py <<EOF
91 from mercurial import util
91 from mercurial import util
92
92
93 uncallable = 0
93 uncallable = 0
94
94
95 def printargs(args):
95 def printargs(args):
96 args.pop('ui', None)
96 args.pop('ui', None)
97 args.pop('repo', None)
97 args.pop('repo', None)
98 a = list(args.items())
98 a = list(args.items())
99 a.sort()
99 a.sort()
100 print 'hook args:'
100 print 'hook args:'
101 for k, v in a:
101 for k, v in a:
102 print ' ', k, v
102 print ' ', k, v
103 return True
104
103
105 def passhook(**args):
104 def passhook(**args):
106 printargs(args)
105 printargs(args)
107 return True
108
106
109 def failhook(**args):
107 def failhook(**args):
110 printargs(args)
108 printargs(args)
109 return True
111
110
112 class LocalException(Exception):
111 class LocalException(Exception):
113 pass
112 pass
114
113
115 def raisehook(**args):
114 def raisehook(**args):
116 raise LocalException('exception from hook')
115 raise LocalException('exception from hook')
117
116
118 def aborthook(**args):
117 def aborthook(**args):
119 raise util.Abort('raise abort from hook')
118 raise util.Abort('raise abort from hook')
120
119
121 def brokenhook(**args):
120 def brokenhook(**args):
122 return 1 + {}
121 return 1 + {}
123
122
124 class container:
123 class container:
125 unreachable = 1
124 unreachable = 1
126 EOF
125 EOF
127
126
128 echo '# test python hooks'
127 echo '# test python hooks'
129 PYTHONPATH="`pwd`:$PYTHONPATH"
128 PYTHONPATH="`pwd`:$PYTHONPATH"
130 export PYTHONPATH
129 export PYTHONPATH
131
130
132 echo '[hooks]' > ../a/.hg/hgrc
131 echo '[hooks]' > ../a/.hg/hgrc
133 echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
132 echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
134 hg pull ../a 2>&1 | grep 'raised an exception'
133 hg pull ../a 2>&1 | grep 'raised an exception'
135
134
136 echo '[hooks]' > ../a/.hg/hgrc
135 echo '[hooks]' > ../a/.hg/hgrc
137 echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
136 echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
138 hg pull ../a 2>&1 | grep 'raised an exception'
137 hg pull ../a 2>&1 | grep 'raised an exception'
139
138
140 echo '[hooks]' > ../a/.hg/hgrc
139 echo '[hooks]' > ../a/.hg/hgrc
141 echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
140 echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
142 hg pull ../a
141 hg pull ../a
143
142
144 echo '[hooks]' > ../a/.hg/hgrc
143 echo '[hooks]' > ../a/.hg/hgrc
145 echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
144 echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
146 hg pull ../a
145 hg pull ../a
147
146
148 echo '[hooks]' > ../a/.hg/hgrc
147 echo '[hooks]' > ../a/.hg/hgrc
149 echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
148 echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
150 hg pull ../a
149 hg pull ../a
151
150
152 echo '[hooks]' > ../a/.hg/hgrc
151 echo '[hooks]' > ../a/.hg/hgrc
153 echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
152 echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
154 hg pull ../a
153 hg pull ../a
155
154
156 echo '[hooks]' > ../a/.hg/hgrc
155 echo '[hooks]' > ../a/.hg/hgrc
157 echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
156 echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
158 hg pull ../a
157 hg pull ../a
159
158
160 echo '[hooks]' > ../a/.hg/hgrc
159 echo '[hooks]' > ../a/.hg/hgrc
161 echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
160 echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
162 hg pull ../a
161 hg pull ../a
163
162
164 echo '[hooks]' > ../a/.hg/hgrc
163 echo '[hooks]' > ../a/.hg/hgrc
165 echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
164 echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
166 hg pull ../a
165 hg pull ../a
167
166
168 echo '[hooks]' > ../a/.hg/hgrc
167 echo '[hooks]' > ../a/.hg/hgrc
169 echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
168 echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
170 hg pull ../a
169 hg pull ../a
171
170
172 echo '# make sure --traceback works'
171 echo '# make sure --traceback works'
173 echo '[hooks]' > .hg/hgrc
172 echo '[hooks]' > .hg/hgrc
174 echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
173 echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
175
174
176 echo a >> a
175 echo a >> a
177 hg --traceback commit -A -m a 2>&1 | grep '^Traceback'
176 hg --traceback commit -A -m a 2>&1 | grep '^Traceback'
178
177
179 exit 0
178 exit 0
General Comments 0
You need to be logged in to leave comments. Login now