Remove deprecated old-style branch support
Matt Mackall
r3876:1e0b94cf default
@@ -1,312 +1,312 @@ bugzilla.py
1 1 # bugzilla.py - bugzilla integration for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to update comments of bugzilla bugs when changesets
9 9 # that refer to bugs by id are seen. this hook does not change bug
10 10 # status, only comments.
11 11 #
12 12 # to configure, add items to '[bugzilla]' section of hgrc.
13 13 #
14 14 # to use, configure bugzilla extension and enable like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.bugzilla =
18 18 #
19 19 # [hooks]
20 20 # # run bugzilla hook on every change pulled or pushed in here
21 21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 22 #
23 23 # config items:
24 24 #
25 25 # section name is 'bugzilla'.
26 26 # [bugzilla]
27 27 #
28 28 # REQUIRED:
29 29 # host = bugzilla # mysql server where bugzilla database lives
30 30 # password = ** # user's password
31 31 # version = 2.16 # version of bugzilla installed
32 32 #
33 33 # OPTIONAL:
34 34 # bzuser = ... # fallback bugzilla user name to record comments with
35 35 # db = bugs # database to connect to
36 36 # notify = ... # command to run to get bugzilla to send mail
37 37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 38 # strip = 0 # number of slashes to strip for url paths
39 39 # style = ... # style file to use when formatting comments
40 40 # template = ... # template to use when formatting comments
41 41 # timeout = 5 # database connection timeout (seconds)
42 42 # user = bugs # user to connect to database as
43 43 # [web]
44 44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 45 #
46 46 # if hg committer names are not same as bugzilla user names, use
47 47 # "usermap" feature to map from committer email to bugzilla user name.
48 48 # usermap can be in hgrc or separate config file.
49 49 #
50 50 # [bugzilla]
51 51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 52 # [usermap]
53 53 # committer_email = bugzilla_user_name
54 54
55 55 from mercurial.demandload import *
56 56 from mercurial.i18n import gettext as _
57 57 from mercurial.node import *
58 58 demandload(globals(), 'mercurial:cmdutil,templater,util os re time')
59 59
60 60 MySQLdb = None
61 61
62 62 def buglist(ids):
63 63 return '(' + ','.join(map(str, ids)) + ')'
64 64
65 65 class bugzilla_2_16(object):
66 66 '''support for bugzilla version 2.16.'''
67 67
68 68 def __init__(self, ui):
69 69 self.ui = ui
70 70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 72 passwd = self.ui.config('bugzilla', 'password')
73 73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 75 usermap = self.ui.config('bugzilla', 'usermap')
76 76 if usermap:
77 77 self.ui.readsections(usermap, 'usermap')
78 78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 79 (host, db, user, '*' * len(passwd)))
80 80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 81 db=db, connect_timeout=timeout)
82 82 self.cursor = self.conn.cursor()
83 83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 84 ids = self.cursor.fetchall()
85 85 if len(ids) != 1:
86 86 raise util.Abort(_('unknown database schema'))
87 87 self.longdesc_id = ids[0][0]
88 88 self.user_ids = {}
89 89
90 90 def run(self, *args, **kwargs):
91 91 '''run a query.'''
92 92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 93 try:
94 94 self.cursor.execute(*args, **kwargs)
95 95 except MySQLdb.MySQLError, err:
96 96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 97 raise
98 98
99 99 def filter_real_bug_ids(self, ids):
100 100 '''filter not-existing bug ids from list.'''
101 101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 102 ids = [c[0] for c in self.cursor.fetchall()]
103 103 ids.sort()
104 104 return ids
105 105
106 106 def filter_unknown_bug_ids(self, node, ids):
107 107 '''filter bug ids from list that already refer to this changeset.'''
108 108
109 109 self.run('''select bug_id from longdescs where
110 110 bug_id in %s and thetext like "%%%s%%"''' %
111 111 (buglist(ids), short(node)))
112 112 unknown = dict.fromkeys(ids)
113 113 for (id,) in self.cursor.fetchall():
114 114 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 115 (id, short(node)))
116 116 unknown.pop(id, None)
117 117 ids = unknown.keys()
118 118 ids.sort()
119 119 return ids
120 120
121 121 def notify(self, ids):
122 122 '''tell bugzilla to send mail.'''
123 123
124 124 self.ui.status(_('telling bugzilla to send mail:\n'))
125 125 for id in ids:
126 126 self.ui.status(_(' bug %s\n') % id)
127 127 cmd = self.ui.config('bugzilla', 'notify',
128 128 'cd /var/www/html/bugzilla && '
129 129 './processmail %s nobody@nowhere.com') % id
130 130 fp = os.popen('(%s) 2>&1' % cmd)
131 131 out = fp.read()
132 132 ret = fp.close()
133 133 if ret:
134 134 self.ui.warn(out)
135 135 raise util.Abort(_('bugzilla notify command %s') %
136 136 util.explain_exit(ret)[0])
137 137 self.ui.status(_('done\n'))
138 138
139 139 def get_user_id(self, user):
140 140 '''look up numeric bugzilla user id.'''
141 141 try:
142 142 return self.user_ids[user]
143 143 except KeyError:
144 144 try:
145 145 userid = int(user)
146 146 except ValueError:
147 147 self.ui.note(_('looking up user %s\n') % user)
148 148 self.run('''select userid from profiles
149 149 where login_name like %s''', user)
150 150 all = self.cursor.fetchall()
151 151 if len(all) != 1:
152 152 raise KeyError(user)
153 153 userid = int(all[0][0])
154 154 self.user_ids[user] = userid
155 155 return userid
156 156
157 157 def map_committer(self, user):
158 158 '''map name of committer to bugzilla user name.'''
159 159 for committer, bzuser in self.ui.configitems('usermap'):
160 160 if committer.lower() == user.lower():
161 161 return bzuser
162 162 return user
163 163
164 164 def add_comment(self, bugid, text, committer):
165 165 '''add comment to bug. try adding comment as committer of
166 166 changeset, otherwise as default bugzilla user.'''
167 167 user = self.map_committer(committer)
168 168 try:
169 169 userid = self.get_user_id(user)
170 170 except KeyError:
171 171 try:
172 172 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 173 if not defaultuser:
174 174 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 175 user)
176 176 userid = self.get_user_id(defaultuser)
177 177 except KeyError:
178 178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 179 (user, defaultuser))
180 180 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 181 self.run('''insert into longdescs
182 182 (bug_id, who, bug_when, thetext)
183 183 values (%s, %s, %s, %s)''',
184 184 (bugid, userid, now, text))
185 185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 186 values (%s, %s, %s, %s)''',
187 187 (bugid, userid, now, self.longdesc_id))
188 188
189 189 class bugzilla(object):
190 190 # supported versions of bugzilla. different versions have
191 191 # different schemas.
192 192 _versions = {
193 193 '2.16': bugzilla_2_16,
194 194 }
195 195
196 196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198 198
199 199 _bz = None
200 200
201 201 def __init__(self, ui, repo):
202 202 self.ui = ui
203 203 self.repo = repo
204 204
205 205 def bz(self):
206 206 '''return object that knows how to talk to bugzilla version in
207 207 use.'''
208 208
209 209 if bugzilla._bz is None:
210 210 bzversion = self.ui.config('bugzilla', 'version')
211 211 try:
212 212 bzclass = bugzilla._versions[bzversion]
213 213 except KeyError:
214 214 raise util.Abort(_('bugzilla version %s not supported') %
215 215 bzversion)
216 216 bugzilla._bz = bzclass(self.ui)
217 217 return bugzilla._bz
218 218
219 219 def __getattr__(self, key):
220 220 return getattr(self.bz(), key)
221 221
222 222 _bug_re = None
223 223 _split_re = None
224 224
225 225 def find_bug_ids(self, node, desc):
226 226 '''find valid bug ids that are referred to in changeset
227 227 comments and that do not already have references to this
228 228 changeset.'''
229 229
230 230 if bugzilla._bug_re is None:
231 231 bugzilla._bug_re = re.compile(
232 232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 233 re.IGNORECASE)
234 234 bugzilla._split_re = re.compile(r'\D+')
235 235 start = 0
236 236 ids = {}
237 237 while True:
238 238 m = bugzilla._bug_re.search(desc, start)
239 239 if not m:
240 240 break
241 241 start = m.end()
242 242 for id in bugzilla._split_re.split(m.group(1)):
243 243 if not id: continue
244 244 ids[int(id)] = 1
245 245 ids = ids.keys()
246 246 if ids:
247 247 ids = self.filter_real_bug_ids(ids)
248 248 if ids:
249 249 ids = self.filter_unknown_bug_ids(node, ids)
250 250 return ids
251 251
252 252 def update(self, bugid, node, changes):
253 253 '''update bugzilla bug with reference to changeset.'''
254 254
255 255 def webroot(root):
256 256 '''strip leading prefix of repo root and turn into
257 257 url-safe path.'''
258 258 count = int(self.ui.config('bugzilla', 'strip', 0))
259 259 root = util.pconvert(root)
260 260 while count > 0:
261 261 c = root.find('/')
262 262 if c == -1:
263 263 break
264 264 root = root[c+1:]
265 265 count -= 1
266 266 return root
267 267
268 268 mapfile = self.ui.config('bugzilla', 'style')
269 269 tmpl = self.ui.config('bugzilla', 'template')
270 270 t = cmdutil.changeset_templater(self.ui, self.repo,
271 False, None, mapfile, False)
271 False, mapfile, False)
272 272 if not mapfile and not tmpl:
273 273 tmpl = _('changeset {node|short} in repo {root} refers '
274 274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 275 if tmpl:
276 276 tmpl = templater.parsestring(tmpl, quoted=False)
277 277 t.use_template(tmpl)
278 278 self.ui.pushbuffer()
279 279 t.show(changenode=node, changes=changes,
280 280 bug=str(bugid),
281 281 hgweb=self.ui.config('web', 'baseurl'),
282 282 root=self.repo.root,
283 283 webroot=webroot(self.repo.root))
284 284 data = self.ui.popbuffer()
285 285 self.add_comment(bugid, data, templater.email(changes[1]))
286 286
287 287 def hook(ui, repo, hooktype, node=None, **kwargs):
288 288 '''add comment to bugzilla for each changeset that refers to a
289 289 bugzilla bug id. only add a comment once per bug, so same change
290 290 seen multiple times does not fill bug with duplicate data.'''
291 291 try:
292 292 import MySQLdb as mysql
293 293 global MySQLdb
294 294 MySQLdb = mysql
295 295 except ImportError, err:
296 296 raise util.Abort(_('python mysql support not available: %s') % err)
297 297
298 298 if node is None:
299 299 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 300 hooktype)
301 301 try:
302 302 bz = bugzilla(ui, repo)
303 303 bin_node = bin(node)
304 304 changes = repo.changelog.read(bin_node)
305 305 ids = bz.find_bug_ids(bin_node, changes[4])
306 306 if ids:
307 307 for id in ids:
308 308 bz.update(id, bin_node, changes)
309 309 bz.notify(ids)
310 310 except MySQLdb.MySQLError, err:
311 311 raise util.Abort(_('database error: %s') % err[1])
312 312
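The only functional change to bugzilla.py above is the call into cmdutil: with old-style branch support removed, changeset_templater() drops the branch-info argument that used to sit between the patch flag and the map file, and is now constructed as (ui, repo, patch, mapfile, buffered), as shown in the cmdutil.py hunk further down. A minimal sketch of a caller after this changeset follows; the helper name and its defaults are assumptions for illustration, not part of the commit.

# Hypothetical helper mirroring the updated call in bugzilla.update():
# the old-style branch-info argument between `patch` and `mapfile` is gone.
from mercurial import cmdutil, templater

def make_comment_templater(ui, repo, mapfile=None, tmpl=None):
    # post-change signature: (ui, repo, patch, mapfile, buffered)
    t = cmdutil.changeset_templater(ui, repo, False, mapfile, False)
    if tmpl:
        t.use_template(templater.parsestring(tmpl, quoted=False))
    return t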
@@ -1,280 +1,280 @@ notify.py
1 1 # notify.py - email notifications for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to email notifications to people when changesets are
9 9 # committed to a repo they subscribe to.
10 10 #
11 11 # default mode is to print messages to stdout, for testing and
12 12 # configuring.
13 13 #
14 14 # to use, configure notify extension and enable in hgrc like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.notify =
18 18 #
19 19 # [hooks]
20 20 # # one email for each incoming changeset
21 21 # incoming.notify = python:hgext.notify.hook
22 22 # # batch emails when many changesets incoming at one time
23 23 # changegroup.notify = python:hgext.notify.hook
24 24 #
25 25 # [notify]
26 26 # # config items go in here
27 27 #
28 28 # config items:
29 29 #
30 30 # REQUIRED:
31 31 # config = /path/to/file # file containing subscriptions
32 32 #
33 33 # OPTIONAL:
34 34 # test = True # print messages to stdout for testing
35 35 # strip = 3 # number of slashes to strip for url paths
36 36 # domain = example.com # domain to use if committer missing domain
37 37 # style = ... # style file to use when formatting email
38 38 # template = ... # template to use when formatting email
39 39 # incoming = ... # template to use when run as incoming hook
40 40 # changegroup = ... # template when run as changegroup hook
41 41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 42 # maxsubject = 67 # truncate subject line longer than this
43 43 # diffstat = True # add a diffstat before the diff content
44 44 # sources = serve # notify if source of incoming changes in this list
45 45 # # (serve == ssh or http, push, pull, bundle)
46 46 # [email]
47 47 # from = user@host.com # email address to send as if none given
48 48 # [web]
49 49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
50 50 #
51 51 # notify config file has same format as regular hgrc. it has two
52 52 # sections so you can express subscriptions in whatever way is handier
53 53 # for you.
54 54 #
55 55 # [usersubs]
56 56 # # key is subscriber email, value is ","-separated list of glob patterns
57 57 # user@host = pattern
58 58 #
59 59 # [reposubs]
60 60 # # key is glob pattern, value is ","-separated list of subscriber emails
61 61 # pattern = user@host
62 62 #
63 63 # glob patterns are matched against path to repo root.
64 64 #
65 65 # if you like, you can put notify config file in repo that users can
66 66 # push changes to, they can manage their own subscriptions.
67 67
68 68 from mercurial.demandload import *
69 69 from mercurial.i18n import gettext as _
70 70 from mercurial.node import *
71 71 demandload(globals(), 'mercurial:patch,cmdutil,templater,util,mail')
72 72 demandload(globals(), 'email.Parser fnmatch socket time')
73 73
74 74 # template for single changeset can include email headers.
75 75 single_template = '''
76 76 Subject: changeset in {webroot}: {desc|firstline|strip}
77 77 From: {author}
78 78
79 79 changeset {node|short} in {root}
80 80 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
81 81 description:
82 82 \t{desc|tabindent|strip}
83 83 '''.lstrip()
84 84
85 85 # template for multiple changesets should not contain email headers,
86 86 # because only first set of headers will be used and result will look
87 87 # strange.
88 88 multiple_template = '''
89 89 changeset {node|short} in {root}
90 90 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
91 91 summary: {desc|firstline}
92 92 '''
93 93
94 94 deftemplates = {
95 95 'changegroup': multiple_template,
96 96 }
97 97
98 98 class notifier(object):
99 99 '''email notification class.'''
100 100
101 101 def __init__(self, ui, repo, hooktype):
102 102 self.ui = ui
103 103 cfg = self.ui.config('notify', 'config')
104 104 if cfg:
105 105 self.ui.readsections(cfg, 'usersubs', 'reposubs')
106 106 self.repo = repo
107 107 self.stripcount = int(self.ui.config('notify', 'strip', 0))
108 108 self.root = self.strip(self.repo.root)
109 109 self.domain = self.ui.config('notify', 'domain')
110 110 self.subs = self.subscribers()
111 111
112 112 mapfile = self.ui.config('notify', 'style')
113 113 template = (self.ui.config('notify', hooktype) or
114 114 self.ui.config('notify', 'template'))
115 115 self.t = cmdutil.changeset_templater(self.ui, self.repo,
116 False, None, mapfile, False)
116 False, mapfile, False)
117 117 if not mapfile and not template:
118 118 template = deftemplates.get(hooktype) or single_template
119 119 if template:
120 120 template = templater.parsestring(template, quoted=False)
121 121 self.t.use_template(template)
122 122
123 123 def strip(self, path):
124 124 '''strip leading slashes from local path, turn into web-safe path.'''
125 125
126 126 path = util.pconvert(path)
127 127 count = self.stripcount
128 128 while count > 0:
129 129 c = path.find('/')
130 130 if c == -1:
131 131 break
132 132 path = path[c+1:]
133 133 count -= 1
134 134 return path
135 135
136 136 def fixmail(self, addr):
137 137 '''try to clean up email addresses.'''
138 138
139 139 addr = templater.email(addr.strip())
140 140 a = addr.find('@localhost')
141 141 if a != -1:
142 142 addr = addr[:a]
143 143 if '@' not in addr:
144 144 return addr + '@' + self.domain
145 145 return addr
146 146
147 147 def subscribers(self):
148 148 '''return list of email addresses of subscribers to this repo.'''
149 149
150 150 subs = {}
151 151 for user, pats in self.ui.configitems('usersubs'):
152 152 for pat in pats.split(','):
153 153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 154 subs[self.fixmail(user)] = 1
155 155 for pat, users in self.ui.configitems('reposubs'):
156 156 if fnmatch.fnmatch(self.repo.root, pat):
157 157 for user in users.split(','):
158 158 subs[self.fixmail(user)] = 1
159 159 subs = subs.keys()
160 160 subs.sort()
161 161 return subs
162 162
163 163 def url(self, path=None):
164 164 return self.ui.config('web', 'baseurl') + (path or self.root)
165 165
166 166 def node(self, node):
167 167 '''format one changeset.'''
168 168
169 169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 170 baseurl=self.ui.config('web', 'baseurl'),
171 171 root=self.repo.root,
172 172 webroot=self.root)
173 173
174 174 def skipsource(self, source):
175 175 '''true if incoming changes from this source should be skipped.'''
176 176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 177 return source not in ok_sources
178 178
179 179 def send(self, node, count, data):
180 180 '''send message.'''
181 181
182 182 p = email.Parser.Parser()
183 183 msg = p.parsestr(data)
184 184
185 185 def fix_subject():
186 186 '''try to make subject line exist and be useful.'''
187 187
188 188 subject = msg['Subject']
189 189 if not subject:
190 190 if count > 1:
191 191 subject = _('%s: %d new changesets') % (self.root, count)
192 192 else:
193 193 changes = self.repo.changelog.read(node)
194 194 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 195 subject = '%s: %s' % (self.root, s)
196 196 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 197 if maxsubject and len(subject) > maxsubject:
198 198 subject = subject[:maxsubject-3] + '...'
199 199 del msg['Subject']
200 200 msg['Subject'] = subject
201 201
202 202 def fix_sender():
203 203 '''try to make message have proper sender.'''
204 204
205 205 sender = msg['From']
206 206 if not sender:
207 207 sender = self.ui.config('email', 'from') or self.ui.username()
208 208 if '@' not in sender or '@localhost' in sender:
209 209 sender = self.fixmail(sender)
210 210 del msg['From']
211 211 msg['From'] = sender
212 212
213 213 fix_subject()
214 214 fix_sender()
215 215
216 216 msg['X-Hg-Notification'] = 'changeset ' + short(node)
217 217 if not msg['Message-Id']:
218 218 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
219 219 (short(node), int(time.time()),
220 220 hash(self.repo.root), socket.getfqdn()))
221 221 msg['To'] = ', '.join(self.subs)
222 222
223 223 msgtext = msg.as_string(0)
224 224 if self.ui.configbool('notify', 'test', True):
225 225 self.ui.write(msgtext)
226 226 if not msgtext.endswith('\n'):
227 227 self.ui.write('\n')
228 228 else:
229 229 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
230 230 (len(self.subs), count))
231 231 mail.sendmail(self.ui, templater.email(msg['From']),
232 232 self.subs, msgtext)
233 233
234 234 def diff(self, node, ref):
235 235 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
236 236 if maxdiff == 0:
237 237 return
238 238 prev = self.repo.changelog.parents(node)[0]
239 239 self.ui.pushbuffer()
240 240 patch.diff(self.repo, prev, ref)
241 241 difflines = self.ui.popbuffer().splitlines(1)
242 242 if self.ui.configbool('notify', 'diffstat', True):
243 243 s = patch.diffstat(difflines)
244 244 self.ui.write('\ndiffstat:\n\n' + s)
245 245 if maxdiff > 0 and len(difflines) > maxdiff:
246 246 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
247 247 (len(difflines), maxdiff))
248 248 difflines = difflines[:maxdiff]
249 249 elif difflines:
250 250 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
251 251 self.ui.write(*difflines)
252 252
253 253 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
254 254 '''send email notifications to interested subscribers.
255 255
256 256 if used as changegroup hook, send one email for all changesets in
257 257 changegroup. else send one email per changeset.'''
258 258 n = notifier(ui, repo, hooktype)
259 259 if not n.subs:
260 260 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
261 261 return
262 262 if n.skipsource(source):
263 263 ui.debug(_('notify: changes have source "%s" - skipping\n') %
264 264 source)
265 265 return
266 266 node = bin(node)
267 267 ui.pushbuffer()
268 268 if hooktype == 'changegroup':
269 269 start = repo.changelog.rev(node)
270 270 end = repo.changelog.count()
271 271 count = end - start
272 272 for rev in xrange(start, end):
273 273 n.node(repo.changelog.node(rev))
274 274 n.diff(node, repo.changelog.tip())
275 275 else:
276 276 count = 1
277 277 n.node(node)
278 278 n.diff(node, node)
279 279 data = ui.popbuffer()
280 280 n.send(node, count, data)
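notify.py above picks up the same one-line adjustment to its changeset_templater() call. For exercising the hook outside a real [hooks] entry, a rough driver in the default test mode (messages printed to stdout, nothing mailed) might look like the sketch below. The repository path and subscriber address are assumptions, and the subscription is set programmatically only for illustration; real use puts it in the [usersubs]/[reposubs] config file described in the header comment.

# Hypothetical test driver for the notify hook above (test mode is the default,
# so the message is written to stdout rather than sent).
from mercurial import ui as uimod, hg
from mercurial.node import hex
from hgext import notify

u = uimod.ui()
# subscribe one address to every repository; normally this lives in the
# [usersubs] section of the notify config file
u.setconfig('usersubs', 'subscriber@example.com', '*')
repo = hg.repository(u, '/path/to/repo')          # path is an assumption
# fire the hook for the tip changeset, the way an incoming hook would
notify.hook(u, repo, 'incoming', node=hex(repo.changelog.tip()),
            source='serve')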
@@ -1,773 +1,756 @@ cmdutil.py
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), 'os sys')
12 12 demandload(globals(), 'mdiff util templater patch')
13 13
14 14 revrangesep = ':'
15 15
16 16 def revpair(repo, revs):
17 17 '''return pair of nodes, given list of revisions. second item can
18 18 be None, meaning use working dir.'''
19 19
20 20 def revfix(repo, val, defval):
21 21 if not val and val != 0 and defval is not None:
22 22 val = defval
23 23 return repo.lookup(val)
24 24
25 25 if not revs:
26 26 return repo.dirstate.parents()[0], None
27 27 end = None
28 28 if len(revs) == 1:
29 29 if revrangesep in revs[0]:
30 30 start, end = revs[0].split(revrangesep, 1)
31 31 start = revfix(repo, start, 0)
32 32 end = revfix(repo, end, repo.changelog.count() - 1)
33 33 else:
34 34 start = revfix(repo, revs[0], None)
35 35 elif len(revs) == 2:
36 36 if revrangesep in revs[0] or revrangesep in revs[1]:
37 37 raise util.Abort(_('too many revisions specified'))
38 38 start = revfix(repo, revs[0], None)
39 39 end = revfix(repo, revs[1], None)
40 40 else:
41 41 raise util.Abort(_('too many revisions specified'))
42 42 return start, end
43 43
44 44 def revrange(repo, revs):
45 45 """Yield revision as strings from a list of revision specifications."""
46 46
47 47 def revfix(repo, val, defval):
48 48 if not val and val != 0 and defval is not None:
49 49 return defval
50 50 return repo.changelog.rev(repo.lookup(val))
51 51
52 52 seen, l = {}, []
53 53 for spec in revs:
54 54 if revrangesep in spec:
55 55 start, end = spec.split(revrangesep, 1)
56 56 start = revfix(repo, start, 0)
57 57 end = revfix(repo, end, repo.changelog.count() - 1)
58 58 step = start > end and -1 or 1
59 59 for rev in xrange(start, end+step, step):
60 60 if rev in seen:
61 61 continue
62 62 seen[rev] = 1
63 63 l.append(rev)
64 64 else:
65 65 rev = revfix(repo, spec, None)
66 66 if rev in seen:
67 67 continue
68 68 seen[rev] = 1
69 69 l.append(rev)
70 70
71 71 return l
72 72
73 73 def make_filename(repo, pat, node,
74 74 total=None, seqno=None, revwidth=None, pathname=None):
75 75 node_expander = {
76 76 'H': lambda: hex(node),
77 77 'R': lambda: str(repo.changelog.rev(node)),
78 78 'h': lambda: short(node),
79 79 }
80 80 expander = {
81 81 '%': lambda: '%',
82 82 'b': lambda: os.path.basename(repo.root),
83 83 }
84 84
85 85 try:
86 86 if node:
87 87 expander.update(node_expander)
88 88 if node and revwidth is not None:
89 89 expander['r'] = (lambda:
90 90 str(repo.changelog.rev(node)).zfill(revwidth))
91 91 if total is not None:
92 92 expander['N'] = lambda: str(total)
93 93 if seqno is not None:
94 94 expander['n'] = lambda: str(seqno)
95 95 if total is not None and seqno is not None:
96 96 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
97 97 if pathname is not None:
98 98 expander['s'] = lambda: os.path.basename(pathname)
99 99 expander['d'] = lambda: os.path.dirname(pathname) or '.'
100 100 expander['p'] = lambda: pathname
101 101
102 102 newname = []
103 103 patlen = len(pat)
104 104 i = 0
105 105 while i < patlen:
106 106 c = pat[i]
107 107 if c == '%':
108 108 i += 1
109 109 c = pat[i]
110 110 c = expander[c]()
111 111 newname.append(c)
112 112 i += 1
113 113 return ''.join(newname)
114 114 except KeyError, inst:
115 115 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
116 116 inst.args[0])
117 117
118 118 def make_file(repo, pat, node=None,
119 119 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
120 120 if not pat or pat == '-':
121 121 return 'w' in mode and sys.stdout or sys.stdin
122 122 if hasattr(pat, 'write') and 'w' in mode:
123 123 return pat
124 124 if hasattr(pat, 'read') and 'r' in mode:
125 125 return pat
126 126 return open(make_filename(repo, pat, node, total, seqno, revwidth,
127 127 pathname),
128 128 mode)
129 129
130 130 def matchpats(repo, pats=[], opts={}, head=''):
131 131 cwd = repo.getcwd()
132 132 if not pats and cwd:
133 133 opts['include'] = [os.path.join(cwd, i)
134 134 for i in opts.get('include', [])]
135 135 opts['exclude'] = [os.path.join(cwd, x)
136 136 for x in opts.get('exclude', [])]
137 137 cwd = ''
138 138 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
139 139 opts.get('exclude'), head)
140 140
141 141 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
142 142 files, matchfn, anypats = matchpats(repo, pats, opts, head)
143 143 exact = dict.fromkeys(files)
144 144 for src, fn in repo.walk(node=node, files=files, match=matchfn,
145 145 badmatch=badmatch):
146 146 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
147 147
148 148 def findrenames(repo, added=None, removed=None, threshold=0.5):
149 149 if added is None or removed is None:
150 150 added, removed = repo.status()[1:3]
151 151 changes = repo.changelog.read(repo.dirstate.parents()[0])
152 152 mf = repo.manifest.read(changes[0])
153 153 for a in added:
154 154 aa = repo.wread(a)
155 155 bestscore, bestname = None, None
156 156 for r in removed:
157 157 rr = repo.file(r).read(mf[r])
158 158 delta = mdiff.textdiff(aa, rr)
159 159 if len(delta) < len(aa):
160 160 myscore = 1.0 - (float(len(delta)) / len(aa))
161 161 if bestscore is None or myscore > bestscore:
162 162 bestscore, bestname = myscore, r
163 163 if bestname and bestscore >= threshold:
164 164 yield bestname, a, bestscore
165 165
166 166 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
167 167 similarity=None):
168 168 if dry_run is None:
169 169 dry_run = opts.get('dry_run')
170 170 if similarity is None:
171 171 similarity = float(opts.get('similarity') or 0)
172 172 add, remove = [], []
173 173 mapping = {}
174 174 for src, abs, rel, exact in walk(repo, pats, opts):
175 175 if src == 'f' and repo.dirstate.state(abs) == '?':
176 176 add.append(abs)
177 177 mapping[abs] = rel, exact
178 178 if repo.ui.verbose or not exact:
179 179 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
180 180 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
181 181 remove.append(abs)
182 182 mapping[abs] = rel, exact
183 183 if repo.ui.verbose or not exact:
184 184 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
185 185 if not dry_run:
186 186 repo.add(add, wlock=wlock)
187 187 repo.remove(remove, wlock=wlock)
188 188 if similarity > 0:
189 189 for old, new, score in findrenames(repo, add, remove, similarity):
190 190 oldrel, oldexact = mapping[old]
191 191 newrel, newexact = mapping[new]
192 192 if repo.ui.verbose or not oldexact or not newexact:
193 193 repo.ui.status(_('recording removal of %s as rename to %s '
194 194 '(%d%% similar)\n') %
195 195 (oldrel, newrel, score * 100))
196 196 if not dry_run:
197 197 repo.copy(old, new, wlock=wlock)
198 198
199 199 class changeset_printer(object):
200 200 '''show changeset information when templating not requested.'''
201 201
202 def __init__(self, ui, repo, patch, brinfo, buffered):
202 def __init__(self, ui, repo, patch, buffered):
203 203 self.ui = ui
204 204 self.repo = repo
205 205 self.buffered = buffered
206 206 self.patch = patch
207 self.brinfo = brinfo
208 207 self.header = {}
209 208 self.hunk = {}
210 209 self.lastheader = None
211 210
212 211 def flush(self, rev):
213 212 if rev in self.header:
214 213 h = self.header[rev]
215 214 if h != self.lastheader:
216 215 self.lastheader = h
217 216 self.ui.write(h)
218 217 del self.header[rev]
219 218 if rev in self.hunk:
220 219 self.ui.write(self.hunk[rev])
221 220 del self.hunk[rev]
222 221 return 1
223 222 return 0
224 223
225 224 def show(self, rev=0, changenode=None, copies=None, **props):
226 225 if self.buffered:
227 226 self.ui.pushbuffer()
228 227 self._show(rev, changenode, copies, props)
229 228 self.hunk[rev] = self.ui.popbuffer()
230 229 else:
231 230 self._show(rev, changenode, copies, props)
232 231
233 232 def _show(self, rev, changenode, copies, props):
234 233 '''show a single changeset or file revision'''
235 234 log = self.repo.changelog
236 235 if changenode is None:
237 236 changenode = log.node(rev)
238 237 elif not rev:
239 238 rev = log.rev(changenode)
240 239
241 240 if self.ui.quiet:
242 241 self.ui.write("%d:%s\n" % (rev, short(changenode)))
243 242 return
244 243
245 244 changes = log.read(changenode)
246 245 date = util.datestr(changes[2])
247 246 extra = changes[5]
248 247 branch = extra.get("branch")
249 248
250 249 hexfunc = self.ui.debugflag and hex or short
251 250
252 251 parents = log.parentrevs(rev)
253 252 if not self.ui.debugflag:
254 253 if parents[1] == nullrev:
255 254 if parents[0] >= rev - 1:
256 255 parents = []
257 256 else:
258 257 parents = [parents[0]]
259 258 parents = [(p, hexfunc(log.node(p))) for p in parents]
260 259
261 260 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
262 261
263 262 if branch:
264 263 branch = util.tolocal(branch)
265 264 self.ui.write(_("branch: %s\n") % branch)
266 265 for tag in self.repo.nodetags(changenode):
267 266 self.ui.write(_("tag: %s\n") % tag)
268 267 for parent in parents:
269 268 self.ui.write(_("parent: %d:%s\n") % parent)
270 269
271 if self.brinfo:
272 br = self.repo.branchlookup([changenode])
273 if br:
274 self.ui.write(_("branch: %s\n") % " ".join(br[changenode]))
275
276 270 if self.ui.debugflag:
277 271 self.ui.write(_("manifest: %d:%s\n") %
278 272 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
279 273 self.ui.write(_("user: %s\n") % changes[1])
280 274 self.ui.write(_("date: %s\n") % date)
281 275
282 276 if self.ui.debugflag:
283 277 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
284 278 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
285 279 files):
286 280 if value:
287 281 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
288 282 elif changes[3] and self.ui.verbose:
289 283 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
290 284 if copies and self.ui.verbose:
291 285 copies = ['%s (%s)' % c for c in copies]
292 286 self.ui.write(_("copies: %s\n") % ' '.join(copies))
293 287
294 288 if extra and self.ui.debugflag:
295 289 extraitems = extra.items()
296 290 extraitems.sort()
297 291 for key, value in extraitems:
298 292 self.ui.write(_("extra: %s=%s\n")
299 293 % (key, value.encode('string_escape')))
300 294
301 295 description = changes[4].strip()
302 296 if description:
303 297 if self.ui.verbose:
304 298 self.ui.write(_("description:\n"))
305 299 self.ui.write(description)
306 300 self.ui.write("\n\n")
307 301 else:
308 302 self.ui.write(_("summary: %s\n") %
309 303 description.splitlines()[0])
310 304 self.ui.write("\n")
311 305
312 306 self.showpatch(changenode)
313 307
314 308 def showpatch(self, node):
315 309 if self.patch:
316 310 prev = self.repo.changelog.parents(node)[0]
317 311 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
318 312 self.ui.write("\n")
319 313
320 314 class changeset_templater(changeset_printer):
321 315 '''format changeset information.'''
322 316
323 def __init__(self, ui, repo, patch, brinfo, mapfile, buffered):
324 changeset_printer.__init__(self, ui, repo, patch, brinfo, buffered)
317 def __init__(self, ui, repo, patch, mapfile, buffered):
318 changeset_printer.__init__(self, ui, repo, patch, buffered)
325 319 self.t = templater.templater(mapfile, templater.common_filters,
326 320 cache={'parent': '{rev}:{node|short} ',
327 321 'manifest': '{rev}:{node|short}',
328 322 'filecopy': '{name} ({source})'})
329 323
330 324 def use_template(self, t):
331 325 '''set template string to use'''
332 326 self.t.cache['changeset'] = t
333 327
334 328 def _show(self, rev, changenode, copies, props):
335 329 '''show a single changeset or file revision'''
336 330 log = self.repo.changelog
337 331 if changenode is None:
338 332 changenode = log.node(rev)
339 333 elif not rev:
340 334 rev = log.rev(changenode)
341 335
342 336 changes = log.read(changenode)
343 337
344 338 def showlist(name, values, plural=None, **args):
345 339 '''expand set of values.
346 340 name is name of key in template map.
347 341 values is list of strings or dicts.
348 342 plural is plural of name, if not simply name + 's'.
349 343
350 344 expansion works like this, given name 'foo'.
351 345
352 346 if values is empty, expand 'no_foos'.
353 347
354 348 if 'foo' not in template map, return values as a string,
355 349 joined by space.
356 350
357 351 expand 'start_foos'.
358 352
359 353 for each value, expand 'foo'. if 'last_foo' in template
360 354 map, expand it instead of 'foo' for last key.
361 355
362 356 expand 'end_foos'.
363 357 '''
364 358 if plural: names = plural
365 359 else: names = name + 's'
366 360 if not values:
367 361 noname = 'no_' + names
368 362 if noname in self.t:
369 363 yield self.t(noname, **args)
370 364 return
371 365 if name not in self.t:
372 366 if isinstance(values[0], str):
373 367 yield ' '.join(values)
374 368 else:
375 369 for v in values:
376 370 yield dict(v, **args)
377 371 return
378 372 startname = 'start_' + names
379 373 if startname in self.t:
380 374 yield self.t(startname, **args)
381 375 vargs = args.copy()
382 376 def one(v, tag=name):
383 377 try:
384 378 vargs.update(v)
385 379 except (AttributeError, ValueError):
386 380 try:
387 381 for a, b in v:
388 382 vargs[a] = b
389 383 except ValueError:
390 384 vargs[name] = v
391 385 return self.t(tag, **vargs)
392 386 lastname = 'last_' + name
393 387 if lastname in self.t:
394 388 last = values.pop()
395 389 else:
396 390 last = None
397 391 for v in values:
398 392 yield one(v)
399 393 if last is not None:
400 394 yield one(last, tag=lastname)
401 395 endname = 'end_' + names
402 396 if endname in self.t:
403 397 yield self.t(endname, **args)
404 398
405 399 def showbranches(**args):
406 400 branch = changes[5].get("branch")
407 401 if branch:
408 402 branch = util.tolocal(branch)
409 403 return showlist('branch', [branch], plural='branches', **args)
410 # add old style branches if requested
411 if self.brinfo:
412 br = self.repo.branchlookup([changenode])
413 if changenode in br:
414 return showlist('branch', br[changenode],
415 plural='branches', **args)
416 404
417 405 def showparents(**args):
418 406 parents = [[('rev', log.rev(p)), ('node', hex(p))]
419 407 for p in log.parents(changenode)
420 408 if self.ui.debugflag or p != nullid]
421 409 if (not self.ui.debugflag and len(parents) == 1 and
422 410 parents[0][0][1] == rev - 1):
423 411 return
424 412 return showlist('parent', parents, **args)
425 413
426 414 def showtags(**args):
427 415 return showlist('tag', self.repo.nodetags(changenode), **args)
428 416
429 417 def showextras(**args):
430 418 extras = changes[5].items()
431 419 extras.sort()
432 420 for key, value in extras:
433 421 args = args.copy()
434 422 args.update(dict(key=key, value=value))
435 423 yield self.t('extra', **args)
436 424
437 425 def showcopies(**args):
438 426 c = [{'name': x[0], 'source': x[1]} for x in copies]
439 427 return showlist('file_copy', c, plural='file_copies', **args)
440 428
441 429 if self.ui.debugflag:
442 430 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
443 431 def showfiles(**args):
444 432 return showlist('file', files[0], **args)
445 433 def showadds(**args):
446 434 return showlist('file_add', files[1], **args)
447 435 def showdels(**args):
448 436 return showlist('file_del', files[2], **args)
449 437 def showmanifest(**args):
450 438 args = args.copy()
451 439 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
452 440 node=hex(changes[0])))
453 441 return self.t('manifest', **args)
454 442 else:
455 443 def showfiles(**args):
456 444 return showlist('file', changes[3], **args)
457 445 showadds = ''
458 446 showdels = ''
459 447 showmanifest = ''
460 448
461 449 defprops = {
462 450 'author': changes[1],
463 451 'branches': showbranches,
464 452 'date': changes[2],
465 453 'desc': changes[4],
466 454 'file_adds': showadds,
467 455 'file_dels': showdels,
468 456 'files': showfiles,
469 457 'file_copies': showcopies,
470 458 'manifest': showmanifest,
471 459 'node': hex(changenode),
472 460 'parents': showparents,
473 461 'rev': rev,
474 462 'tags': showtags,
475 463 'extras': showextras,
476 464 }
477 465 props = props.copy()
478 466 props.update(defprops)
479 467
480 468 try:
481 469 if self.ui.debugflag and 'header_debug' in self.t:
482 470 key = 'header_debug'
483 471 elif self.ui.quiet and 'header_quiet' in self.t:
484 472 key = 'header_quiet'
485 473 elif self.ui.verbose and 'header_verbose' in self.t:
486 474 key = 'header_verbose'
487 475 elif 'header' in self.t:
488 476 key = 'header'
489 477 else:
490 478 key = ''
491 479 if key:
492 480 h = templater.stringify(self.t(key, **props))
493 481 if self.buffered:
494 482 self.header[rev] = h
495 483 else:
496 484 self.ui.write(h)
497 485 if self.ui.debugflag and 'changeset_debug' in self.t:
498 486 key = 'changeset_debug'
499 487 elif self.ui.quiet and 'changeset_quiet' in self.t:
500 488 key = 'changeset_quiet'
501 489 elif self.ui.verbose and 'changeset_verbose' in self.t:
502 490 key = 'changeset_verbose'
503 491 else:
504 492 key = 'changeset'
505 493 self.ui.write(templater.stringify(self.t(key, **props)))
506 494 self.showpatch(changenode)
507 495 except KeyError, inst:
508 496 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
509 497 inst.args[0]))
510 498 except SyntaxError, inst:
511 499 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
512 500
513 501 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
514 502 """show one changeset using template or regular display.
515 503
516 504 Display format will be the first non-empty hit of:
517 505 1. option 'template'
518 506 2. option 'style'
519 507 3. [ui] setting 'logtemplate'
520 508 4. [ui] setting 'style'
521 509 If all of these values are either the unset or the empty string,
522 510 regular display via changeset_printer() is done.
523 511 """
524 512 # options
525 513 patch = False
526 514 if opts.get('patch'):
527 515 patch = matchfn or util.always
528 516
529 br = None
530 if opts.get('branches'):
531 ui.warn(_("the --branches option is deprecated, "
532 "please use 'hg branches' instead\n"))
533 br = True
534 517 tmpl = opts.get('template')
535 518 mapfile = None
536 519 if tmpl:
537 520 tmpl = templater.parsestring(tmpl, quoted=False)
538 521 else:
539 522 mapfile = opts.get('style')
540 523 # ui settings
541 524 if not mapfile:
542 525 tmpl = ui.config('ui', 'logtemplate')
543 526 if tmpl:
544 527 tmpl = templater.parsestring(tmpl)
545 528 else:
546 529 mapfile = ui.config('ui', 'style')
547 530
548 531 if tmpl or mapfile:
549 532 if mapfile:
550 533 if not os.path.split(mapfile)[0]:
551 534 mapname = (templater.templatepath('map-cmdline.' + mapfile)
552 535 or templater.templatepath(mapfile))
553 536 if mapname: mapfile = mapname
554 537 try:
555 t = changeset_templater(ui, repo, patch, br, mapfile, buffered)
538 t = changeset_templater(ui, repo, patch, mapfile, buffered)
556 539 except SyntaxError, inst:
557 540 raise util.Abort(inst.args[0])
558 541 if tmpl: t.use_template(tmpl)
559 542 return t
560 return changeset_printer(ui, repo, patch, br, buffered)
543 return changeset_printer(ui, repo, patch, buffered)
561 544
562 545 def finddate(ui, repo, date):
563 546 """Find the tipmost changeset that matches the given date spec"""
564 547 df = util.matchdate(date + " to " + date)
565 548 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
566 549 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
567 550 results = {}
568 551 for st, rev, fns in changeiter:
569 552 if st == 'add':
570 553 d = get(rev)[2]
571 554 if df(d[0]):
572 555 results[rev] = d
573 556 elif st == 'iter':
574 557 if rev in results:
575 558 ui.status("Found revision %s from %s\n" %
576 559 (rev, util.datestr(results[rev])))
577 560 return str(rev)
578 561
579 562 raise util.Abort(_("revision matching date not found"))
580 563
581 564 def walkchangerevs(ui, repo, pats, change, opts):
582 565 '''Iterate over files and the revs they changed in.
583 566
584 567 Callers most commonly need to iterate backwards over the history
585 568 it is interested in. Doing so has awful (quadratic-looking)
586 569 performance, so we use iterators in a "windowed" way.
587 570
588 571 We walk a window of revisions in the desired order. Within the
589 572 window, we first walk forwards to gather data, then in the desired
590 573 order (usually backwards) to display it.
591 574
592 575 This function returns an (iterator, matchfn) tuple. The iterator
593 576 yields 3-tuples. They will be of one of the following forms:
594 577
595 578 "window", incrementing, lastrev: stepping through a window,
596 579 positive if walking forwards through revs, last rev in the
597 580 sequence iterated over - use to reset state for the current window
598 581
599 582 "add", rev, fns: out-of-order traversal of the given file names
600 583 fns, which changed during revision rev - use to gather data for
601 584 possible display
602 585
603 586 "iter", rev, None: in-order traversal of the revs earlier iterated
604 587 over with "add" - use to display data'''
605 588
606 589 def increasing_windows(start, end, windowsize=8, sizelimit=512):
607 590 if start < end:
608 591 while start < end:
609 592 yield start, min(windowsize, end-start)
610 593 start += windowsize
611 594 if windowsize < sizelimit:
612 595 windowsize *= 2
613 596 else:
614 597 while start > end:
615 598 yield start, min(windowsize, start-end-1)
616 599 start -= windowsize
617 600 if windowsize < sizelimit:
618 601 windowsize *= 2
619 602
620 603 files, matchfn, anypats = matchpats(repo, pats, opts)
621 604 follow = opts.get('follow') or opts.get('follow_first')
622 605
623 606 if repo.changelog.count() == 0:
624 607 return [], matchfn
625 608
626 609 if follow:
627 610 defrange = '%s:0' % repo.changectx().rev()
628 611 else:
629 612 defrange = 'tip:0'
630 613 revs = revrange(repo, opts['rev'] or [defrange])
631 614 wanted = {}
632 615 slowpath = anypats or opts.get('removed')
633 616 fncache = {}
634 617
635 618 if not slowpath and not files:
636 619 # No files, no patterns. Display all revs.
637 620 wanted = dict.fromkeys(revs)
638 621 copies = []
639 622 if not slowpath:
640 623 # Only files, no patterns. Check the history of each file.
641 624 def filerevgen(filelog, node):
642 625 cl_count = repo.changelog.count()
643 626 if node is None:
644 627 last = filelog.count() - 1
645 628 else:
646 629 last = filelog.rev(node)
647 630 for i, window in increasing_windows(last, nullrev):
648 631 revs = []
649 632 for j in xrange(i - window, i + 1):
650 633 n = filelog.node(j)
651 634 revs.append((filelog.linkrev(n),
652 635 follow and filelog.renamed(n)))
653 636 revs.reverse()
654 637 for rev in revs:
655 638 # only yield rev for which we have the changelog, it can
656 639 # happen while doing "hg log" during a pull or commit
657 640 if rev[0] < cl_count:
658 641 yield rev
659 642 def iterfiles():
660 643 for filename in files:
661 644 yield filename, None
662 645 for filename_node in copies:
663 646 yield filename_node
664 647 minrev, maxrev = min(revs), max(revs)
665 648 for file_, node in iterfiles():
666 649 filelog = repo.file(file_)
667 650 # A zero count may be a directory or deleted file, so
668 651 # try to find matching entries on the slow path.
669 652 if filelog.count() == 0:
670 653 slowpath = True
671 654 break
672 655 for rev, copied in filerevgen(filelog, node):
673 656 if rev <= maxrev:
674 657 if rev < minrev:
675 658 break
676 659 fncache.setdefault(rev, [])
677 660 fncache[rev].append(file_)
678 661 wanted[rev] = 1
679 662 if follow and copied:
680 663 copies.append(copied)
681 664 if slowpath:
682 665 if follow:
683 666 raise util.Abort(_('can only follow copies/renames for explicit '
684 667 'file names'))
685 668
686 669 # The slow path checks files modified in every changeset.
687 670 def changerevgen():
688 671 for i, window in increasing_windows(repo.changelog.count()-1,
689 672 nullrev):
690 673 for j in xrange(i - window, i + 1):
691 674 yield j, change(j)[3]
692 675
693 676 for rev, changefiles in changerevgen():
694 677 matches = filter(matchfn, changefiles)
695 678 if matches:
696 679 fncache[rev] = matches
697 680 wanted[rev] = 1
698 681
699 682 class followfilter:
700 683 def __init__(self, onlyfirst=False):
701 684 self.startrev = nullrev
702 685 self.roots = []
703 686 self.onlyfirst = onlyfirst
704 687
705 688 def match(self, rev):
706 689 def realparents(rev):
707 690 if self.onlyfirst:
708 691 return repo.changelog.parentrevs(rev)[0:1]
709 692 else:
710 693 return filter(lambda x: x != nullrev,
711 694 repo.changelog.parentrevs(rev))
712 695
713 696 if self.startrev == nullrev:
714 697 self.startrev = rev
715 698 return True
716 699
717 700 if rev > self.startrev:
718 701 # forward: all descendants
719 702 if not self.roots:
720 703 self.roots.append(self.startrev)
721 704 for parent in realparents(rev):
722 705 if parent in self.roots:
723 706 self.roots.append(rev)
724 707 return True
725 708 else:
726 709 # backwards: all parents
727 710 if not self.roots:
728 711 self.roots.extend(realparents(self.startrev))
729 712 if rev in self.roots:
730 713 self.roots.remove(rev)
731 714 self.roots.extend(realparents(rev))
732 715 return True
733 716
734 717 return False
735 718
736 719 # it might be worthwhile to do this in the iterator if the rev range
737 720 # is descending and the prune args are all within that range
738 721 for rev in opts.get('prune', ()):
739 722 rev = repo.changelog.rev(repo.lookup(rev))
740 723 ff = followfilter()
741 724 stop = min(revs[0], revs[-1])
742 725 for x in xrange(rev, stop-1, -1):
743 726 if ff.match(x) and x in wanted:
744 727 del wanted[x]
745 728
746 729 def iterate():
747 730 if follow and not files:
748 731 ff = followfilter(onlyfirst=opts.get('follow_first'))
749 732 def want(rev):
750 733 if ff.match(rev) and rev in wanted:
751 734 return True
752 735 return False
753 736 else:
754 737 def want(rev):
755 738 return rev in wanted
756 739
757 740 for i, window in increasing_windows(0, len(revs)):
758 741 yield 'window', revs[0] < revs[-1], revs[-1]
759 742 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
760 743 srevs = list(nrevs)
761 744 srevs.sort()
762 745 for rev in srevs:
763 746 fns = fncache.get(rev)
764 747 if not fns:
765 748 def fns_generator():
766 749 for f in change(rev)[3]:
767 750 if matchfn(f):
768 751 yield f
769 752 fns = fns_generator()
770 753 yield 'add', rev, fns
771 754 for rev in nrevs:
772 755 yield 'iter', rev, None
773 756 return iterate(), matchfn
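The cmdutil.py hunk is where the old-style support actually disappears: changeset_printer and changeset_templater lose their brinfo plumbing, the repo.branchlookup() calls go away, and show_changeset() stops honoring the deprecated --branches option. What remains is the named-branch lookup through the changeset's extra dict, roughly as sketched below; this is a hedged illustration mirroring changeset_printer._show() above, with the helper name being an assumption.

# Sketch of the branch lookup that survives this changeset: the named branch
# comes from the changeset's extra metadata, not from the removed
# repo.branchlookup() old-style mechanism.
from mercurial import util

def branch_of(repo, changenode):
    changes = repo.changelog.read(changenode)
    extra = changes[5]                # extra changeset metadata dict
    branch = extra.get("branch")      # new-style named branch, if set
    if branch:
        return util.tolocal(branch)
    return None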
@@ -1,3314 +1,3278 @@ commands.py
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
13 13 demandload(globals(), "difflib patch time help mdiff tempfile")
14 14 demandload(globals(), "traceback errno version atexit")
15 15 demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
16 16
17 17 class UnknownCommand(Exception):
18 18 """Exception raised if command is not in the command table."""
19 19 class AmbiguousCommand(Exception):
20 20 """Exception raised if command shortcut matches more than one command."""
21 21
22 22 def bail_if_changed(repo):
23 23 modified, added, removed, deleted = repo.status()[:4]
24 24 if modified or added or removed or deleted:
25 25 raise util.Abort(_("outstanding uncommitted changes"))
26 26
27 27 def logmessage(opts):
28 28 """ get the log message according to -m and -l option """
29 29 message = opts['message']
30 30 logfile = opts['logfile']
31 31
32 32 if message and logfile:
33 33 raise util.Abort(_('options --message and --logfile are mutually '
34 34 'exclusive'))
35 35 if not message and logfile:
36 36 try:
37 37 if logfile == '-':
38 38 message = sys.stdin.read()
39 39 else:
40 40 message = open(logfile).read()
41 41 except IOError, inst:
42 42 raise util.Abort(_("can't read commit message '%s': %s") %
43 43 (logfile, inst.strerror))
44 44 return message
45 45
46 46 def setremoteconfig(ui, opts):
47 47 "copy remote options to ui tree"
48 48 if opts.get('ssh'):
49 49 ui.setconfig("ui", "ssh", opts['ssh'])
50 50 if opts.get('remotecmd'):
51 51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52 52
53 53 # Commands start here, listed alphabetically
54 54
55 55 def add(ui, repo, *pats, **opts):
56 56 """add the specified files on the next commit
57 57
58 58 Schedule files to be version controlled and added to the repository.
59 59
60 60 The files will be added to the repository at the next commit. To
61 61 undo an add before that, see hg revert.
62 62
63 63 If no names are given, add all files in the repository.
64 64 """
65 65
66 66 names = []
67 67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 68 if exact:
69 69 if ui.verbose:
70 70 ui.status(_('adding %s\n') % rel)
71 71 names.append(abs)
72 72 elif repo.dirstate.state(abs) == '?':
73 73 ui.status(_('adding %s\n') % rel)
74 74 names.append(abs)
75 75 if not opts.get('dry_run'):
76 76 repo.add(names)
77 77
78 78 def addremove(ui, repo, *pats, **opts):
79 79 """add all new files, delete all missing files
80 80
81 81 Add all new files and remove all missing files from the repository.
82 82
83 83 New files are ignored if they match any of the patterns in .hgignore. As
84 84 with add, these changes take effect at the next commit.
85 85
86 86 Use the -s option to detect renamed files. With a parameter > 0,
87 87 this compares every removed file with every added file and records
88 88 those similar enough as renames. This option takes a percentage
89 89 between 0 (disabled) and 100 (files must be identical) as its
90 90 parameter. Detecting renamed files this way can be expensive.
91 91 """
92 92 sim = float(opts.get('similarity') or 0)
93 93 if sim < 0 or sim > 100:
94 94 raise util.Abort(_('similarity must be between 0 and 100'))
95 95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96 96
97 97 def annotate(ui, repo, *pats, **opts):
98 98 """show changeset information per file line
99 99
100 100 List changes in files, showing the revision id responsible for each line
101 101
102 102 This command is useful to discover who did a change or when a change took
103 103 place.
104 104
105 105 Without the -a option, annotate will avoid processing files it
106 106 detects as binary. With -a, annotate will generate an annotation
107 107 anyway, probably with undesirable results.
108 108 """
109 109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110 110
111 111 if not pats:
112 112 raise util.Abort(_('at least one file name or pattern required'))
113 113
114 114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 115 ['number', lambda x: str(x.rev())],
116 116 ['changeset', lambda x: short(x.node())],
117 117 ['date', getdate], ['follow', lambda x: x.path()]]
118 118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 119 and not opts['follow']):
120 120 opts['number'] = 1
121 121
122 122 ctx = repo.changectx(opts['rev'])
123 123
124 124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 125 node=ctx.node()):
126 126 fctx = ctx.filectx(abs)
127 127 if not opts['text'] and util.binary(fctx.data()):
128 128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 129 continue
130 130
131 131 lines = fctx.annotate(follow=opts.get('follow'))
132 132 pieces = []
133 133
134 134 for o, f in opmap:
135 135 if opts[o]:
136 136 l = [f(n) for n, dummy in lines]
137 137 if l:
138 138 m = max(map(len, l))
139 139 pieces.append(["%*s" % (m, x) for x in l])
140 140
141 141 if pieces:
142 142 for p, l in zip(zip(*pieces), lines):
143 143 ui.write("%s: %s" % (" ".join(p), l[1]))
144 144
145 145 def archive(ui, repo, dest, **opts):
146 146 '''create unversioned archive of a repository revision
147 147
148 148 By default, the revision used is the parent of the working
149 149 directory; use "-r" to specify a different revision.
150 150
151 151 To specify the type of archive to create, use "-t". Valid
152 152 types are:
153 153
154 154 "files" (default): a directory full of files
155 155 "tar": tar archive, uncompressed
156 156 "tbz2": tar archive, compressed using bzip2
157 157 "tgz": tar archive, compressed using gzip
158 158 "uzip": zip archive, uncompressed
159 159 "zip": zip archive, compressed using deflate
160 160
161 161 The exact name of the destination archive or directory is given
162 162 using a format string; see "hg help export" for details.
163 163
164 164 Each member added to an archive file has a directory prefix
165 165 prepended. Use "-p" to specify a format string for the prefix.
166 166 The default is the basename of the archive, with suffixes removed.
167 167 '''
168 168
169 169 node = repo.changectx(opts['rev']).node()
170 170 dest = cmdutil.make_filename(repo, dest, node)
171 171 if os.path.realpath(dest) == repo.root:
172 172 raise util.Abort(_('repository root cannot be destination'))
173 173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 174 kind = opts.get('type') or 'files'
175 175 prefix = opts['prefix']
176 176 if dest == '-':
177 177 if kind == 'files':
178 178 raise util.Abort(_('cannot archive plain files to stdout'))
179 179 dest = sys.stdout
180 180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 181 prefix = cmdutil.make_filename(repo, prefix, node)
182 182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 183 matchfn, prefix)
184 184
185 185 def backout(ui, repo, rev, **opts):
186 186 '''reverse effect of earlier changeset
187 187
188 188 Commit the backed out changes as a new changeset. The new
189 189 changeset is a child of the backed out changeset.
190 190
191 191 If you back out a changeset other than the tip, a new head is
192 192 created. This head is the parent of the working directory. If
193 193 you back out an old changeset, your working directory will appear
194 194 old after the backout. You should merge the backout changeset
195 195 with another head.
196 196
197 197 The --merge option remembers the parent of the working directory
198 198 before starting the backout, then merges the new head with that
199 199 changeset afterwards. This saves you from doing the merge by
200 200 hand. The result of this merge is not committed, as for a normal
201 201 merge.'''
202 202
203 203 bail_if_changed(repo)
204 204 op1, op2 = repo.dirstate.parents()
205 205 if op2 != nullid:
206 206 raise util.Abort(_('outstanding uncommitted merge'))
207 207 node = repo.lookup(rev)
208 208 p1, p2 = repo.changelog.parents(node)
209 209 if p1 == nullid:
210 210 raise util.Abort(_('cannot back out a change with no parents'))
211 211 if p2 != nullid:
212 212 if not opts['parent']:
213 213 raise util.Abort(_('cannot back out a merge changeset without '
214 214 '--parent'))
215 215 p = repo.lookup(opts['parent'])
216 216 if p not in (p1, p2):
217 217 raise util.Abort(_('%s is not a parent of %s') %
218 218 (short(p), short(node)))
219 219 parent = p
220 220 else:
221 221 if opts['parent']:
222 222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
223 223 parent = p1
224 224 hg.clean(repo, node, show_stats=False)
225 225 revert_opts = opts.copy()
226 226 revert_opts['date'] = None
227 227 revert_opts['all'] = True
228 228 revert_opts['rev'] = hex(parent)
229 229 revert(ui, repo, **revert_opts)
230 230 commit_opts = opts.copy()
231 231 commit_opts['addremove'] = False
232 232 if not commit_opts['message'] and not commit_opts['logfile']:
233 233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234 234 commit_opts['force_editor'] = True
235 235 commit(ui, repo, **commit_opts)
236 236 def nice(node):
237 237 return '%d:%s' % (repo.changelog.rev(node), short(node))
238 238 ui.status(_('changeset %s backs out changeset %s\n') %
239 239 (nice(repo.changelog.tip()), nice(node)))
240 240 if op1 != node:
241 241 if opts['merge']:
242 242 ui.status(_('merging with changeset %s\n') % nice(op1))
243 n = _lookup(repo, hex(op1))
244 hg.merge(repo, n)
243 hg.merge(repo, hex(op1))
245 244 else:
246 245 ui.status(_('the backout changeset is a new head - '
247 246 'do not forget to merge\n'))
248 247 ui.status(_('(use "backout --merge" '
249 248 'if you want to auto-merge)\n'))
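# Illustrative usage (hedged example, not from this repository's docs):
# back out a given revision and immediately merge the backout changeset
# with the previous working directory parent, leaving the merge
# uncommitted for review:
#   hg backout --merge REV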
250 249
251 250 def branch(ui, repo, label=None):
252 251 """set or show the current branch name
253 252
254 253 With <name>, set the current branch name. Otherwise, show the
255 254 current branch name.
256 255 """
257 256
258 257 if label is not None:
259 258 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
260 259 else:
261 260 b = util.tolocal(repo.workingctx().branch())
262 261 if b:
263 262 ui.write("%s\n" % b)
264 263
265 264 def branches(ui, repo):
266 265 """list repository named branches
267 266
268 267 List the repository's named branches.
269 268 """
270 269 b = repo.branchtags()
271 270 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
272 271 l.sort()
273 272 for r, n, t in l:
274 273 hexfunc = ui.debugflag and hex or short
275 274 if ui.quiet:
276 275 ui.write("%s\n" % t)
277 276 else:
278 277 t = util.localsub(t, 30)
279 278 t += " " * (30 - util.locallen(t))
280 279 ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
281 280
282 281 def bundle(ui, repo, fname, dest=None, **opts):
283 282 """create a changegroup file
284 283
285 284 Generate a compressed changegroup file collecting changesets not
286 285 found in the other repository.
287 286
288 287 If no destination repository is specified the destination is assumed
289 288 to have all the nodes specified by one or more --base parameters.
290 289
291 290 The bundle file can then be transferred using conventional means and
292 291 applied to another repository with the unbundle or pull command.
293 292 This is useful when direct push and pull are not available or when
294 293 exporting an entire repository is undesirable.
295 294
296 295 Applying bundles preserves all changeset contents including
297 296 permissions, copy/rename information, and revision history.
298 297 """
299 298 revs = opts.get('rev') or None
300 299 if revs:
301 300 revs = [repo.lookup(rev) for rev in revs]
302 301 base = opts.get('base')
303 302 if base:
304 303 if dest:
305 304 raise util.Abort(_("--base is incompatible with specifying "
306 305 "a destination"))
307 306 base = [repo.lookup(rev) for rev in base]
308 307 # create the right base
309 308 # XXX: nodesbetween / changegroup* should be "fixed" instead
310 309 o = []
311 310 has = {nullid: None}
312 311 for n in base:
313 312 has.update(repo.changelog.reachable(n))
314 313 if revs:
315 314 visit = list(revs)
316 315 else:
317 316 visit = repo.changelog.heads()
318 317 seen = {}
319 318 while visit:
320 319 n = visit.pop(0)
321 320 parents = [p for p in repo.changelog.parents(n) if p not in has]
322 321 if len(parents) == 0:
323 322 o.insert(0, n)
324 323 else:
325 324 for p in parents:
326 325 if p not in seen:
327 326 seen[p] = 1
328 327 visit.append(p)
329 328 else:
330 329 setremoteconfig(ui, opts)
331 330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
332 331 other = hg.repository(ui, dest)
333 332 o = repo.findoutgoing(other, force=opts['force'])
334 333
335 334 if revs:
336 335 cg = repo.changegroupsubset(o, revs, 'bundle')
337 336 else:
338 337 cg = repo.changegroup(o, 'bundle')
339 338 changegroup.writebundle(cg, fname, "HG10BZ")
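# Illustrative usage (assumed invocations based on the options handled
# above, not copied from the documentation): bundle the changesets missing
# from a sibling repository, or everything not reachable from a base:
#   hg bundle changes.hg ../other-repo
#   hg bundle --base REV changes.hg
# The resulting file can later be applied with "hg unbundle" or "hg pull".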
340 339
341 340 def cat(ui, repo, file1, *pats, **opts):
342 341 """output the latest or given revisions of files
343 342
344 343 Print the specified files as they were at the given revision.
345 344 If no revision is given then working dir parent is used, or tip
346 345 if no revision is checked out.
347 346
348 347 Output may be to a file, in which case the name of the file is
349 348 given using a format string. The formatting rules are the same as
350 349 for the export command, with the following additions:
351 350
352 351 %s basename of file being printed
353 352 %d dirname of file being printed, or '.' if in repo root
354 353 %p root-relative path name of file being printed
355 354 """
356 355 ctx = repo.changectx(opts['rev'])
357 356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
358 357 ctx.node()):
359 358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
360 359 fp.write(ctx.filectx(abs).data())
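# Illustrative usage (a sketch assuming the conventional long option
# spellings for opts['output'] and opts['rev'], which are not confirmed
# here): write each requested file to <dirname>/<basename>.orig as of
# revision REV, using the format keys listed in the docstring:
#   hg cat --output '%d/%s.orig' --rev REV FILE...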
361 360
362 361 def clone(ui, source, dest=None, **opts):
363 362 """make a copy of an existing repository
364 363
365 364 Create a copy of an existing repository in a new directory.
366 365
367 366 If no destination directory name is specified, it defaults to the
368 367 basename of the source.
369 368
370 369 The location of the source is added to the new repository's
371 370 .hg/hgrc file, as the default to be used for future pulls.
372 371
373 372 For efficiency, hardlinks are used for cloning whenever the source
374 373 and destination are on the same filesystem (note this applies only
375 374 to the repository data, not to the checked out files). Some
376 375 filesystems, such as AFS, implement hardlinking incorrectly, but
377 376 do not report errors. In these cases, use the --pull option to
378 377 avoid hardlinking.
379 378
380 379 You can safely clone repositories and checked out files using full
381 380 hardlinks with
382 381
383 382 $ cp -al REPO REPOCLONE
384 383
385 384 which is the fastest way to clone. However, the operation is not
386 385 atomic (making sure REPO is not modified during the operation is
387 386 up to you) and you have to make sure your editor breaks hardlinks
388 387 (Emacs and most Linux Kernel tools do so).
389 388
390 389 If you use the -r option to clone up to a specific revision, no
391 390 subsequent revisions will be present in the cloned repository.
392 391 This option implies --pull, even on local repositories.
393 392
394 393 See pull for valid source format details.
395 394
396 395 It is possible to specify an ssh:// URL as the destination, but no
397 396 .hg/hgrc and working directory will be created on the remote side.
398 397 Look at the help text for the pull command for important details
399 398 about ssh:// URLs.
400 399 """
401 400 setremoteconfig(ui, opts)
402 401 hg.clone(ui, ui.expandpath(source), dest,
403 402 pull=opts['pull'],
404 403 stream=opts['uncompressed'],
405 404 rev=opts['rev'],
406 405 update=not opts['noupdate'])
407 406
408 407 def commit(ui, repo, *pats, **opts):
409 408 """commit the specified files or all outstanding changes
410 409
411 410 Commit changes to the given files into the repository.
412 411
413 412 If a list of files is omitted, all changes reported by "hg status"
414 413 will be committed.
415 414
416 415 If no commit message is specified, the editor configured in your hgrc
417 416 or in the EDITOR environment variable is started to enter a message.
418 417 """
419 418 message = logmessage(opts)
420 419
421 420 if opts['addremove']:
422 421 cmdutil.addremove(repo, pats, opts)
423 422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
424 423 if pats:
425 424 status = repo.status(files=fns, match=match)
426 425 modified, added, removed, deleted, unknown = status[:5]
427 426 files = modified + added + removed
428 427 slist = None
429 428 for f in fns:
430 429 if f not in files:
431 430 rf = repo.wjoin(f)
432 431 if f in unknown:
433 432 raise util.Abort(_("file %s not tracked!") % rf)
434 433 try:
435 434 mode = os.lstat(rf)[stat.ST_MODE]
436 435 except OSError:
437 436 raise util.Abort(_("file %s not found!") % rf)
438 437 if stat.S_ISDIR(mode):
439 438 name = f + '/'
440 439 if slist is None:
441 440 slist = list(files)
442 441 slist.sort()
443 442 i = bisect.bisect(slist, name)
444 443 if i >= len(slist) or not slist[i].startswith(name):
445 444 raise util.Abort(_("no match under directory %s!")
446 445 % rf)
447 446 elif not stat.S_ISREG(mode):
448 447 raise util.Abort(_("can't commit %s: "
449 448 "unsupported file type!") % rf)
450 449 else:
451 450 files = []
452 451 try:
453 452 repo.commit(files, message, opts['user'], opts['date'], match,
454 453 force_editor=opts.get('force_editor'))
455 454 except ValueError, inst:
456 455 raise util.Abort(str(inst))
457 456
458 457 def docopy(ui, repo, pats, opts, wlock):
459 458 # called with the repo lock held
460 459 #
461 460 # hgsep => pathname that uses "/" to separate directories
462 461 # ossep => pathname that uses os.sep to separate directories
463 462 cwd = repo.getcwd()
464 463 errors = 0
465 464 copied = []
466 465 targets = {}
467 466
468 467 # abs: hgsep
469 468 # rel: ossep
470 469 # return: hgsep
471 470 def okaytocopy(abs, rel, exact):
472 471 reasons = {'?': _('is not managed'),
473 472 'a': _('has been marked for add'),
474 473 'r': _('has been marked for remove')}
475 474 state = repo.dirstate.state(abs)
476 475 reason = reasons.get(state)
477 476 if reason:
478 477 if state == 'a':
479 478 origsrc = repo.dirstate.copied(abs)
480 479 if origsrc is not None:
481 480 return origsrc
482 481 if exact:
483 482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
484 483 else:
485 484 return abs
486 485
487 486 # origsrc: hgsep
488 487 # abssrc: hgsep
489 488 # relsrc: ossep
490 489 # target: ossep
491 490 def copy(origsrc, abssrc, relsrc, target, exact):
492 491 abstarget = util.canonpath(repo.root, cwd, target)
493 492 reltarget = util.pathto(cwd, abstarget)
494 493 prevsrc = targets.get(abstarget)
495 494 if prevsrc is not None:
496 495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
497 496 (reltarget, util.localpath(abssrc),
498 497 util.localpath(prevsrc)))
499 498 return
500 499 if (not opts['after'] and os.path.exists(reltarget) or
501 500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
502 501 if not opts['force']:
503 502 ui.warn(_('%s: not overwriting - file exists\n') %
504 503 reltarget)
505 504 return
506 505 if not opts['after'] and not opts.get('dry_run'):
507 506 os.unlink(reltarget)
508 507 if opts['after']:
509 508 if not os.path.exists(reltarget):
510 509 return
511 510 else:
512 511 targetdir = os.path.dirname(reltarget) or '.'
513 512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
514 513 os.makedirs(targetdir)
515 514 try:
516 515 restore = repo.dirstate.state(abstarget) == 'r'
517 516 if restore and not opts.get('dry_run'):
518 517 repo.undelete([abstarget], wlock)
519 518 try:
520 519 if not opts.get('dry_run'):
521 520 util.copyfile(relsrc, reltarget)
522 521 restore = False
523 522 finally:
524 523 if restore:
525 524 repo.remove([abstarget], wlock)
526 525 except IOError, inst:
527 526 if inst.errno == errno.ENOENT:
528 527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
529 528 else:
530 529 ui.warn(_('%s: cannot copy - %s\n') %
531 530 (relsrc, inst.strerror))
532 531 errors += 1
533 532 return
534 533 if ui.verbose or not exact:
535 534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
536 535 targets[abstarget] = abssrc
537 536 if abstarget != origsrc and not opts.get('dry_run'):
538 537 repo.copy(origsrc, abstarget, wlock)
539 538 copied.append((abssrc, relsrc, exact))
540 539
541 540 # pat: ossep
542 541 # dest: ossep
543 542 # srcs: list of (hgsep, hgsep, ossep, bool)
544 543 # return: function that takes hgsep and returns ossep
545 544 def targetpathfn(pat, dest, srcs):
546 545 if os.path.isdir(pat):
547 546 abspfx = util.canonpath(repo.root, cwd, pat)
548 547 abspfx = util.localpath(abspfx)
549 548 if destdirexists:
550 549 striplen = len(os.path.split(abspfx)[0])
551 550 else:
552 551 striplen = len(abspfx)
553 552 if striplen:
554 553 striplen += len(os.sep)
555 554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
556 555 elif destdirexists:
557 556 res = lambda p: os.path.join(dest,
558 557 os.path.basename(util.localpath(p)))
559 558 else:
560 559 res = lambda p: dest
561 560 return res
562 561
563 562 # pat: ossep
565 564 # dest: ossep
565 564 # srcs: list of (hgsep, hgsep, ossep, bool)
566 565 # return: function that takes hgsep and returns ossep
567 566 def targetpathafterfn(pat, dest, srcs):
568 567 if util.patkind(pat, None)[0]:
569 568 # a mercurial pattern
570 569 res = lambda p: os.path.join(dest,
571 570 os.path.basename(util.localpath(p)))
572 571 else:
573 572 abspfx = util.canonpath(repo.root, cwd, pat)
574 573 if len(abspfx) < len(srcs[0][0]):
575 574 # A directory. Either the target path contains the last
576 575 # component of the source path or it does not.
577 576 def evalpath(striplen):
578 577 score = 0
579 578 for s in srcs:
580 579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
581 580 if os.path.exists(t):
582 581 score += 1
583 582 return score
584 583
585 584 abspfx = util.localpath(abspfx)
586 585 striplen = len(abspfx)
587 586 if striplen:
588 587 striplen += len(os.sep)
589 588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
590 589 score = evalpath(striplen)
591 590 striplen1 = len(os.path.split(abspfx)[0])
592 591 if striplen1:
593 592 striplen1 += len(os.sep)
594 593 if evalpath(striplen1) > score:
595 594 striplen = striplen1
596 595 res = lambda p: os.path.join(dest,
597 596 util.localpath(p)[striplen:])
598 597 else:
599 598 # a file
600 599 if destdirexists:
601 600 res = lambda p: os.path.join(dest,
602 601 os.path.basename(util.localpath(p)))
603 602 else:
604 603 res = lambda p: dest
605 604 return res
606 605
607 606
608 607 pats = list(pats)
609 608 if not pats:
610 609 raise util.Abort(_('no source or destination specified'))
611 610 if len(pats) == 1:
612 611 raise util.Abort(_('no destination specified'))
613 612 dest = pats.pop()
614 613 destdirexists = os.path.isdir(dest)
615 614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
616 615 raise util.Abort(_('with multiple sources, destination must be an '
617 616 'existing directory'))
618 617 if opts['after']:
619 618 tfn = targetpathafterfn
620 619 else:
621 620 tfn = targetpathfn
622 621 copylist = []
623 622 for pat in pats:
624 623 srcs = []
625 624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
626 625 origsrc = okaytocopy(abssrc, relsrc, exact)
627 626 if origsrc:
628 627 srcs.append((origsrc, abssrc, relsrc, exact))
629 628 if not srcs:
630 629 continue
631 630 copylist.append((tfn(pat, dest, srcs), srcs))
632 631 if not copylist:
633 632 raise util.Abort(_('no files to copy'))
634 633
635 634 for targetpath, srcs in copylist:
636 635 for origsrc, abssrc, relsrc, exact in srcs:
637 636 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
638 637
639 638 if errors:
640 639 ui.warn(_('(consider using --after)\n'))
641 640 return errors, copied
642 641
643 642 def copy(ui, repo, *pats, **opts):
644 643 """mark files as copied for the next commit
645 644
646 645 Mark dest as having copies of source files. If dest is a
647 646 directory, copies are put in that directory. If dest is a file,
648 647 there can only be one source.
649 648
650 649 By default, this command copies the contents of files as they
651 650 stand in the working directory. If invoked with --after, the
652 651 operation is recorded, but no copying is performed.
653 652
654 653 This command takes effect in the next commit. To undo a copy
655 654 before that, see hg revert.
656 655 """
657 656 wlock = repo.wlock(0)
658 657 errs, copied = docopy(ui, repo, pats, opts, wlock)
659 658 return errs
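# Illustrative usage (hedged example based on the docstring above): record
# an already-performed copy without touching the working directory:
#   hg copy --after SOURCE DEST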
660 659
661 660 def debugancestor(ui, index, rev1, rev2):
662 661 """find the ancestor revision of two revisions in a given index"""
663 662 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
664 663 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
665 664 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
666 665
667 666 def debugcomplete(ui, cmd='', **opts):
668 667 """returns the completion list associated with the given command"""
669 668
670 669 if opts['options']:
671 670 options = []
672 671 otables = [globalopts]
673 672 if cmd:
674 673 aliases, entry = findcmd(ui, cmd)
675 674 otables.append(entry[1])
676 675 for t in otables:
677 676 for o in t:
678 677 if o[0]:
679 678 options.append('-%s' % o[0])
680 679 options.append('--%s' % o[1])
681 680 ui.write("%s\n" % "\n".join(options))
682 681 return
683 682
684 683 clist = findpossible(ui, cmd).keys()
685 684 clist.sort()
686 685 ui.write("%s\n" % "\n".join(clist))
687 686
688 687 def debugrebuildstate(ui, repo, rev=None):
689 688 """rebuild the dirstate as it would look like for the given revision"""
690 689 if not rev:
691 690 rev = repo.changelog.tip()
692 691 else:
693 692 rev = repo.lookup(rev)
694 693 change = repo.changelog.read(rev)
695 694 n = change[0]
696 695 files = repo.manifest.read(n)
697 696 wlock = repo.wlock()
698 697 repo.dirstate.rebuild(rev, files)
699 698
700 699 def debugcheckstate(ui, repo):
701 700 """validate the correctness of the current dirstate"""
702 701 parent1, parent2 = repo.dirstate.parents()
703 702 repo.dirstate.read()
704 703 dc = repo.dirstate.map
705 704 keys = dc.keys()
706 705 keys.sort()
707 706 m1n = repo.changelog.read(parent1)[0]
708 707 m2n = repo.changelog.read(parent2)[0]
709 708 m1 = repo.manifest.read(m1n)
710 709 m2 = repo.manifest.read(m2n)
711 710 errors = 0
712 711 for f in dc:
713 712 state = repo.dirstate.state(f)
714 713 if state in "nr" and f not in m1:
715 714 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
716 715 errors += 1
717 716 if state in "a" and f in m1:
718 717 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
719 718 errors += 1
720 719 if state in "m" and f not in m1 and f not in m2:
721 720 ui.warn(_("%s in state %s, but not in either manifest\n") %
722 721 (f, state))
723 722 errors += 1
724 723 for f in m1:
725 724 state = repo.dirstate.state(f)
726 725 if state not in "nrm":
727 726 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
728 727 errors += 1
729 728 if errors:
730 729 error = _(".hg/dirstate inconsistent with current parent's manifest")
731 730 raise util.Abort(error)
732 731
733 732 def showconfig(ui, repo, *values, **opts):
734 733 """show combined config settings from all hgrc files
735 734
736 735 With no args, print names and values of all config items.
737 736
738 737 With one arg of the form section.name, print just the value of
739 738 that config item.
740 739
741 740 With multiple args, print names and values of all config items
742 741 with matching section names."""
743 742
744 743 untrusted = bool(opts.get('untrusted'))
745 744 if values:
746 745 if len([v for v in values if '.' in v]) > 1:
747 746 raise util.Abort(_('only one config item permitted'))
748 747 for section, name, value in ui.walkconfig(untrusted=untrusted):
749 748 sectname = section + '.' + name
750 749 if values:
751 750 for v in values:
752 751 if v == section:
753 752 ui.write('%s=%s\n' % (sectname, value))
754 753 elif v == sectname:
755 754 ui.write(value, '\n')
756 755 else:
757 756 ui.write('%s=%s\n' % (sectname, value))
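# Illustrative usage (examples inferred from the docstring above): print a
# single config item, or every item in a section:
#   hg showconfig ui.username
#   hg showconfig ui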
758 757
759 758 def debugsetparents(ui, repo, rev1, rev2=None):
760 759 """manually set the parents of the current working directory
761 760
762 761 This is useful for writing repository conversion tools, but should
763 762 be used with care.
764 763 """
765 764
766 765 if not rev2:
767 766 rev2 = hex(nullid)
768 767
769 768 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
770 769
771 770 def debugstate(ui, repo):
772 771 """show the contents of the current dirstate"""
773 772 repo.dirstate.read()
774 773 dc = repo.dirstate.map
775 774 keys = dc.keys()
776 775 keys.sort()
777 776 for file_ in keys:
778 777 ui.write("%c %3o %10d %s %s\n"
779 778 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
780 779 time.strftime("%x %X",
781 780 time.localtime(dc[file_][3])), file_))
782 781 for f in repo.dirstate.copies():
783 782 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
784 783
785 784 def debugdata(ui, file_, rev):
786 785 """dump the contents of a data file revision"""
787 786 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
788 787 file_[:-2] + ".i", file_, 0)
789 788 try:
790 789 ui.write(r.revision(r.lookup(rev)))
791 790 except KeyError:
792 791 raise util.Abort(_('invalid revision identifier %s') % rev)
793 792
794 793 def debugdate(ui, date, range=None, **opts):
795 794 """parse and display a date"""
796 795 if opts["extended"]:
797 796 d = util.parsedate(date, util.extendeddateformats)
798 797 else:
799 798 d = util.parsedate(date)
800 799 ui.write("internal: %s %s\n" % d)
801 800 ui.write("standard: %s\n" % util.datestr(d))
802 801 if range:
803 802 m = util.matchdate(range)
804 803 ui.write("match: %s\n" % m(d[0]))
805 804
806 805 def debugindex(ui, file_):
807 806 """dump the contents of an index file"""
808 807 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
809 808 ui.write(" rev offset length base linkrev" +
810 809 " nodeid p1 p2\n")
811 810 for i in xrange(r.count()):
812 811 node = r.node(i)
813 812 pp = r.parents(node)
814 813 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
815 814 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
816 815 short(node), short(pp[0]), short(pp[1])))
817 816
818 817 def debugindexdot(ui, file_):
819 818 """dump an index DAG as a .dot file"""
820 819 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
821 820 ui.write("digraph G {\n")
822 821 for i in xrange(r.count()):
823 822 node = r.node(i)
824 823 pp = r.parents(node)
825 824 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
826 825 if pp[1] != nullid:
827 826 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
828 827 ui.write("}\n")
829 828
830 829 def debuginstall(ui):
831 830 '''test Mercurial installation'''
832 831
833 832 def writetemp(contents):
834 833 (fd, name) = tempfile.mkstemp()
835 834 f = os.fdopen(fd, "wb")
836 835 f.write(contents)
837 836 f.close()
838 837 return name
839 838
840 839 problems = 0
841 840
842 841 # encoding
843 842 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
844 843 try:
845 844 util.fromlocal("test")
846 845 except util.Abort, inst:
847 846 ui.write(" %s\n" % inst)
848 847 ui.write(_(" (check that your locale is properly set)\n"))
849 848 problems += 1
850 849
851 850 # compiled modules
852 851 ui.status(_("Checking extensions...\n"))
853 852 try:
854 853 import bdiff, mpatch, base85
855 854 except Exception, inst:
856 855 ui.write(" %s\n" % inst)
857 856 ui.write(_(" One or more extensions could not be found"))
858 857 ui.write(_(" (check that you compiled the extensions)\n"))
859 858 problems += 1
860 859
861 860 # templates
862 861 ui.status(_("Checking templates...\n"))
863 862 try:
864 863 import templater
865 864 t = templater.templater(templater.templatepath("map-cmdline.default"))
866 865 except Exception, inst:
867 866 ui.write(" %s\n" % inst)
868 867 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
869 868 problems += 1
870 869
871 870 # patch
872 871 ui.status(_("Checking patch...\n"))
873 872 path = os.environ.get('PATH', '')
874 873 patcher = util.find_in_path('gpatch', path,
875 874 util.find_in_path('patch', path, None))
876 875 if not patcher:
877 876 ui.write(_(" Can't find patch or gpatch in PATH\n"))
878 877 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
879 878 problems += 1
880 879 else:
881 880 # actually attempt a patch here
882 881 a = "1\n2\n3\n4\n"
883 882 b = "1\n2\n3\ninsert\n4\n"
884 883 d = mdiff.unidiff(a, None, b, None, "a")
885 884 fa = writetemp(a)
886 885 fd = writetemp(d)
887 886 fp = os.popen('%s %s %s' % (patcher, fa, fd))
888 887 files = []
889 888 output = ""
890 889 for line in fp:
891 890 output += line
892 891 if line.startswith('patching file '):
893 892 pf = util.parse_patch_output(line.rstrip())
894 893 files.append(pf)
895 894 if files != [fa]:
896 895 ui.write(_(" unexpected patch output!"))
897 896 ui.write(_(" (you may have an incompatible version of patch)\n"))
898 897 ui.write(output)
899 898 problems += 1
900 899 a = file(fa).read()
901 900 if a != b:
902 901 ui.write(_(" patch test failed!"))
903 902 ui.write(_(" (you may have an incompatible version of patch)\n"))
904 903 problems += 1
905 904 os.unlink(fa)
906 905 os.unlink(fd)
907 906
908 907 # merge helper
909 908 ui.status(_("Checking merge helper...\n"))
910 909 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
911 910 or "hgmerge")
912 911 cmdpath = util.find_in_path(cmd, path)
913 912 if not cmdpath:
914 913 cmdpath = util.find_in_path(cmd.split()[0], path)
915 914 if not cmdpath:
916 915 if cmd == 'hgmerge':
917 916 ui.write(_(" No merge helper set and can't find default"
918 917 " hgmerge script in PATH\n"))
919 918 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
920 919 else:
921 920 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
922 921 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
923 922 problems += 1
924 923 else:
925 924 # actually attempt a merge here
926 925 fa = writetemp("1\n2\n3\n4\n")
927 926 fl = writetemp("1\n2\n3\ninsert\n4\n")
928 927 fr = writetemp("begin\n1\n2\n3\n4\n")
929 928 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
930 929 if r:
931 930 ui.write(_(" got unexpected merge error %d!") % r)
932 931 problems += 1
933 932 m = file(fl).read()
934 933 if m != "begin\n1\n2\n3\ninsert\n4\n":
935 934 ui.write(_(" got unexpected merge results!"))
936 935 ui.write(_(" (your merge helper may have the"
937 936 " wrong argument order)\n"))
938 937 ui.write(m)
939 938 os.unlink(fa)
940 939 os.unlink(fl)
941 940 os.unlink(fr)
942 941
943 942 # editor
944 943 ui.status(_("Checking commit editor...\n"))
945 944 editor = (os.environ.get("HGEDITOR") or
946 945 ui.config("ui", "editor") or
947 946 os.environ.get("EDITOR", "vi"))
948 947 cmdpath = util.find_in_path(editor, path)
949 948 if not cmdpath:
950 949 cmdpath = util.find_in_path(editor.split()[0], path)
951 950 if not cmdpath:
952 951 if editor == 'vi':
953 952 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
954 953 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
955 954 else:
956 955 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
957 956 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
958 957 problems += 1
959 958
960 959 # check username
961 960 ui.status(_("Checking username...\n"))
962 961 user = os.environ.get("HGUSER")
963 962 if user is None:
964 963 user = ui.config("ui", "username")
965 964 if user is None:
966 965 user = os.environ.get("EMAIL")
967 966 if not user:
968 967 ui.warn(" ")
969 968 ui.username()
970 969 ui.write(_(" (specify a username in your .hgrc file)\n"))
971 970
972 971 if not problems:
973 972 ui.status(_("No problems detected\n"))
974 973 else:
975 974 ui.write(_("%s problems detected,"
976 975 " please check your install!\n") % problems)
977 976
978 977 return problems
979 978
980 979 def debugrename(ui, repo, file1, *pats, **opts):
981 980 """dump rename information"""
982 981
983 982 ctx = repo.changectx(opts.get('rev', 'tip'))
984 983 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
985 984 ctx.node()):
986 985 m = ctx.filectx(abs).renamed()
987 986 if m:
988 987 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
989 988 else:
990 989 ui.write(_("%s not renamed\n") % rel)
991 990
992 991 def debugwalk(ui, repo, *pats, **opts):
993 992 """show how files match on given patterns"""
994 993 items = list(cmdutil.walk(repo, pats, opts))
995 994 if not items:
996 995 return
997 996 fmt = '%%s %%-%ds %%-%ds %%s' % (
998 997 max([len(abs) for (src, abs, rel, exact) in items]),
999 998 max([len(rel) for (src, abs, rel, exact) in items]))
1000 999 for src, abs, rel, exact in items:
1001 1000 line = fmt % (src, abs, rel, exact and 'exact' or '')
1002 1001 ui.write("%s\n" % line.rstrip())
1003 1002
1004 1003 def diff(ui, repo, *pats, **opts):
1005 1004 """diff repository (or selected files)
1006 1005
1007 1006 Show differences between revisions for the specified files.
1008 1007
1009 1008 Differences between files are shown using the unified diff format.
1010 1009
1011 1010 NOTE: diff may generate unexpected results for merges, as it will
1012 1011 default to comparing against the working directory's first parent
1013 1012 changeset if no revisions are specified.
1014 1013
1015 1014 When two revision arguments are given, then changes are shown
1016 1015 between those revisions. If only one revision is specified then
1017 1016 that revision is compared to the working directory, and, when no
1018 1017 revisions are specified, the working directory files are compared
1019 1018 to its parent.
1020 1019
1021 1020 Without the -a option, diff will avoid generating diffs of files
1022 1021 it detects as binary. With -a, diff will generate a diff anyway,
1023 1022 probably with undesirable results.
1024 1023 """
1025 1024 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1026 1025
1027 1026 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1028 1027
1029 1028 patch.diff(repo, node1, node2, fns, match=matchfn,
1030 1029 opts=patch.diffopts(ui, opts))
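# Illustrative usage (a sketch; the -r spelling for opts['rev'] is assumed,
# not confirmed here): compare two revisions, or one revision against the
# working directory:
#   hg diff -r REV1 -r REV2 [FILE...]
#   hg diff -r REV [FILE...]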
1031 1030
1032 1031 def export(ui, repo, *changesets, **opts):
1033 1032 """dump the header and diffs for one or more changesets
1034 1033
1035 1034 Print the changeset header and diffs for one or more revisions.
1036 1035
1037 1036 The information shown in the changeset header is: author,
1038 1037 changeset hash, parent(s) and commit comment.
1039 1038
1040 1039 NOTE: export may generate unexpected diff output for merge changesets,
1041 1040 as it will compare the merge changeset against its first parent only.
1042 1041
1043 1042 Output may be to a file, in which case the name of the file is
1044 1043 given using a format string. The formatting rules are as follows:
1045 1044
1046 1045 %% literal "%" character
1047 1046 %H changeset hash (40 bytes of hexadecimal)
1048 1047 %N number of patches being generated
1049 1048 %R changeset revision number
1050 1049 %b basename of the exporting repository
1051 1050 %h short-form changeset hash (12 bytes of hexadecimal)
1052 1051 %n zero-padded sequence number, starting at 1
1053 1052 %r zero-padded changeset revision number
1054 1053
1055 1054 Without the -a option, export will avoid generating diffs of files
1056 1055 it detects as binary. With -a, export will generate a diff anyway,
1057 1056 probably with undesirable results.
1058 1057
1059 1058 With the --switch-parent option, the diff will be against the second
1060 1059 parent. It can be useful to review a merge.
1061 1060 """
1062 1061 if not changesets:
1063 1062 raise util.Abort(_("export requires at least one changeset"))
1064 1063 revs = cmdutil.revrange(repo, changesets)
1065 1064 if len(revs) > 1:
1066 1065 ui.note(_('exporting patches:\n'))
1067 1066 else:
1068 1067 ui.note(_('exporting patch:\n'))
1069 1068 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1070 1069 switch_parent=opts['switch_parent'],
1071 1070 opts=patch.diffopts(ui, opts))
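# Illustrative usage (assumes the conventional long name for the
# opts['output'] template; the format keys are those listed in the
# docstring above): write one patch per revision, named by revision number
# and short hash:
#   hg export --output '%R-%h.patch' REV1 REV2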
1072 1071
1073 1072 def grep(ui, repo, pattern, *pats, **opts):
1074 1073 """search for a pattern in specified files and revisions
1075 1074
1076 1075 Search revisions of files for a regular expression.
1077 1076
1078 1077 This command behaves differently than Unix grep. It only accepts
1079 1078 Python/Perl regexps. It searches repository history, not the
1080 1079 working directory. It always prints the revision number in which
1081 1080 a match appears.
1082 1081
1083 1082 By default, grep only prints output for the first revision of a
1084 1083 file in which it finds a match. To get it to print every revision
1085 1084 that contains a change in match status ("-" for a match that
1086 1085 becomes a non-match, or "+" for a non-match that becomes a match),
1087 1086 use the --all flag.
1088 1087 """
1089 1088 reflags = 0
1090 1089 if opts['ignore_case']:
1091 1090 reflags |= re.I
1092 1091 regexp = re.compile(pattern, reflags)
1093 1092 sep, eol = ':', '\n'
1094 1093 if opts['print0']:
1095 1094 sep = eol = '\0'
1096 1095
1097 1096 fcache = {}
1098 1097 def getfile(fn):
1099 1098 if fn not in fcache:
1100 1099 fcache[fn] = repo.file(fn)
1101 1100 return fcache[fn]
1102 1101
1103 1102 def matchlines(body):
1104 1103 begin = 0
1105 1104 linenum = 0
1106 1105 while True:
1107 1106 match = regexp.search(body, begin)
1108 1107 if not match:
1109 1108 break
1110 1109 mstart, mend = match.span()
1111 1110 linenum += body.count('\n', begin, mstart) + 1
1112 1111 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1113 1112 lend = body.find('\n', mend)
1114 1113 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1115 1114 begin = lend + 1
1116 1115
1117 1116 class linestate(object):
1118 1117 def __init__(self, line, linenum, colstart, colend):
1119 1118 self.line = line
1120 1119 self.linenum = linenum
1121 1120 self.colstart = colstart
1122 1121 self.colend = colend
1123 1122
1124 1123 def __eq__(self, other):
1125 1124 return self.line == other.line
1126 1125
1127 1126 matches = {}
1128 1127 copies = {}
1129 1128 def grepbody(fn, rev, body):
1130 1129 matches[rev].setdefault(fn, [])
1131 1130 m = matches[rev][fn]
1132 1131 for lnum, cstart, cend, line in matchlines(body):
1133 1132 s = linestate(line, lnum, cstart, cend)
1134 1133 m.append(s)
1135 1134
1136 1135 def difflinestates(a, b):
1137 1136 sm = difflib.SequenceMatcher(None, a, b)
1138 1137 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1139 1138 if tag == 'insert':
1140 1139 for i in xrange(blo, bhi):
1141 1140 yield ('+', b[i])
1142 1141 elif tag == 'delete':
1143 1142 for i in xrange(alo, ahi):
1144 1143 yield ('-', a[i])
1145 1144 elif tag == 'replace':
1146 1145 for i in xrange(alo, ahi):
1147 1146 yield ('-', a[i])
1148 1147 for i in xrange(blo, bhi):
1149 1148 yield ('+', b[i])
1150 1149
1151 1150 prev = {}
1152 1151 def display(fn, rev, states, prevstates):
1153 1152 counts = {'-': 0, '+': 0}
1154 1153 filerevmatches = {}
1155 1154 if incrementing or not opts['all']:
1156 1155 a, b, r = prevstates, states, rev
1157 1156 else:
1158 1157 a, b, r = states, prevstates, prev.get(fn, -1)
1159 1158 for change, l in difflinestates(a, b):
1160 1159 cols = [fn, str(r)]
1161 1160 if opts['line_number']:
1162 1161 cols.append(str(l.linenum))
1163 1162 if opts['all']:
1164 1163 cols.append(change)
1165 1164 if opts['user']:
1166 1165 cols.append(ui.shortuser(get(r)[1]))
1167 1166 if opts['files_with_matches']:
1168 1167 c = (fn, r)
1169 1168 if c in filerevmatches:
1170 1169 continue
1171 1170 filerevmatches[c] = 1
1172 1171 else:
1173 1172 cols.append(l.line)
1174 1173 ui.write(sep.join(cols), eol)
1175 1174 counts[change] += 1
1176 1175 return counts['+'], counts['-']
1177 1176
1178 1177 fstate = {}
1179 1178 skip = {}
1180 1179 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1181 1180 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1182 1181 count = 0
1183 1182 incrementing = False
1184 1183 follow = opts.get('follow')
1185 1184 for st, rev, fns in changeiter:
1186 1185 if st == 'window':
1187 1186 incrementing = rev
1188 1187 matches.clear()
1189 1188 elif st == 'add':
1190 1189 mf = repo.changectx(rev).manifest()
1191 1190 matches[rev] = {}
1192 1191 for fn in fns:
1193 1192 if fn in skip:
1194 1193 continue
1195 1194 fstate.setdefault(fn, {})
1196 1195 try:
1197 1196 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1198 1197 if follow:
1199 1198 copied = getfile(fn).renamed(mf[fn])
1200 1199 if copied:
1201 1200 copies.setdefault(rev, {})[fn] = copied[0]
1202 1201 except KeyError:
1203 1202 pass
1204 1203 elif st == 'iter':
1205 1204 states = matches[rev].items()
1206 1205 states.sort()
1207 1206 for fn, m in states:
1208 1207 copy = copies.get(rev, {}).get(fn)
1209 1208 if fn in skip:
1210 1209 if copy:
1211 1210 skip[copy] = True
1212 1211 continue
1213 1212 if incrementing or not opts['all'] or fstate[fn]:
1214 1213 pos, neg = display(fn, rev, m, fstate[fn])
1215 1214 count += pos + neg
1216 1215 if pos and not opts['all']:
1217 1216 skip[fn] = True
1218 1217 if copy:
1219 1218 skip[copy] = True
1220 1219 fstate[fn] = m
1221 1220 if copy:
1222 1221 fstate[copy] = m
1223 1222 prev[fn] = rev
1224 1223
1225 1224 if not incrementing:
1226 1225 fstate = fstate.items()
1227 1226 fstate.sort()
1228 1227 for fn, state in fstate:
1229 1228 if fn in skip:
1230 1229 continue
1231 1230 if fn not in copies.get(prev[fn], {}):
1232 1231 display(fn, rev, {}, state)
1233 1232 return (count == 0 and 1) or 0
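# Illustrative usage (hedged; long option spellings inferred from the
# option keys handled above, e.g. opts['line_number']): report every
# revision in which the match status of a pattern changes, with line
# numbers:
#   hg grep --all --line-number PATTERN [FILE...]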
1234 1233
1235 1234 def heads(ui, repo, **opts):
1236 1235 """show current repository heads
1237 1236
1238 1237 Show all repository head changesets.
1239 1238
1240 1239 Repository "heads" are changesets that don't have children
1241 1240 changesets. They are where development generally takes place and
1242 1241 are the usual targets for update and merge operations.
1243 1242 """
1244 1243 if opts['rev']:
1245 1244 heads = repo.heads(repo.lookup(opts['rev']))
1246 1245 else:
1247 1246 heads = repo.heads()
1248 1247 displayer = cmdutil.show_changeset(ui, repo, opts)
1249 1248 for n in heads:
1250 1249 displayer.show(changenode=n)
1251 1250
1252 1251 def help_(ui, name=None, with_version=False):
1253 1252 """show help for a command, extension, or list of commands
1254 1253
1255 1254 With no arguments, print a list of commands and short help.
1256 1255
1257 1256 Given a command name, print help for that command.
1258 1257
1259 1258 Given an extension name, print help for that extension, and the
1260 1259 commands it provides."""
1261 1260 option_lists = []
1262 1261
1263 1262 def helpcmd(name):
1264 1263 if with_version:
1265 1264 version_(ui)
1266 1265 ui.write('\n')
1267 1266 aliases, i = findcmd(ui, name)
1268 1267 # synopsis
1269 1268 ui.write("%s\n\n" % i[2])
1270 1269
1271 1270 # description
1272 1271 doc = i[0].__doc__
1273 1272 if not doc:
1274 1273 doc = _("(No help text available)")
1275 1274 if ui.quiet:
1276 1275 doc = doc.splitlines(0)[0]
1277 1276 ui.write("%s\n" % doc.rstrip())
1278 1277
1279 1278 if not ui.quiet:
1280 1279 # aliases
1281 1280 if len(aliases) > 1:
1282 1281 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1283 1282
1284 1283 # options
1285 1284 if i[1]:
1286 1285 option_lists.append(("options", i[1]))
1287 1286
1288 1287 def helplist(select=None):
1289 1288 h = {}
1290 1289 cmds = {}
1291 1290 for c, e in table.items():
1292 1291 f = c.split("|", 1)[0]
1293 1292 if select and not select(f):
1294 1293 continue
1295 1294 if name == "shortlist" and not f.startswith("^"):
1296 1295 continue
1297 1296 f = f.lstrip("^")
1298 1297 if not ui.debugflag and f.startswith("debug"):
1299 1298 continue
1300 1299 doc = e[0].__doc__
1301 1300 if not doc:
1302 1301 doc = _("(No help text available)")
1303 1302 h[f] = doc.splitlines(0)[0].rstrip()
1304 1303 cmds[f] = c.lstrip("^")
1305 1304
1306 1305 fns = h.keys()
1307 1306 fns.sort()
1308 1307 m = max(map(len, fns))
1309 1308 for f in fns:
1310 1309 if ui.verbose:
1311 1310 commands = cmds[f].replace("|",", ")
1312 1311 ui.write(" %s:\n %s\n"%(commands, h[f]))
1313 1312 else:
1314 1313 ui.write(' %-*s %s\n' % (m, f, h[f]))
1315 1314
1316 1315 def helptopic(name):
1317 1316 v = None
1318 1317 for i in help.helptable:
1319 1318 l = i.split('|')
1320 1319 if name in l:
1321 1320 v = i
1322 1321 header = l[-1]
1323 1322 if not v:
1324 1323 raise UnknownCommand(name)
1325 1324
1326 1325 # description
1327 1326 doc = help.helptable[v]
1328 1327 if not doc:
1329 1328 doc = _("(No help text available)")
1330 1329 if callable(doc):
1331 1330 doc = doc()
1332 1331
1333 1332 ui.write("%s\n" % header)
1334 1333 ui.write("%s\n" % doc.rstrip())
1335 1334
1336 1335 def helpext(name):
1337 1336 try:
1338 1337 mod = findext(name)
1339 1338 except KeyError:
1340 1339 raise UnknownCommand(name)
1341 1340
1342 1341 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1343 1342 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1344 1343 for d in doc[1:]:
1345 1344 ui.write(d, '\n')
1346 1345
1347 1346 ui.status('\n')
1348 1347 if ui.verbose:
1349 1348 ui.status(_('list of commands:\n\n'))
1350 1349 else:
1351 1350 ui.status(_('list of commands (use "hg help -v %s" '
1352 1351 'to show aliases and global options):\n\n') % name)
1353 1352
1354 1353 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1355 1354 helplist(modcmds.has_key)
1356 1355
1357 1356 if name and name != 'shortlist':
1358 1357 i = None
1359 1358 for f in (helpcmd, helptopic, helpext):
1360 1359 try:
1361 1360 f(name)
1362 1361 i = None
1363 1362 break
1364 1363 except UnknownCommand, inst:
1365 1364 i = inst
1366 1365 if i:
1367 1366 raise i
1368 1367
1369 1368 else:
1370 1369 # program name
1371 1370 if ui.verbose or with_version:
1372 1371 version_(ui)
1373 1372 else:
1374 1373 ui.status(_("Mercurial Distributed SCM\n"))
1375 1374 ui.status('\n')
1376 1375
1377 1376 # list of commands
1378 1377 if name == "shortlist":
1379 1378 ui.status(_('basic commands (use "hg help" '
1380 1379 'for the full list or option "-v" for details):\n\n'))
1381 1380 elif ui.verbose:
1382 1381 ui.status(_('list of commands:\n\n'))
1383 1382 else:
1384 1383 ui.status(_('list of commands (use "hg help -v" '
1385 1384 'to show aliases and global options):\n\n'))
1386 1385
1387 1386 helplist()
1388 1387
1389 1388 # global options
1390 1389 if ui.verbose:
1391 1390 option_lists.append(("global options", globalopts))
1392 1391
1393 1392 # list all option lists
1394 1393 opt_output = []
1395 1394 for title, options in option_lists:
1396 1395 opt_output.append(("\n%s:\n" % title, None))
1397 1396 for shortopt, longopt, default, desc in options:
1398 1397 if "DEPRECATED" in desc and not ui.verbose: continue
1399 1398 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1400 1399 longopt and " --%s" % longopt),
1401 1400 "%s%s" % (desc,
1402 1401 default
1403 1402 and _(" (default: %s)") % default
1404 1403 or "")))
1405 1404
1406 1405 if opt_output:
1407 1406 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1408 1407 for first, second in opt_output:
1409 1408 if second:
1410 1409 ui.write(" %-*s %s\n" % (opts_len, first, second))
1411 1410 else:
1412 1411 ui.write("%s\n" % first)
1413 1412
1414 1413 def identify(ui, repo):
1415 1414 """print information about the working copy
1416 1415
1417 1416 Print a short summary of the current state of the repo.
1418 1417
1419 1418 This summary identifies the repository state using one or two parent
1420 1419 hash identifiers, followed by a "+" if there are uncommitted changes
1421 1420 in the working directory, followed by a list of tags for this revision.
1422 1421 """
1423 1422 parents = [p for p in repo.dirstate.parents() if p != nullid]
1424 1423 if not parents:
1425 1424 ui.write(_("unknown\n"))
1426 1425 return
1427 1426
1428 1427 hexfunc = ui.debugflag and hex or short
1429 1428 modified, added, removed, deleted = repo.status()[:4]
1430 1429 output = ["%s%s" %
1431 1430 ('+'.join([hexfunc(parent) for parent in parents]),
1432 1431 (modified or added or removed or deleted) and "+" or "")]
1433 1432
1434 1433 if not ui.quiet:
1435 1434
1436 1435 branch = util.tolocal(repo.workingctx().branch())
1437 1436 if branch:
1438 1437 output.append("(%s)" % branch)
1439 1438
1440 1439 # multiple tags for a single parent separated by '/'
1441 1440 parenttags = ['/'.join(tags)
1442 1441 for tags in map(repo.nodetags, parents) if tags]
1443 1442 # tags for multiple parents separated by ' + '
1444 1443 if parenttags:
1445 1444 output.append(' + '.join(parenttags))
1446 1445
1447 1446 ui.write("%s\n" % ' '.join(output))
1448 1447
1449 1448 def import_(ui, repo, patch1, *patches, **opts):
1450 1449 """import an ordered set of patches
1451 1450
1452 1451 Import a list of patches and commit them individually.
1453 1452
1454 1453 If there are outstanding changes in the working directory, import
1455 1454 will abort unless given the -f flag.
1456 1455
1457 1456 You can import a patch straight from a mail message. Even patches
1458 1457 as attachments work (body part must be type text/plain or
1459 1458 text/x-patch to be used). From and Subject headers of email
1460 1459 message are used as default committer and commit message. All
1461 1460 text/plain body parts before first diff are added to commit
1462 1461 message.
1463 1462
1464 1463 If imported patch was generated by hg export, user and description
1465 1464 from patch override values from message headers and body. Values
1466 1465 given on command line with -m and -u override these.
1467 1466
1468 1467 To read a patch from standard input, use patch name "-".
1469 1468 """
1470 1469 patches = (patch1,) + patches
1471 1470
1472 1471 if not opts['force']:
1473 1472 bail_if_changed(repo)
1474 1473
1475 1474 d = opts["base"]
1476 1475 strip = opts["strip"]
1477 1476
1478 1477 wlock = repo.wlock()
1479 1478 lock = repo.lock()
1480 1479
1481 1480 for p in patches:
1482 1481 pf = os.path.join(d, p)
1483 1482
1484 1483 if pf == '-':
1485 1484 ui.status(_("applying patch from stdin\n"))
1486 1485 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1487 1486 else:
1488 1487 ui.status(_("applying %s\n") % p)
1489 1488 tmpname, message, user, date = patch.extract(ui, file(pf))
1490 1489
1491 1490 if tmpname is None:
1492 1491 raise util.Abort(_('no diffs found'))
1493 1492
1494 1493 try:
1495 1494 cmdline_message = logmessage(opts)
1496 1495 if cmdline_message:
1497 1496 # pickup the cmdline msg
1498 1497 message = cmdline_message
1499 1498 elif message:
1500 1499 # pickup the patch msg
1501 1500 message = message.strip()
1502 1501 else:
1503 1502 # launch the editor
1504 1503 message = None
1505 1504 ui.debug(_('message:\n%s\n') % message)
1506 1505
1507 1506 files = {}
1508 1507 try:
1509 1508 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1510 1509 files=files)
1511 1510 finally:
1512 1511 files = patch.updatedir(ui, repo, files, wlock=wlock)
1513 1512 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1514 1513 finally:
1515 1514 os.unlink(tmpname)
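# Illustrative usage (a hedged example combining the stdin form described
# above with hg export): apply a patch read from standard input:
#   hg export REV | hg import -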
1516 1515
1517 1516 def incoming(ui, repo, source="default", **opts):
1518 1517 """show new changesets found in source
1519 1518
1520 1519 Show new changesets found in the specified path/URL or the default
1521 1520 pull location. These are the changesets that would be pulled if a pull
1522 1521 was requested.
1523 1522
1524 1523 For a remote repository, using --bundle avoids downloading the changesets
1525 1524 twice if the incoming is followed by a pull.
1526 1525
1527 1526 See pull for valid source format details.
1528 1527 """
1529 1528 source = ui.expandpath(source)
1530 1529 setremoteconfig(ui, opts)
1531 1530
1532 1531 other = hg.repository(ui, source)
1533 1532 incoming = repo.findincoming(other, force=opts["force"])
1534 1533 if not incoming:
1535 1534 ui.status(_("no changes found\n"))
1536 1535 return
1537 1536
1538 1537 cleanup = None
1539 1538 try:
1540 1539 fname = opts["bundle"]
1541 1540 if fname or not other.local():
1542 1541 # create a bundle (uncompressed if other repo is not local)
1543 1542 cg = other.changegroup(incoming, "incoming")
1544 1543 bundletype = other.local() and "HG10BZ" or "HG10UN"
1545 1544 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1546 1545 # keep written bundle?
1547 1546 if opts["bundle"]:
1548 1547 cleanup = None
1549 1548 if not other.local():
1550 1549 # use the created uncompressed bundlerepo
1551 1550 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1552 1551
1553 1552 revs = None
1554 1553 if opts['rev']:
1555 1554 revs = [other.lookup(rev) for rev in opts['rev']]
1556 1555 o = other.changelog.nodesbetween(incoming, revs)[0]
1557 1556 if opts['newest_first']:
1558 1557 o.reverse()
1559 1558 displayer = cmdutil.show_changeset(ui, other, opts)
1560 1559 for n in o:
1561 1560 parents = [p for p in other.changelog.parents(n) if p != nullid]
1562 1561 if opts['no_merges'] and len(parents) == 2:
1563 1562 continue
1564 1563 displayer.show(changenode=n)
1565 1564 finally:
1566 1565 if hasattr(other, 'close'):
1567 1566 other.close()
1568 1567 if cleanup:
1569 1568 os.unlink(cleanup)
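# Illustrative usage (hedged example based on the --bundle option above;
# the URL is a placeholder): save the incoming changesets while previewing
# them, then apply the saved bundle without downloading them again:
#   hg incoming --bundle incoming.hg http://example.com/repo
#   hg pull incoming.hg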
1570 1569
1571 1570 def init(ui, dest=".", **opts):
1572 1571 """create a new repository in the given directory
1573 1572
1574 1573 Initialize a new repository in the given directory. If the given
1575 1574 directory does not exist, it is created.
1576 1575
1577 1576 If no directory is given, the current directory is used.
1578 1577
1579 1578 It is possible to specify an ssh:// URL as the destination.
1580 1579 Look at the help text for the pull command for important details
1581 1580 about ssh:// URLs.
1582 1581 """
1583 1582 setremoteconfig(ui, opts)
1584 1583 hg.repository(ui, dest, create=1)
1585 1584
1586 1585 def locate(ui, repo, *pats, **opts):
1587 1586 """locate files matching specific patterns
1588 1587
1589 1588 Print all files under Mercurial control whose names match the
1590 1589 given patterns.
1591 1590
1592 1591 This command searches the current directory and its
1593 1592 subdirectories. To search an entire repository, move to the root
1594 1593 of the repository.
1595 1594
1596 1595 If no patterns are given to match, this command prints all file
1597 1596 names.
1598 1597
1599 1598 If you want to feed the output of this command into the "xargs"
1600 1599 command, use the "-0" option to both this command and "xargs".
1601 1600 This will avoid the problem of "xargs" treating single filenames
1602 1601 that contain white space as multiple filenames.
1603 1602 """
1604 1603 end = opts['print0'] and '\0' or '\n'
1605 1604 rev = opts['rev']
1606 1605 if rev:
1607 1606 node = repo.lookup(rev)
1608 1607 else:
1609 1608 node = None
1610 1609
1611 1610 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1612 1611 head='(?:.*/|)'):
1613 1612 if not node and repo.dirstate.state(abs) == '?':
1614 1613 continue
1615 1614 if opts['fullpath']:
1616 1615 ui.write(os.path.join(repo.root, abs), end)
1617 1616 else:
1618 1617 ui.write(((pats and rel) or abs), end)
1619 1618
1620 1619 def log(ui, repo, *pats, **opts):
1621 1620 """show revision history of entire repository or files
1622 1621
1623 1622 Print the revision history of the specified files or the entire
1624 1623 project.
1625 1624
1626 1625 File history is shown without following rename or copy history of
1627 1626 files. Use -f/--follow with a file name to follow history across
1628 1627 renames and copies. --follow without a file name will only show
1629 1628 ancestors or descendants of the starting revision. --follow-first
1630 1629 only follows the first parent of merge revisions.
1631 1630
1632 1631 If no revision range is specified, the default is tip:0 unless
1633 1632 --follow is set, in which case the working directory parent is
1634 1633 used as the starting revision.
1635 1634
1636 1635 By default this command outputs: changeset id and hash, tags,
1637 1636 non-trivial parents, user, date and time, and a summary for each
1638 1637 commit. When the -v/--verbose switch is used, the list of changed
1639 1638 files and full commit message is shown.
1640 1639
1641 1640 NOTE: log -p may generate unexpected diff output for merge
1642 1641 changesets, as it will compare the merge changeset against its
1643 1642 first parent only. Also, the files: list will only reflect files
1644 1643 that are different from BOTH parents.
1645 1644
1646 1645 """
1647 1646
1648 1647 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1649 1648 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1650 1649
1651 1650 if opts['limit']:
1652 1651 try:
1653 1652 limit = int(opts['limit'])
1654 1653 except ValueError:
1655 1654 raise util.Abort(_('limit must be a positive integer'))
1656 1655 if limit <= 0: raise util.Abort(_('limit must be positive'))
1657 1656 else:
1658 1657 limit = sys.maxint
1659 1658 count = 0
1660 1659
1661 1660 if opts['copies'] and opts['rev']:
1662 1661 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1663 1662 else:
1664 1663 endrev = repo.changelog.count()
1665 1664 rcache = {}
1666 1665 ncache = {}
1667 1666 dcache = []
1668 1667 def getrenamed(fn, rev, man):
1669 1668 '''looks up all renames for a file (up to endrev) the first
1670 1669 time the file is given. It indexes on the changerev and only
1671 1670 parses the manifest if linkrev != changerev.
1672 1671 Returns rename info for fn at changerev rev.'''
1673 1672 if fn not in rcache:
1674 1673 rcache[fn] = {}
1675 1674 ncache[fn] = {}
1676 1675 fl = repo.file(fn)
1677 1676 for i in xrange(fl.count()):
1678 1677 node = fl.node(i)
1679 1678 lr = fl.linkrev(node)
1680 1679 renamed = fl.renamed(node)
1681 1680 rcache[fn][lr] = renamed
1682 1681 if renamed:
1683 1682 ncache[fn][node] = renamed
1684 1683 if lr >= endrev:
1685 1684 break
1686 1685 if rev in rcache[fn]:
1687 1686 return rcache[fn][rev]
1688 1687 mr = repo.manifest.rev(man)
1689 1688 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1690 1689 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1691 1690 if not dcache or dcache[0] != man:
1692 1691 dcache[:] = [man, repo.manifest.readdelta(man)]
1693 1692 if fn in dcache[1]:
1694 1693 return ncache[fn].get(dcache[1][fn])
1695 1694 return None
1696 1695
1697 1696 df = False
1698 1697 if opts["date"]:
1699 1698 df = util.matchdate(opts["date"])
1700 1699
1701
1702 1700 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1703 1701 for st, rev, fns in changeiter:
1704 1702 if st == 'add':
1705 1703 changenode = repo.changelog.node(rev)
1706 1704 parents = [p for p in repo.changelog.parentrevs(rev)
1707 1705 if p != nullrev]
1708 1706 if opts['no_merges'] and len(parents) == 2:
1709 1707 continue
1710 1708 if opts['only_merges'] and len(parents) != 2:
1711 1709 continue
1712 1710
1713 1711 if df:
1714 1712 changes = get(rev)
1715 1713 if not df(changes[2][0]):
1716 1714 continue
1717 1715
1718 1716 if opts['keyword']:
1719 1717 changes = get(rev)
1720 1718 miss = 0
1721 1719 for k in [kw.lower() for kw in opts['keyword']]:
1722 1720 if not (k in changes[1].lower() or
1723 1721 k in changes[4].lower() or
1724 1722 k in " ".join(changes[3][:20]).lower()):
1725 1723 miss = 1
1726 1724 break
1727 1725 if miss:
1728 1726 continue
1729 1727
1730 1728 copies = []
1731 1729 if opts.get('copies') and rev:
1732 1730 mf = get(rev)[0]
1733 1731 for fn in get(rev)[3]:
1734 1732 rename = getrenamed(fn, rev, mf)
1735 1733 if rename:
1736 1734 copies.append((fn, rename[0]))
1737 1735 displayer.show(rev, changenode, copies=copies)
1738 1736 elif st == 'iter':
1739 1737 if count == limit: break
1740 1738 if displayer.flush(rev):
1741 1739 count += 1
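# Illustrative usage (hedged examples using only options named in the
# docstring above): follow a file's history across renames, or show each
# changeset together with its diff:
#   hg log -f FILE
#   hg log -p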
1742 1740
1743 1741 def manifest(ui, repo, rev=None):
1744 1742 """output the latest or given revision of the project manifest
1745 1743
1746 1744 Print a list of version controlled files for the given revision.
1747 1745
1748 1746 The manifest is the list of files being version controlled. If no revision
1749 1747 is given then the first parent of the working directory is used.
1750 1748
1751 1749 With -v flag, print file permissions. With --debug flag, print
1752 1750 file revision hashes.
1753 1751 """
1754 1752
1755 1753 m = repo.changectx(rev).manifest()
1756 1754 files = m.keys()
1757 1755 files.sort()
1758 1756
1759 1757 for f in files:
1760 1758 if ui.debugflag:
1761 1759 ui.write("%40s " % hex(m[f]))
1762 1760 if ui.verbose:
1763 1761 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1764 1762 ui.write("%s\n" % f)
1765 1763
1766 def merge(ui, repo, node=None, force=None, branch=None):
1764 def merge(ui, repo, node=None, force=None):
1767 1765 """Merge working directory with another revision
1768 1766
1769 1767 Merge the contents of the current working directory and the
1770 1768 requested revision. Files that changed between either parent are
1771 1769 marked as changed for the next commit and a commit must be
1772 1770 performed before any further updates are allowed.
1773 1771
1774 1772 If no revision is specified, the working directory's parent is a
1775 1773 head revision, and the repository contains exactly one other head,
1776 1774 the merge is performed with that other head by default. Otherwise, an explicit
1777 1775 revision to merge with must be provided.
1778 1776 """
1779 1777
1780 if node or branch:
1781 node = _lookup(repo, node, branch)
1782 else:
1778 if not node:
1783 1779 heads = repo.heads()
1784 1780 if len(heads) > 2:
1785 1781 raise util.Abort(_('repo has %d heads - '
1786 1782 'please merge with an explicit rev') %
1787 1783 len(heads))
1788 1784 if len(heads) == 1:
1789 1785 raise util.Abort(_('there is nothing to merge - '
1790 1786 'use "hg update" instead'))
1791 1787 parent = repo.dirstate.parents()[0]
1792 1788 if parent not in heads:
1793 1789 raise util.Abort(_('working dir not at a head rev - '
1794 1790 'use "hg update" or merge with an explicit rev'))
1795 1791 node = parent == heads[0] and heads[-1] or heads[0]
1796 1792 return hg.merge(repo, node, force=force)
1797 1793
1798 1794 def outgoing(ui, repo, dest=None, **opts):
1799 1795 """show changesets not found in destination
1800 1796
1801 1797 Show changesets not found in the specified destination repository or
1802 1798 the default push location. These are the changesets that would be pushed
1803 1799 if a push were requested.
1804 1800
1805 1801 See pull for valid destination format details.
1806 1802 """
1807 1803 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1808 1804 setremoteconfig(ui, opts)
1809 1805 revs = None
1810 1806 if opts['rev']:
1811 1807 revs = [repo.lookup(rev) for rev in opts['rev']]
1812 1808
1813 1809 other = hg.repository(ui, dest)
1814 1810 o = repo.findoutgoing(other, force=opts['force'])
1815 1811 if not o:
1816 1812 ui.status(_("no changes found\n"))
1817 1813 return
1818 1814 o = repo.changelog.nodesbetween(o, revs)[0]
1819 1815 if opts['newest_first']:
1820 1816 o.reverse()
1821 1817 displayer = cmdutil.show_changeset(ui, repo, opts)
1822 1818 for n in o:
1823 1819 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1824 1820 if opts['no_merges'] and len(parents) == 2:
1825 1821 continue
1826 1822 displayer.show(changenode=n)
1827 1823
1828 1824 def parents(ui, repo, file_=None, **opts):
1829 1825 """show the parents of the working dir or revision
1830 1826
1831 1827 Print the working directory's parent revisions. If a revision is given via -r/--rev, print the parents of that revision instead.
1832 1828 """
1833 1829 rev = opts.get('rev')
1834 1830 if rev:
1835 1831 if file_:
1836 1832 ctx = repo.filectx(file_, changeid=rev)
1837 1833 else:
1838 1834 ctx = repo.changectx(rev)
1839 1835 p = [cp.node() for cp in ctx.parents()]
1840 1836 else:
1841 1837 p = repo.dirstate.parents()
1842 1838
1843 1839 displayer = cmdutil.show_changeset(ui, repo, opts)
1844 1840 for n in p:
1845 1841 if n != nullid:
1846 1842 displayer.show(changenode=n)
1847 1843
1848 1844 def paths(ui, repo, search=None):
1849 1845 """show definition of symbolic path names
1850 1846
1851 1847 Show definition of symbolic path name NAME. If no name is given, show
1852 1848 definition of available names.
1853 1849
1854 1850 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1855 1851 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1856 1852 """
1857 1853 if search:
1858 1854 for name, path in ui.configitems("paths"):
1859 1855 if name == search:
1860 1856 ui.write("%s\n" % path)
1861 1857 return
1862 1858 ui.warn(_("not found!\n"))
1863 1859 return 1
1864 1860 else:
1865 1861 for name, path in ui.configitems("paths"):
1866 1862 ui.write("%s = %s\n" % (name, path))
1867 1863
1868 1864 def postincoming(ui, repo, modheads, optupdate):
1869 1865 if modheads == 0:
1870 1866 return
1871 1867 if optupdate:
1872 1868 if modheads == 1:
1873 1869 return hg.update(repo, repo.changelog.tip()) # update
1874 1870 else:
1875 1871 ui.status(_("not updating, since new heads added\n"))
1876 1872 if modheads > 1:
1877 1873 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1878 1874 else:
1879 1875 ui.status(_("(run 'hg update' to get a working copy)\n"))
1880 1876
1881 1877 def pull(ui, repo, source="default", **opts):
1882 1878 """pull changes from the specified source
1883 1879
1884 1880 Pull changes from a remote repository to a local one.
1885 1881
1886 1882 This finds all changes from the repository at the specified path
1887 1883 or URL and adds them to the local repository. By default, this
1888 1884 does not update the copy of the project in the working directory.
1889 1885
1890 1886 Valid URLs are of the form:
1891 1887
1892 1888 local/filesystem/path (or file://local/filesystem/path)
1893 1889 http://[user@]host[:port]/[path]
1894 1890 https://[user@]host[:port]/[path]
1895 1891 ssh://[user@]host[:port]/[path]
1896 1892 static-http://host[:port]/[path]
1897 1893
1898 1894 Paths in the local filesystem can either point to Mercurial
1899 1895 repositories or to bundle files (as created by 'hg bundle' or
1900 1896 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1901 1897 allows access to a Mercurial repository where you simply use a web
1902 1898 server to publish the .hg directory as static content.
1903 1899
1904 1900 Some notes about using SSH with Mercurial:
1905 1901 - SSH requires an accessible shell account on the destination machine
1906 1902 and a copy of hg in the remote path, or one specified with the remotecmd option.
1907 1903 - path is relative to the remote user's home directory by default.
1908 1904 Use an extra slash at the start of a path to specify an absolute path:
1909 1905 ssh://example.com//tmp/repository
1910 1906 - Mercurial doesn't use its own compression via SSH; the right thing
1911 1907 to do is to configure it in your ~/.ssh/config, e.g.:
1912 1908 Host *.mylocalnetwork.example.com
1913 1909 Compression no
1914 1910 Host *
1915 1911 Compression yes
1916 1912 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1917 1913 with the --ssh command line option.
1918 1914 """
1919 1915 source = ui.expandpath(source)
1920 1916 setremoteconfig(ui, opts)
1921 1917
1922 1918 other = hg.repository(ui, source)
1923 1919 ui.status(_('pulling from %s\n') % (source))
1924 1920 revs = None
1925 1921 if opts['rev']:
1926 1922 if 'lookup' in other.capabilities:
1927 1923 revs = [other.lookup(rev) for rev in opts['rev']]
1928 1924 else:
1929 1925 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1930 1926 raise util.Abort(error)
1931 1927 modheads = repo.pull(other, heads=revs, force=opts['force'])
1932 1928 return postincoming(ui, repo, modheads, opts['update'])
1933 1929
1934 1930 def push(ui, repo, dest=None, **opts):
1935 1931 """push changes to the specified destination
1936 1932
1937 1933 Push changes from the local repository to the given destination.
1938 1934
1939 1935 This is the symmetrical operation for pull. It helps to move
1940 1936 changes from the current repository to a different one. If the
1941 1937 destination is local this is identical to a pull in that directory
1942 1938 from the current one.
1943 1939
1944 1940 By default, push will refuse to run if it detects the result would
1945 1941 increase the number of remote heads. This generally indicates that
1946 1942 the client has forgotten to sync and merge before pushing.
1947 1943
1948 1944 Valid URLs are of the form:
1949 1945
1950 1946 local/filesystem/path (or file://local/filesystem/path)
1951 1947 ssh://[user@]host[:port]/[path]
1952 1948 http://[user@]host[:port]/[path]
1953 1949 https://[user@]host[:port]/[path]
1954 1950
1955 1951 Look at the help text for the pull command for important details
1956 1952 about ssh:// URLs.
1957 1953
1958 1954 Pushing to http:// and https:// URLs is only possible if this
1959 1955 feature is explicitly enabled on the remote Mercurial server.
1960 1956 """
1961 1957 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1962 1958 setremoteconfig(ui, opts)
1963 1959
1964 1960 other = hg.repository(ui, dest)
1965 1961 ui.status('pushing to %s\n' % (dest))
1966 1962 revs = None
1967 1963 if opts['rev']:
1968 1964 revs = [repo.lookup(rev) for rev in opts['rev']]
1969 1965 r = repo.push(other, opts['force'], revs=revs)
1970 1966 return r == 0
1971 1967
1972 1968 def rawcommit(ui, repo, *pats, **opts):
1973 1969 """raw commit interface (DEPRECATED)
1974 1970
1975 1971 (DEPRECATED)
1976 1972 Low-level commit, for use in helper scripts.
1977 1973
1978 1974 This command is not intended to be used by normal users, as it is
1979 1975 primarily useful for importing from other SCMs.
1980 1976
1981 1977 This command is now deprecated and will be removed in a future
1982 1978 release; please use debugsetparents and commit instead.
1983 1979 """
1984 1980
1985 1981 ui.warn(_("(the rawcommit command is deprecated)\n"))
1986 1982
1987 1983 message = logmessage(opts)
1988 1984
1989 1985 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1990 1986 if opts['files']:
1991 1987 files += open(opts['files']).read().splitlines()
1992 1988
1993 1989 parents = [repo.lookup(p) for p in opts['parent']]
1994 1990
1995 1991 try:
1996 1992 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1997 1993 except ValueError, inst:
1998 1994 raise util.Abort(str(inst))
1999 1995
2000 1996 def recover(ui, repo):
2001 1997 """roll back an interrupted transaction
2002 1998
2003 1999 Recover from an interrupted commit or pull.
2004 2000
2005 2001 This command tries to fix the repository status after an interrupted
2006 2002 operation. It should only be necessary when Mercurial suggests it.
2007 2003 """
2008 2004 if repo.recover():
2009 2005 return hg.verify(repo)
2010 2006 return 1
2011 2007
2012 2008 def remove(ui, repo, *pats, **opts):
2013 2009 """remove the specified files on the next commit
2014 2010
2015 2011 Schedule the indicated files for removal from the repository.
2016 2012
2017 2013 This only removes files from the current branch, not from the
2018 2014 entire project history. If the files still exist in the working
2019 2015 directory, they will be deleted from it. If invoked with --after,
2020 2016 files that have been manually deleted are marked as removed.
2021 2017
2022 2018 This command schedules the files to be removed at the next commit.
2023 2019 To undo a remove before that, see hg revert.
2024 2020
2025 2021 Modified files and added files are not removed by default. To
2026 2022 remove them, use the -f/--force option.
2027 2023 """
2028 2024 names = []
2029 2025 if not opts['after'] and not pats:
2030 2026 raise util.Abort(_('no files specified'))
2031 2027 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2032 2028 exact = dict.fromkeys(files)
2033 2029 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2034 2030 modified, added, removed, deleted, unknown = mardu
2035 2031 remove, forget = [], []
2036 2032 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2037 2033 reason = None
2038 2034 if abs not in deleted and opts['after']:
2039 2035 reason = _('is still present')
2040 2036 elif abs in modified and not opts['force']:
2041 2037 reason = _('is modified (use -f to force removal)')
2042 2038 elif abs in added:
2043 2039 if opts['force']:
2044 2040 forget.append(abs)
2045 2041 continue
2046 2042 reason = _('has been marked for add (use -f to force removal)')
2047 2043 elif abs in unknown:
2048 2044 reason = _('is not managed')
2049 2045 elif abs in removed:
2050 2046 continue
2051 2047 if reason:
2052 2048 if exact:
2053 2049 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2054 2050 else:
2055 2051 if ui.verbose or not exact:
2056 2052 ui.status(_('removing %s\n') % rel)
2057 2053 remove.append(abs)
2058 2054 repo.forget(forget)
2059 2055 repo.remove(remove, unlink=not opts['after'])
2060 2056
2061 2057 def rename(ui, repo, *pats, **opts):
2062 2058 """rename files; equivalent of copy + remove
2063 2059
2064 2060 Mark dest as copies of sources; mark sources for deletion. If
2065 2061 dest is a directory, copies are put in that directory. If dest is
2066 2062 a file, there can only be one source.
2067 2063
2068 2064 By default, this command copies the contents of files as they
2069 2065 stand in the working directory. If invoked with --after, the
2070 2066 operation is recorded, but no copying is performed.
2071 2067
2072 2068 This command takes effect in the next commit. To undo a rename
2073 2069 before that, see hg revert.
2074 2070 """
2075 2071 wlock = repo.wlock(0)
2076 2072 errs, copied = docopy(ui, repo, pats, opts, wlock)
2077 2073 names = []
2078 2074 for abs, rel, exact in copied:
2079 2075 if ui.verbose or not exact:
2080 2076 ui.status(_('removing %s\n') % rel)
2081 2077 names.append(abs)
2082 2078 if not opts.get('dry_run'):
2083 2079 repo.remove(names, True, wlock)
2084 2080 return errs
2085 2081
2086 2082 def revert(ui, repo, *pats, **opts):
2087 2083 """revert files or dirs to their states as of some revision
2088 2084
2089 2085 With no revision specified, revert the named files or directories
2090 2086 to the contents they had in the parent of the working directory.
2091 2087 This restores the contents of the affected files to an unmodified
2092 2088 state and unschedules adds, removes, copies, and renames. If the
2093 2089 working directory has two parents, you must explicitly specify the
2094 2090 revision to revert to.
2095 2091
2096 2092 Modified files are saved with a .orig suffix before reverting.
2097 2093 To disable these backups, use --no-backup.
2098 2094
2099 2095 Using the -r option, revert the given files or directories to their
2100 2096 contents as of a specific revision. This can be helpful to "roll
2101 2097 back" some or all of a change that should not have been committed.
2102 2098
2103 2099 Revert modifies the working directory. It does not commit any
2104 2100 changes, or change the parent of the working directory. If you
2105 2101 revert to a revision other than the parent of the working
2106 2102 directory, the reverted files will thus appear modified
2107 2103 afterwards.
2108 2104
2109 2105 If a file has been deleted, it is recreated. If the executable
2110 2106 mode of a file was changed, it is reset.
2111 2107
2112 2108 If names are given, all files matching the names are reverted.
2113 2109
2114 2110 If no arguments are given, no files are reverted.
2115 2111 """
2116 2112
2117 2113 if opts["date"]:
2118 2114 if opts["rev"]:
2119 2115 raise util.Abort(_("you can't specify a revision and a date"))
2120 2116 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2121 2117
2122 2118 if not pats and not opts['all']:
2123 2119 raise util.Abort(_('no files or directories specified; '
2124 2120 'use --all to revert the whole repo'))
2125 2121
2126 2122 parent, p2 = repo.dirstate.parents()
2127 2123 if not opts['rev'] and p2 != nullid:
2128 2124 raise util.Abort(_('uncommitted merge - please provide a '
2129 2125 'specific revision'))
2130 2126 node = repo.changectx(opts['rev']).node()
2131 2127 mf = repo.manifest.read(repo.changelog.read(node)[0])
2132 2128 if node == parent:
2133 2129 pmf = mf
2134 2130 else:
2135 2131 pmf = None
2136 2132
2137 2133 wlock = repo.wlock()
2138 2134
2139 2135 # need all matching names in dirstate and manifest of target rev,
2140 2136 # so have to walk both. do not print errors if files exist in one
2141 2137 # but not other.
2142 2138
2143 2139 names = {}
2144 2140 target_only = {}
2145 2141
2146 2142 # walk dirstate.
2147 2143
2148 2144 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2149 2145 badmatch=mf.has_key):
2150 2146 names[abs] = (rel, exact)
2151 2147 if src == 'b':
2152 2148 target_only[abs] = True
2153 2149
2154 2150 # walk target manifest.
2155 2151
2156 2152 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2157 2153 badmatch=names.has_key):
2158 2154 if abs in names: continue
2159 2155 names[abs] = (rel, exact)
2160 2156 target_only[abs] = True
2161 2157
2162 2158 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2163 2159 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2164 2160
2165 2161 revert = ([], _('reverting %s\n'))
2166 2162 add = ([], _('adding %s\n'))
2167 2163 remove = ([], _('removing %s\n'))
2168 2164 forget = ([], _('forgetting %s\n'))
2169 2165 undelete = ([], _('undeleting %s\n'))
2170 2166 update = {}
2171 2167
2172 2168 disptable = (
2173 2169 # dispatch table:
2174 2170 # file state
2175 2171 # action if in target manifest
2176 2172 # action if not in target manifest
2177 2173 # make backup if in target manifest
2178 2174 # make backup if not in target manifest
2179 2175 (modified, revert, remove, True, True),
2180 2176 (added, revert, forget, True, False),
2181 2177 (removed, undelete, None, False, False),
2182 2178 (deleted, revert, remove, False, False),
2183 2179 (unknown, add, None, True, False),
2184 2180 (target_only, add, None, False, False),
2185 2181 )
2186 2182
2187 2183 entries = names.items()
2188 2184 entries.sort()
2189 2185
2190 2186 for abs, (rel, exact) in entries:
2191 2187 mfentry = mf.get(abs)
2192 2188 def handle(xlist, dobackup):
2193 2189 xlist[0].append(abs)
2194 2190 update[abs] = 1
2195 2191 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2196 2192 bakname = "%s.orig" % rel
2197 2193 ui.note(_('saving current version of %s as %s\n') %
2198 2194 (rel, bakname))
2199 2195 if not opts.get('dry_run'):
2200 2196 util.copyfile(rel, bakname)
2201 2197 if ui.verbose or not exact:
2202 2198 ui.status(xlist[1] % rel)
2203 2199 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2204 2200 if abs not in table: continue
2205 2201 # file has changed in dirstate
2206 2202 if mfentry:
2207 2203 handle(hitlist, backuphit)
2208 2204 elif misslist is not None:
2209 2205 handle(misslist, backupmiss)
2210 2206 else:
2211 2207 if exact: ui.warn(_('file not managed: %s\n') % rel)
2212 2208 break
2213 2209 else:
2214 2210 # file has not changed in dirstate
2215 2211 if node == parent:
2216 2212 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2217 2213 continue
2218 2214 if pmf is None:
2219 2215 # only need parent manifest in this unlikely case,
2220 2216 # so do not read by default
2221 2217 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2222 2218 if abs in pmf:
2223 2219 if mfentry:
2224 2220 # if version of file is same in parent and target
2225 2221 # manifests, do nothing
2226 2222 if pmf[abs] != mfentry:
2227 2223 handle(revert, False)
2228 2224 else:
2229 2225 handle(remove, False)
2230 2226
2231 2227 if not opts.get('dry_run'):
2232 2228 repo.dirstate.forget(forget[0])
2233 2229 r = hg.revert(repo, node, update.has_key, wlock)
2234 2230 repo.dirstate.update(add[0], 'a')
2235 2231 repo.dirstate.update(undelete[0], 'n')
2236 2232 repo.dirstate.update(remove[0], 'r')
2237 2233 return r
2238 2234
2239 2235 def rollback(ui, repo):
2240 2236 """roll back the last transaction in this repository
2241 2237
2242 2238 Roll back the last transaction in this repository, restoring the
2243 2239 project to its state prior to the transaction.
2244 2240
2245 2241 Transactions are used to encapsulate the effects of all commands
2246 2242 that create new changesets or propagate existing changesets into a
2247 2243 repository. For example, the following commands are transactional,
2248 2244 and their effects can be rolled back:
2249 2245
2250 2246 commit
2251 2247 import
2252 2248 pull
2253 2249 push (with this repository as destination)
2254 2250 unbundle
2255 2251
2256 2252 This command should be used with care. There is only one level of
2257 2253 rollback, and there is no way to undo a rollback.
2258 2254
2259 2255 This command is not intended for use on public repositories. Once
2260 2256 changes are visible for pull by other users, rolling a transaction
2261 2257 back locally is ineffective (someone else may already have pulled
2262 2258 the changes). Furthermore, a race is possible with readers of the
2263 2259 repository; for example, an in-progress pull from the repository
2264 2260 may fail if a rollback is performed.
2265 2261 """
2266 2262 repo.rollback()
2267 2263
2268 2264 def root(ui, repo):
2269 2265 """print the root (top) of the current working dir
2270 2266
2271 2267 Print the root directory of the current repository.
2272 2268 """
2273 2269 ui.write(repo.root + "\n")
2274 2270
2275 2271 def serve(ui, repo, **opts):
2276 2272 """export the repository via HTTP
2277 2273
2278 2274 Start a local HTTP repository browser and pull server.
2279 2275
2280 2276 By default, the server logs accesses to stdout and errors to
2281 2277 stderr. Use the "-A" and "-E" options to log to files.
2282 2278 """
2283 2279
2284 2280 if opts["stdio"]:
2285 2281 if repo is None:
2286 2282 raise hg.RepoError(_("There is no Mercurial repository here"
2287 2283 " (.hg not found)"))
2288 2284 s = sshserver.sshserver(ui, repo)
2289 2285 s.serve_forever()
2290 2286
2291 2287 optlist = ("name templates style address port ipv6"
2292 2288 " accesslog errorlog webdir_conf")
2293 2289 for o in optlist.split():
2294 2290 if opts[o]:
2295 2291 ui.setconfig("web", o, str(opts[o]))
2296 2292
2297 2293 if repo is None and not ui.config("web", "webdir_conf"):
2298 2294 raise hg.RepoError(_("There is no Mercurial repository here"
2299 2295 " (.hg not found)"))
2300 2296
2301 2297 if opts['daemon'] and not opts['daemon_pipefds']:
2302 2298 rfd, wfd = os.pipe()
2303 2299 args = sys.argv[:]
2304 2300 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2305 2301 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2306 2302 args[0], args)
2307 2303 os.close(wfd)
2308 2304 os.read(rfd, 1)
2309 2305 os._exit(0)
2310 2306
2311 2307 httpd = hgweb.server.create_server(ui, repo)
2312 2308
2313 2309 if ui.verbose:
2314 2310 if httpd.port != 80:
2315 2311 ui.status(_('listening at http://%s:%d/\n') %
2316 2312 (httpd.addr, httpd.port))
2317 2313 else:
2318 2314 ui.status(_('listening at http://%s/\n') % httpd.addr)
2319 2315
2320 2316 if opts['pid_file']:
2321 2317 fp = open(opts['pid_file'], 'w')
2322 2318 fp.write(str(os.getpid()) + '\n')
2323 2319 fp.close()
2324 2320
2325 2321 if opts['daemon_pipefds']:
2326 2322 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2327 2323 os.close(rfd)
2328 2324 os.write(wfd, 'y')
2329 2325 os.close(wfd)
2330 2326 sys.stdout.flush()
2331 2327 sys.stderr.flush()
2332 2328 fd = os.open(util.nulldev, os.O_RDWR)
2333 2329 if fd != 0: os.dup2(fd, 0)
2334 2330 if fd != 1: os.dup2(fd, 1)
2335 2331 if fd != 2: os.dup2(fd, 2)
2336 2332 if fd not in (0, 1, 2): os.close(fd)
2337 2333
2338 2334 httpd.serve_forever()
2339 2335
2340 2336 def status(ui, repo, *pats, **opts):
2341 2337 """show changed files in the working directory
2342 2338
2343 2339 Show status of files in the repository. If names are given, only
2344 2340 files that match are shown. Files that are clean or ignored are
2345 2341 not listed unless -c (clean), -i (ignored) or -A is given.
2346 2342
2347 2343 NOTE: status may appear to disagree with diff if permissions have
2348 2344 changed or a merge has occurred. The standard diff format does not
2349 2345 report permission changes and diff only reports changes relative
2350 2346 to one merge parent.
2351 2347
2352 2348 If one revision is given, it is used as the base revision.
2353 2349 If two revisions are given, the difference between them is shown.
2354 2350
2355 2351 The codes used to show the status of files are:
2356 2352 M = modified
2357 2353 A = added
2358 2354 R = removed
2359 2355 C = clean
2360 2356 ! = deleted, but still tracked
2361 2357 ? = not tracked
2362 2358 I = ignored (not shown by default)
2363 2359 = the previous added file was copied from here
2364 2360 """
2365 2361
2366 2362 all = opts['all']
2367 2363 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2368 2364
2369 2365 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2370 2366 cwd = (pats and repo.getcwd()) or ''
2371 2367 modified, added, removed, deleted, unknown, ignored, clean = [
2372 2368 [util.pathto(cwd, x) for x in n]
2373 2369 for n in repo.status(node1=node1, node2=node2, files=files,
2374 2370 match=matchfn,
2375 2371 list_ignored=all or opts['ignored'],
2376 2372 list_clean=all or opts['clean'])]
2377 2373
2378 2374 changetypes = (('modified', 'M', modified),
2379 2375 ('added', 'A', added),
2380 2376 ('removed', 'R', removed),
2381 2377 ('deleted', '!', deleted),
2382 2378 ('unknown', '?', unknown),
2383 2379 ('ignored', 'I', ignored))
2384 2380
2385 2381 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2386 2382
2387 2383 end = opts['print0'] and '\0' or '\n'
2388 2384
2389 2385 for opt, char, changes in ([ct for ct in explicit_changetypes
2390 2386 if all or opts[ct[0]]]
2391 2387 or changetypes):
2392 2388 if opts['no_status']:
2393 2389 format = "%%s%s" % end
2394 2390 else:
2395 2391 format = "%s %%s%s" % (char, end)
2396 2392
2397 2393 for f in changes:
2398 2394 ui.write(format % f)
2399 2395 if ((all or opts.get('copies')) and not opts.get('no_status')):
2400 2396 copied = repo.dirstate.copied(f)
2401 2397 if copied:
2402 2398 ui.write(' %s%s' % (copied, end))
2403 2399
2404 2400 def tag(ui, repo, name, rev_=None, **opts):
2405 2401 """add a tag for the current tip or a given revision
2406 2402
2407 2403 Name a particular revision using <name>.
2408 2404
2409 2405 Tags are used to name particular revisions of the repository and are
2410 2406 very useful to compare different revisions, to go back to significant
2411 2407 earlier versions, or to mark branch points as releases, etc.
2412 2408
2413 2409 If no revision is given, the parent of the working directory is used.
2414 2410
2415 2411 To facilitate version control, distribution, and merging of tags,
2416 2412 they are stored as a file named ".hgtags" which is managed
2417 2413 similarly to other project files and can be hand-edited if
2418 2414 necessary. The file '.hg/localtags' is used for local tags (not
2419 2415 shared among repositories).
2420 2416 """
2421 2417 if name in ['tip', '.', 'null']:
2422 2418 raise util.Abort(_("the name '%s' is reserved") % name)
2423 2419 if rev_ is not None:
2424 2420 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2425 2421 "please use 'hg tag [-r REV] NAME' instead\n"))
2426 2422 if opts['rev']:
2427 2423 raise util.Abort(_("use only one form to specify the revision"))
2428 2424 if opts['rev']:
2429 2425 rev_ = opts['rev']
2430 2426 if not rev_ and repo.dirstate.parents()[1] != nullid:
2431 2427 raise util.Abort(_('uncommitted merge - please provide a '
2432 2428 'specific revision'))
2433 2429 r = repo.changectx(rev_).node()
2434 2430
2435 2431 message = opts['message']
2436 2432 if not message:
2437 2433 message = _('Added tag %s for changeset %s') % (name, short(r))
2438 2434
2439 2435 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2440 2436
2441 2437 def tags(ui, repo):
2442 2438 """list repository tags
2443 2439
2444 2440 List the repository tags.
2445 2441
2446 2442 This lists both regular and local tags.
2447 2443 """
2448 2444
2449 2445 l = repo.tagslist()
2450 2446 l.reverse()
2451 2447 hexfunc = ui.debugflag and hex or short
2452 2448 for t, n in l:
2453 2449 try:
2454 2450 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2455 2451 except KeyError:
2456 2452 r = " ?:?"
2457 2453 if ui.quiet:
2458 2454 ui.write("%s\n" % t)
2459 2455 else:
2460 2456 t = util.localsub(t, 30)
2461 2457 t += " " * (30 - util.locallen(t))
2462 2458 ui.write("%s %s\n" % (t, r))
2463 2459
2464 2460 def tip(ui, repo, **opts):
2465 2461 """show the tip revision
2466 2462
2467 2463 Show the tip revision.
2468 2464 """
2469 2465 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2470 2466
2471 2467 def unbundle(ui, repo, fname, **opts):
2472 2468 """apply a changegroup file
2473 2469
2474 2470 Apply a compressed changegroup file generated by the bundle
2475 2471 command.
2476 2472 """
2477 2473 gen = changegroup.readbundle(urllib.urlopen(fname), fname)
2478 2474 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2479 2475 return postincoming(ui, repo, modheads, opts['update'])
2480 2476
2481 def update(ui, repo, node=None, clean=False, branch=None, date=None):
2477 def update(ui, repo, node=None, clean=False, date=None):
2482 2478 """update or merge working directory
2483 2479
2484 2480 Update the working directory to the specified revision.
2485 2481
2486 2482 If there are no outstanding changes in the working directory and
2487 2483 there is a linear relationship between the current version and the
2488 2484 requested version, the result is the requested version.
2489 2485
2490 2486 To merge the working directory with another revision, use the
2491 2487 merge command.
2492 2488
2493 2489 By default, update will refuse to run if doing so would require
2494 2490 merging or discarding local changes.
2495 2491 """
2496 2492 if date:
2497 2493 if node:
2498 2494 raise util.Abort(_("you can't specify a revision and a date"))
2499 2495 node = cmdutil.finddate(ui, repo, date)
2500 2496
2501 node = _lookup(repo, node, branch)
2502 2497 if clean:
2503 2498 return hg.clean(repo, node)
2504 2499 else:
2505 2500 return hg.update(repo, node)
2506 2501
2507 def _lookup(repo, node, branch=None):
2508 if branch:
2509 repo.ui.warn(_("the --branch option is deprecated, "
2510 "please use 'hg branch' instead\n"))
2511 br = repo.branchlookup(branch=branch)
2512 found = []
2513 for x in br:
2514 if branch in br[x]:
2515 found.append(x)
2516 if len(found) > 1:
2517 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2518 for x in found:
2519 cmdutil.show_changeset(ui, repo, {}).show(changenode=x)
2520 raise util.Abort("")
2521 if len(found) == 1:
2522 node = found[0]
2523 repo.ui.warn(_("Using head %s for branch %s\n")
2524 % (short(node), branch))
2525 else:
2526 raise util.Abort(_("branch %s not found") % branch)
2527 else:
2528 node = node and repo.lookup(node) or repo.changelog.tip()
2529 return node
2530
2531 2502 def verify(ui, repo):
2532 2503 """verify the integrity of the repository
2533 2504
2534 2505 Verify the integrity of the current repository.
2535 2506
2536 2507 This will perform an extensive check of the repository's
2537 2508 integrity, validating the hashes and checksums of each entry in
2538 2509 the changelog, manifest, and tracked files, as well as the
2539 2510 integrity of their crosslinks and indices.
2540 2511 """
2541 2512 return hg.verify(repo)
2542 2513
2543 2514 def version_(ui):
2544 2515 """output version and copyright information"""
2545 2516 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2546 2517 % version.get_version())
2547 2518 ui.status(_(
2548 2519 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2549 2520 "This is free software; see the source for copying conditions. "
2550 2521 "There is NO\nwarranty; "
2551 2522 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2552 2523 ))
2553 2524
2554 2525 # Command options and aliases are listed here, alphabetically
2555 2526
2556 2527 globalopts = [
2557 2528 ('R', 'repository', '',
2558 2529 _('repository root directory or symbolic path name')),
2559 2530 ('', 'cwd', '', _('change working directory')),
2560 2531 ('y', 'noninteractive', None,
2561 2532 _('do not prompt, assume \'yes\' for any required answers')),
2562 2533 ('q', 'quiet', None, _('suppress output')),
2563 2534 ('v', 'verbose', None, _('enable additional output')),
2564 2535 ('', 'config', [], _('set/override config option')),
2565 2536 ('', 'debug', None, _('enable debugging output')),
2566 2537 ('', 'debugger', None, _('start debugger')),
2567 2538 ('', 'encoding', util._encoding, _('set the charset encoding')),
2568 2539 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2569 2540 ('', 'lsprof', None, _('print improved command execution profile')),
2570 2541 ('', 'traceback', None, _('print traceback on exception')),
2571 2542 ('', 'time', None, _('time how long the command takes')),
2572 2543 ('', 'profile', None, _('print command execution profile')),
2573 2544 ('', 'version', None, _('output version information and exit')),
2574 2545 ('h', 'help', None, _('display help and exit')),
2575 2546 ]
2576 2547
2577 2548 dryrunopts = [('n', 'dry-run', None,
2578 2549 _('do not perform actions, just print output'))]
2579 2550
2580 2551 remoteopts = [
2581 2552 ('e', 'ssh', '', _('specify ssh command to use')),
2582 2553 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2583 2554 ]
2584 2555
2585 2556 walkopts = [
2586 2557 ('I', 'include', [], _('include names matching the given patterns')),
2587 2558 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2588 2559 ]
2589 2560
2590 2561 commitopts = [
2591 2562 ('m', 'message', '', _('use <text> as commit message')),
2592 2563 ('l', 'logfile', '', _('read commit message from <file>')),
2593 2564 ]
2594 2565
2595 2566 table = {
2596 2567 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2597 2568 "addremove":
2598 2569 (addremove,
2599 2570 [('s', 'similarity', '',
2600 2571 _('guess renamed files by similarity (0<=s<=100)')),
2601 2572 ] + walkopts + dryrunopts,
2602 2573 _('hg addremove [OPTION]... [FILE]...')),
2603 2574 "^annotate":
2604 2575 (annotate,
2605 2576 [('r', 'rev', '', _('annotate the specified revision')),
2606 2577 ('f', 'follow', None, _('follow file copies and renames')),
2607 2578 ('a', 'text', None, _('treat all files as text')),
2608 2579 ('u', 'user', None, _('list the author')),
2609 2580 ('d', 'date', None, _('list the date')),
2610 2581 ('n', 'number', None, _('list the revision number (default)')),
2611 2582 ('c', 'changeset', None, _('list the changeset')),
2612 2583 ] + walkopts,
2613 2584 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2614 2585 "archive":
2615 2586 (archive,
2616 2587 [('', 'no-decode', None, _('do not pass files through decoders')),
2617 2588 ('p', 'prefix', '', _('directory prefix for files in archive')),
2618 2589 ('r', 'rev', '', _('revision to distribute')),
2619 2590 ('t', 'type', '', _('type of distribution to create')),
2620 2591 ] + walkopts,
2621 2592 _('hg archive [OPTION]... DEST')),
2622 2593 "backout":
2623 2594 (backout,
2624 2595 [('', 'merge', None,
2625 2596 _('merge with old dirstate parent after backout')),
2626 2597 ('d', 'date', '', _('record datecode as commit date')),
2627 2598 ('', 'parent', '', _('parent to choose when backing out merge')),
2628 2599 ('u', 'user', '', _('record user as committer')),
2629 2600 ] + walkopts + commitopts,
2630 2601 _('hg backout [OPTION]... REV')),
2631 2602 "branch": (branch, [], _('hg branch [NAME]')),
2632 2603 "branches": (branches, [], _('hg branches')),
2633 2604 "bundle":
2634 2605 (bundle,
2635 2606 [('f', 'force', None,
2636 2607 _('run even when remote repository is unrelated')),
2637 2608 ('r', 'rev', [],
2638 2609 _('a changeset you would like to bundle')),
2639 2610 ('', 'base', [],
2640 2611 _('a base changeset to specify instead of a destination')),
2641 2612 ] + remoteopts,
2642 2613 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2643 2614 "cat":
2644 2615 (cat,
2645 2616 [('o', 'output', '', _('print output to file with formatted name')),
2646 2617 ('r', 'rev', '', _('print the given revision')),
2647 2618 ] + walkopts,
2648 2619 _('hg cat [OPTION]... FILE...')),
2649 2620 "^clone":
2650 2621 (clone,
2651 2622 [('U', 'noupdate', None, _('do not update the new working directory')),
2652 2623 ('r', 'rev', [],
2653 2624 _('a changeset you would like to have after cloning')),
2654 2625 ('', 'pull', None, _('use pull protocol to copy metadata')),
2655 2626 ('', 'uncompressed', None,
2656 2627 _('use uncompressed transfer (fast over LAN)')),
2657 2628 ] + remoteopts,
2658 2629 _('hg clone [OPTION]... SOURCE [DEST]')),
2659 2630 "^commit|ci":
2660 2631 (commit,
2661 2632 [('A', 'addremove', None,
2662 2633 _('mark new/missing files as added/removed before committing')),
2663 2634 ('d', 'date', '', _('record datecode as commit date')),
2664 2635 ('u', 'user', '', _('record user as committer')),
2665 2636 ] + walkopts + commitopts,
2666 2637 _('hg commit [OPTION]... [FILE]...')),
2667 2638 "copy|cp":
2668 2639 (copy,
2669 2640 [('A', 'after', None, _('record a copy that has already occurred')),
2670 2641 ('f', 'force', None,
2671 2642 _('forcibly copy over an existing managed file')),
2672 2643 ] + walkopts + dryrunopts,
2673 2644 _('hg copy [OPTION]... [SOURCE]... DEST')),
2674 2645 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2675 2646 "debugcomplete":
2676 2647 (debugcomplete,
2677 2648 [('o', 'options', None, _('show the command options'))],
2678 2649 _('debugcomplete [-o] CMD')),
2679 2650 "debuginstall": (debuginstall, [], _('debuginstall')),
2680 2651 "debugrebuildstate":
2681 2652 (debugrebuildstate,
2682 2653 [('r', 'rev', '', _('revision to rebuild to'))],
2683 2654 _('debugrebuildstate [-r REV] [REV]')),
2684 2655 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2685 2656 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2686 2657 "debugstate": (debugstate, [], _('debugstate')),
2687 2658 "debugdate":
2688 2659 (debugdate,
2689 2660 [('e', 'extended', None, _('try extended date formats'))],
2690 2661 _('debugdate [-e] DATE [RANGE]')),
2691 2662 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2692 2663 "debugindex": (debugindex, [], _('debugindex FILE')),
2693 2664 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2694 2665 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2695 2666 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2696 2667 "^diff":
2697 2668 (diff,
2698 2669 [('r', 'rev', [], _('revision')),
2699 2670 ('a', 'text', None, _('treat all files as text')),
2700 2671 ('p', 'show-function', None,
2701 2672 _('show which function each change is in')),
2702 2673 ('g', 'git', None, _('use git extended diff format')),
2703 2674 ('', 'nodates', None, _("don't include dates in diff headers")),
2704 2675 ('w', 'ignore-all-space', None,
2705 2676 _('ignore white space when comparing lines')),
2706 2677 ('b', 'ignore-space-change', None,
2707 2678 _('ignore changes in the amount of white space')),
2708 2679 ('B', 'ignore-blank-lines', None,
2709 2680 _('ignore changes whose lines are all blank')),
2710 2681 ] + walkopts,
2711 2682 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2712 2683 "^export":
2713 2684 (export,
2714 2685 [('o', 'output', '', _('print output to file with formatted name')),
2715 2686 ('a', 'text', None, _('treat all files as text')),
2716 2687 ('g', 'git', None, _('use git extended diff format')),
2717 2688 ('', 'nodates', None, _("don't include dates in diff headers")),
2718 2689 ('', 'switch-parent', None, _('diff against the second parent'))],
2719 2690 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2720 2691 "grep":
2721 2692 (grep,
2722 2693 [('0', 'print0', None, _('end fields with NUL')),
2723 2694 ('', 'all', None, _('print all revisions that match')),
2724 2695 ('f', 'follow', None,
2725 2696 _('follow changeset history, or file history across copies and renames')),
2726 2697 ('i', 'ignore-case', None, _('ignore case when matching')),
2727 2698 ('l', 'files-with-matches', None,
2728 2699 _('print only filenames and revs that match')),
2729 2700 ('n', 'line-number', None, _('print matching line numbers')),
2730 2701 ('r', 'rev', [], _('search in given revision range')),
2731 2702 ('u', 'user', None, _('print user who committed change')),
2732 2703 ] + walkopts,
2733 2704 _('hg grep [OPTION]... PATTERN [FILE]...')),
2734 2705 "heads":
2735 2706 (heads,
2736 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2737 ('', 'style', '', _('display using template map file')),
2707 [('', 'style', '', _('display using template map file')),
2738 2708 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2739 2709 ('', 'template', '', _('display with template'))],
2740 2710 _('hg heads [-r REV]')),
2741 2711 "help": (help_, [], _('hg help [COMMAND]')),
2742 2712 "identify|id": (identify, [], _('hg identify')),
2743 2713 "import|patch":
2744 2714 (import_,
2745 2715 [('p', 'strip', 1,
2746 2716 _('directory strip option for patch. This has the same\n'
2747 2717 'meaning as the corresponding patch option')),
2748 ('b', 'base', '', _('base path (DEPRECATED)')),
2718 ('b', 'base', '', _('base path')),
2749 2719 ('f', 'force', None,
2750 2720 _('skip check for outstanding uncommitted changes'))] + commitopts,
2751 2721 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2752 2722 "incoming|in": (incoming,
2753 2723 [('M', 'no-merges', None, _('do not show merges')),
2754 2724 ('f', 'force', None,
2755 2725 _('run even when remote repository is unrelated')),
2756 2726 ('', 'style', '', _('display using template map file')),
2757 2727 ('n', 'newest-first', None, _('show newest record first')),
2758 2728 ('', 'bundle', '', _('file to store the bundles into')),
2759 2729 ('p', 'patch', None, _('show patch')),
2760 2730 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2761 2731 ('', 'template', '', _('display with template')),
2762 2732 ] + remoteopts,
2763 2733 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2764 2734 ' [--bundle FILENAME] [SOURCE]')),
2765 2735 "^init":
2766 2736 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2767 2737 "locate":
2768 2738 (locate,
2769 2739 [('r', 'rev', '', _('search the repository as it stood at rev')),
2770 2740 ('0', 'print0', None,
2771 2741 _('end filenames with NUL, for use with xargs')),
2772 2742 ('f', 'fullpath', None,
2773 2743 _('print complete paths from the filesystem root')),
2774 2744 ] + walkopts,
2775 2745 _('hg locate [OPTION]... [PATTERN]...')),
2776 2746 "^log|history":
2777 2747 (log,
2778 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2779 ('f', 'follow', None,
2748 [('f', 'follow', None,
2780 2749 _('follow changeset history, or file history across copies and renames')),
2781 2750 ('', 'follow-first', None,
2782 2751 _('only follow the first parent of merge changesets')),
2783 2752 ('d', 'date', '', _('show revs matching date spec')),
2784 2753 ('C', 'copies', None, _('show copied files')),
2785 2754 ('k', 'keyword', [], _('search for a keyword')),
2786 2755 ('l', 'limit', '', _('limit number of changes displayed')),
2787 2756 ('r', 'rev', [], _('show the specified revision or range')),
2788 2757 ('', 'removed', None, _('include revs where files were removed')),
2789 2758 ('M', 'no-merges', None, _('do not show merges')),
2790 2759 ('', 'style', '', _('display using template map file')),
2791 2760 ('m', 'only-merges', None, _('show only merges')),
2792 2761 ('p', 'patch', None, _('show patch')),
2793 2762 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2794 2763 ('', 'template', '', _('display with template')),
2795 2764 ] + walkopts,
2796 2765 _('hg log [OPTION]... [FILE]')),
2797 2766 "manifest": (manifest, [], _('hg manifest [REV]')),
2798 2767 "merge":
2799 2768 (merge,
2800 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2801 ('f', 'force', None, _('force a merge with outstanding changes'))],
2769 [('f', 'force', None, _('force a merge with outstanding changes'))],
2802 2770 _('hg merge [-f] [REV]')),
2803 2771 "outgoing|out": (outgoing,
2804 2772 [('M', 'no-merges', None, _('do not show merges')),
2805 2773 ('f', 'force', None,
2806 2774 _('run even when remote repository is unrelated')),
2807 2775 ('p', 'patch', None, _('show patch')),
2808 2776 ('', 'style', '', _('display using template map file')),
2809 2777 ('r', 'rev', [], _('a specific revision you would like to push')),
2810 2778 ('n', 'newest-first', None, _('show newest record first')),
2811 2779 ('', 'template', '', _('display with template')),
2812 2780 ] + remoteopts,
2813 2781 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2814 2782 "^parents":
2815 2783 (parents,
2816 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2817 ('r', 'rev', '', _('show parents from the specified rev')),
2784 [('r', 'rev', '', _('show parents from the specified rev')),
2818 2785 ('', 'style', '', _('display using template map file')),
2819 2786 ('', 'template', '', _('display with template'))],
2820 2787 _('hg parents [-r REV] [FILE]')),
2821 2788 "paths": (paths, [], _('hg paths [NAME]')),
2822 2789 "^pull":
2823 2790 (pull,
2824 2791 [('u', 'update', None,
2825 2792 _('update to new tip if changesets were pulled')),
2826 2793 ('f', 'force', None,
2827 2794 _('run even when remote repository is unrelated')),
2828 2795 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2829 2796 ] + remoteopts,
2830 2797 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2831 2798 "^push":
2832 2799 (push,
2833 2800 [('f', 'force', None, _('force push')),
2834 2801 ('r', 'rev', [], _('a specific revision you would like to push')),
2835 2802 ] + remoteopts,
2836 2803 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2837 2804 "debugrawcommit|rawcommit":
2838 2805 (rawcommit,
2839 2806 [('p', 'parent', [], _('parent')),
2840 2807 ('d', 'date', '', _('date code')),
2841 2808 ('u', 'user', '', _('user')),
2842 2809 ('F', 'files', '', _('file list'))
2843 2810 ] + commitopts,
2844 2811 _('hg debugrawcommit [OPTION]... [FILE]...')),
2845 2812 "recover": (recover, [], _('hg recover')),
2846 2813 "^remove|rm":
2847 2814 (remove,
2848 2815 [('A', 'after', None, _('record remove that has already occurred')),
2849 2816 ('f', 'force', None, _('remove file even if modified')),
2850 2817 ] + walkopts,
2851 2818 _('hg remove [OPTION]... FILE...')),
2852 2819 "rename|mv":
2853 2820 (rename,
2854 2821 [('A', 'after', None, _('record a rename that has already occurred')),
2855 2822 ('f', 'force', None,
2856 2823 _('forcibly copy over an existing managed file')),
2857 2824 ] + walkopts + dryrunopts,
2858 2825 _('hg rename [OPTION]... SOURCE... DEST')),
2859 2826 "^revert":
2860 2827 (revert,
2861 2828 [('a', 'all', None, _('revert all changes when no arguments given')),
2862 2829 ('d', 'date', '', _('tipmost revision matching date')),
2863 2830 ('r', 'rev', '', _('revision to revert to')),
2864 2831 ('', 'no-backup', None, _('do not save backup copies of files')),
2865 2832 ] + walkopts + dryrunopts,
2866 2833 _('hg revert [-r REV] [NAME]...')),
2867 2834 "rollback": (rollback, [], _('hg rollback')),
2868 2835 "root": (root, [], _('hg root')),
2869 2836 "showconfig|debugconfig":
2870 2837 (showconfig,
2871 2838 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2872 2839 _('showconfig [-u] [NAME]...')),
2873 2840 "^serve":
2874 2841 (serve,
2875 2842 [('A', 'accesslog', '', _('name of access log file to write to')),
2876 2843 ('d', 'daemon', None, _('run server in background')),
2877 2844 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2878 2845 ('E', 'errorlog', '', _('name of error log file to write to')),
2879 2846 ('p', 'port', 0, _('port to use (default: 8000)')),
2880 2847 ('a', 'address', '', _('address to use')),
2881 2848 ('n', 'name', '',
2882 2849 _('name to show in web pages (default: working dir)')),
2883 2850 ('', 'webdir-conf', '', _('name of the webdir config file'
2884 2851 ' (serve more than one repo)')),
2885 2852 ('', 'pid-file', '', _('name of file to write process ID to')),
2886 2853 ('', 'stdio', None, _('for remote clients')),
2887 2854 ('t', 'templates', '', _('web templates to use')),
2888 2855 ('', 'style', '', _('template style to use')),
2889 2856 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2890 2857 _('hg serve [OPTION]...')),
2891 2858 "^status|st":
2892 2859 (status,
2893 2860 [('A', 'all', None, _('show status of all files')),
2894 2861 ('m', 'modified', None, _('show only modified files')),
2895 2862 ('a', 'added', None, _('show only added files')),
2896 2863 ('r', 'removed', None, _('show only removed files')),
2897 2864 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2898 2865 ('c', 'clean', None, _('show only files without changes')),
2899 2866 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2900 2867 ('i', 'ignored', None, _('show ignored files')),
2901 2868 ('n', 'no-status', None, _('hide status prefix')),
2902 2869 ('C', 'copies', None, _('show source of copied files')),
2903 2870 ('0', 'print0', None,
2904 2871 _('end filenames with NUL, for use with xargs')),
2905 2872 ('', 'rev', [], _('show difference from revision')),
2906 2873 ] + walkopts,
2907 2874 _('hg status [OPTION]... [FILE]...')),
2908 2875 "tag":
2909 2876 (tag,
2910 2877 [('l', 'local', None, _('make the tag local')),
2911 2878 ('m', 'message', '', _('message for tag commit log entry')),
2912 2879 ('d', 'date', '', _('record datecode as commit date')),
2913 2880 ('u', 'user', '', _('record user as committer')),
2914 2881 ('r', 'rev', '', _('revision to tag'))],
2915 2882 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2916 2883 "tags": (tags, [], _('hg tags')),
2917 2884 "tip":
2918 2885 (tip,
2919 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2920 ('', 'style', '', _('display using template map file')),
2886 [('', 'style', '', _('display using template map file')),
2921 2887 ('p', 'patch', None, _('show patch')),
2922 2888 ('', 'template', '', _('display with template'))],
2923 2889 _('hg tip [-p]')),
2924 2890 "unbundle":
2925 2891 (unbundle,
2926 2892 [('u', 'update', None,
2927 2893 _('update to new tip if changesets were unbundled'))],
2928 2894 _('hg unbundle [-u] FILE')),
2929 2895 "^update|up|checkout|co":
2930 2896 (update,
2931 [('b', 'branch', '',
2932 _('checkout the head of a specific branch (DEPRECATED)')),
2933 ('C', 'clean', None, _('overwrite locally modified files')),
2897 [('C', 'clean', None, _('overwrite locally modified files')),
2934 2898 ('d', 'date', '', _('tipmost revision matching date'))],
2935 2899 _('hg update [-C] [REV]')),
2936 2900 "verify": (verify, [], _('hg verify')),
2937 2901 "version": (version_, [], _('hg version')),
2938 2902 }
2939 2903
2940 2904 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2941 2905 " debugindex debugindexdot debugdate debuginstall")
2942 2906 optionalrepo = ("paths serve showconfig")
2943 2907
2944 2908 def findpossible(ui, cmd):
2945 2909 """
2946 2910 Return cmd -> (aliases, command table entry)
2947 2911 for each matching command.
2948 2912 Return debug commands (or their aliases) only if no normal command matches.
2949 2913 """
2950 2914 choice = {}
2951 2915 debugchoice = {}
2952 2916 for e in table.keys():
2953 2917 aliases = e.lstrip("^").split("|")
2954 2918 found = None
2955 2919 if cmd in aliases:
2956 2920 found = cmd
2957 2921 elif not ui.config("ui", "strict"):
2958 2922 for a in aliases:
2959 2923 if a.startswith(cmd):
2960 2924 found = a
2961 2925 break
2962 2926 if found is not None:
2963 2927 if aliases[0].startswith("debug") or found.startswith("debug"):
2964 2928 debugchoice[found] = (aliases, table[e])
2965 2929 else:
2966 2930 choice[found] = (aliases, table[e])
2967 2931
2968 2932 if not choice and debugchoice:
2969 2933 choice = debugchoice
2970 2934
2971 2935 return choice
2972 2936
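Editor's note (not part of the changeset): the prefix matching done by findpossible above, and relied on by findcmd below, can be illustrated with a stripped-down, self-contained sketch. The table entries here are stand-ins, and the debug-command demotion of the real function is omitted.

# illustrative sketch only -- simplified model of unambiguous prefix matching
demo_table = {"^status|st": None, "^serve": None, "showconfig|debugconfig": None}

def demo_possible(cmd, strict=False):
    choice = {}
    for entry in demo_table:
        aliases = entry.lstrip("^").split("|")
        if cmd in aliases:
            choice[cmd] = aliases           # exact alias match
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):       # prefix match, first alias wins
                    choice[a] = aliases
                    break
    return choice

print demo_possible("st")    # one entry: only "status|st" matches exactly
print demo_possible("s")     # three entries: status, serve and showconfig -> ambiguous
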
2973 2937 def findcmd(ui, cmd):
2974 2938 """Return (aliases, command table entry) for command string."""
2975 2939 choice = findpossible(ui, cmd)
2976 2940
2977 2941 if choice.has_key(cmd):
2978 2942 return choice[cmd]
2979 2943
2980 2944 if len(choice) > 1:
2981 2945 clist = choice.keys()
2982 2946 clist.sort()
2983 2947 raise AmbiguousCommand(cmd, clist)
2984 2948
2985 2949 if choice:
2986 2950 return choice.values()[0]
2987 2951
2988 2952 raise UnknownCommand(cmd)
2989 2953
2990 2954 def catchterm(*args):
2991 2955 raise util.SignalInterrupt
2992 2956
2993 2957 def run():
2994 2958 sys.exit(dispatch(sys.argv[1:]))
2995 2959
2996 2960 class ParseError(Exception):
2997 2961 """Exception raised on errors in parsing the command line."""
2998 2962
2999 2963 def parse(ui, args):
3000 2964 options = {}
3001 2965 cmdoptions = {}
3002 2966
3003 2967 try:
3004 2968 args = fancyopts.fancyopts(args, globalopts, options)
3005 2969 except fancyopts.getopt.GetoptError, inst:
3006 2970 raise ParseError(None, inst)
3007 2971
3008 2972 if args:
3009 2973 cmd, args = args[0], args[1:]
3010 2974 aliases, i = findcmd(ui, cmd)
3011 2975 cmd = aliases[0]
3012 2976 defaults = ui.config("defaults", cmd)
3013 2977 if defaults:
3014 2978 args = shlex.split(defaults) + args
3015 2979 c = list(i[1])
3016 2980 else:
3017 2981 cmd = None
3018 2982 c = []
3019 2983
3020 2984 # combine global options into local
3021 2985 for o in globalopts:
3022 2986 c.append((o[0], o[1], options[o[1]], o[3]))
3023 2987
3024 2988 try:
3025 2989 args = fancyopts.fancyopts(args, c, cmdoptions)
3026 2990 except fancyopts.getopt.GetoptError, inst:
3027 2991 raise ParseError(cmd, inst)
3028 2992
3029 2993 # separate global options back out
3030 2994 for o in globalopts:
3031 2995 n = o[1]
3032 2996 options[n] = cmdoptions[n]
3033 2997 del cmdoptions[n]
3034 2998
3035 2999 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3036 3000
3037 3001 external = {}
3038 3002
3039 3003 def findext(name):
3040 3004 '''return module with given extension name'''
3041 3005 try:
3042 3006 return sys.modules[external[name]]
3043 3007 except KeyError:
3044 3008 for k, v in external.iteritems():
3045 3009 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3046 3010 return sys.modules[v]
3047 3011 raise KeyError(name)
3048 3012
3049 3013 def load_extensions(ui):
3050 3014 added = []
3051 3015 for ext_name, load_from_name in ui.extensions():
3052 3016 if ext_name in external:
3053 3017 continue
3054 3018 try:
3055 3019 if load_from_name:
3056 3020 # the module will be loaded in sys.modules
3057 3021 # choose a unique name so that it doesn't
3058 3022 # conflict with other modules
3059 3023 module_name = "hgext_%s" % ext_name.replace('.', '_')
3060 3024 mod = imp.load_source(module_name, load_from_name)
3061 3025 else:
3062 3026 def importh(name):
3063 3027 mod = __import__(name)
3064 3028 components = name.split('.')
3065 3029 for comp in components[1:]:
3066 3030 mod = getattr(mod, comp)
3067 3031 return mod
3068 3032 try:
3069 3033 mod = importh("hgext.%s" % ext_name)
3070 3034 except ImportError:
3071 3035 mod = importh(ext_name)
3072 3036 external[ext_name] = mod.__name__
3073 3037 added.append((mod, ext_name))
3074 3038 except (util.SignalInterrupt, KeyboardInterrupt):
3075 3039 raise
3076 3040 except Exception, inst:
3077 3041 ui.warn(_("*** failed to import extension %s: %s\n") %
3078 3042 (ext_name, inst))
3079 3043 if ui.print_exc():
3080 3044 return 1
3081 3045
3082 3046 for mod, name in added:
3083 3047 uisetup = getattr(mod, 'uisetup', None)
3084 3048 if uisetup:
3085 3049 uisetup(ui)
3086 3050 cmdtable = getattr(mod, 'cmdtable', {})
3087 3051 for t in cmdtable:
3088 3052 if t in table:
3089 3053 ui.warn(_("module %s overrides %s\n") % (name, t))
3090 3054 table.update(cmdtable)
3091 3055
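Editor's note: load_extensions above relies on just two optional attributes of an extension module, a uisetup(ui) callable and a cmdtable dict that is merged into the global command table. A minimal hypothetical extension consistent with that contract could look like the sketch below; the command name and messages are invented for illustration.

# hello.py -- hypothetical extension, used only to illustrate the contract above
from mercurial.i18n import gettext as _

def hello(ui, repo, **opts):
    """print a greeting and the repository root (illustration only)"""
    ui.write(_("hello from %s\n") % repo.root)

def uisetup(ui):
    # run once when the extension is loaded, before any command dispatch
    ui.note(_("hello extension loaded\n"))

cmdtable = {
    # same (function, options, synopsis) tuple shape as the table above
    "hello": (hello, [], _('hg hello')),
}
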
3092 3056 def parseconfig(config):
3093 3057 """parse the --config options from the command line"""
3094 3058 parsed = []
3095 3059 for cfg in config:
3096 3060 try:
3097 3061 name, value = cfg.split('=', 1)
3098 3062 section, name = name.split('.', 1)
3099 3063 if not section or not name:
3100 3064 raise IndexError
3101 3065 parsed.append((section, name, value))
3102 3066 except (IndexError, ValueError):
3103 3067 raise util.Abort(_('malformed --config option: %s') % cfg)
3104 3068 return parsed
3105 3069
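Editor's note: a brief, hypothetical use of parseconfig as defined above, showing the accepted section.name=value shape and the rejection of a malformed value.

# illustrative only -- exercises parseconfig from this module
print parseconfig(['ui.username=alice', 'web.port=8080'])
# -> [('ui', 'username', 'alice'), ('web', 'port', '8080')]

try:
    parseconfig(['badoption'])          # no section/name separator -> rejected
except util.Abort, inst:
    print inst                          # malformed --config option: badoption
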
3106 3070 def dispatch(args):
3107 3071 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3108 3072 num = getattr(signal, name, None)
3109 3073 if num: signal.signal(num, catchterm)
3110 3074
3111 3075 try:
3112 3076 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3113 3077 except util.Abort, inst:
3114 3078 sys.stderr.write(_("abort: %s\n") % inst)
3115 3079 return -1
3116 3080
3117 3081 load_extensions(u)
3118 3082 u.addreadhook(load_extensions)
3119 3083
3120 3084 try:
3121 3085 cmd, func, args, options, cmdoptions = parse(u, args)
3122 3086 if options["encoding"]:
3123 3087 util._encoding = options["encoding"]
3124 3088 if options["encodingmode"]:
3125 3089 util._encodingmode = options["encodingmode"]
3126 3090 if options["time"]:
3127 3091 def get_times():
3128 3092 t = os.times()
3129 3093 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3130 3094 t = (t[0], t[1], t[2], t[3], time.clock())
3131 3095 return t
3132 3096 s = get_times()
3133 3097 def print_time():
3134 3098 t = get_times()
3135 3099 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3136 3100 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3137 3101 atexit.register(print_time)
3138 3102
3139 3103 # enter the debugger before command execution
3140 3104 if options['debugger']:
3141 3105 pdb.set_trace()
3142 3106
3143 3107 try:
3144 3108 if options['cwd']:
3145 3109 os.chdir(options['cwd'])
3146 3110
3147 3111 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3148 3112 not options["noninteractive"], options["traceback"],
3149 3113 parseconfig(options["config"]))
3150 3114
3151 3115 path = u.expandpath(options["repository"]) or ""
3152 3116 repo = path and hg.repository(u, path=path) or None
3153 3117 if repo and not repo.local():
3154 3118 raise util.Abort(_("repository '%s' is not local") % path)
3155 3119
3156 3120 if options['help']:
3157 3121 return help_(u, cmd, options['version'])
3158 3122 elif options['version']:
3159 3123 return version_(u)
3160 3124 elif not cmd:
3161 3125 return help_(u, 'shortlist')
3162 3126
3163 3127 if cmd not in norepo.split():
3164 3128 try:
3165 3129 if not repo:
3166 3130 repo = hg.repository(u, path=path)
3167 3131 u = repo.ui
3168 3132 for name in external.itervalues():
3169 3133 mod = sys.modules[name]
3170 3134 if hasattr(mod, 'reposetup'):
3171 3135 mod.reposetup(u, repo)
3172 3136 hg.repo_setup_hooks.append(mod.reposetup)
3173 3137 except hg.RepoError:
3174 3138 if cmd not in optionalrepo.split():
3175 3139 raise
3176 3140 d = lambda: func(u, repo, *args, **cmdoptions)
3177 3141 else:
3178 3142 d = lambda: func(u, *args, **cmdoptions)
3179 3143
3180 3144 try:
3181 3145 if options['profile']:
3182 3146 import hotshot, hotshot.stats
3183 3147 prof = hotshot.Profile("hg.prof")
3184 3148 try:
3185 3149 try:
3186 3150 return prof.runcall(d)
3187 3151 except:
3188 3152 try:
3189 3153 u.warn(_('exception raised - generating '
3190 3154 'profile anyway\n'))
3191 3155 except:
3192 3156 pass
3193 3157 raise
3194 3158 finally:
3195 3159 prof.close()
3196 3160 stats = hotshot.stats.load("hg.prof")
3197 3161 stats.strip_dirs()
3198 3162 stats.sort_stats('time', 'calls')
3199 3163 stats.print_stats(40)
3200 3164 elif options['lsprof']:
3201 3165 try:
3202 3166 from mercurial import lsprof
3203 3167 except ImportError:
3204 3168 raise util.Abort(_(
3205 3169 'lsprof not available - install from '
3206 3170 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3207 3171 p = lsprof.Profiler()
3208 3172 p.enable(subcalls=True)
3209 3173 try:
3210 3174 return d()
3211 3175 finally:
3212 3176 p.disable()
3213 3177 stats = lsprof.Stats(p.getstats())
3214 3178 stats.sort()
3215 3179 stats.pprint(top=10, file=sys.stderr, climit=5)
3216 3180 else:
3217 3181 return d()
3218 3182 finally:
3219 3183 u.flush()
3220 3184 except:
3221 3185 # enter the debugger when we hit an exception
3222 3186 if options['debugger']:
3223 3187 pdb.post_mortem(sys.exc_info()[2])
3224 3188 u.print_exc()
3225 3189 raise
3226 3190 except ParseError, inst:
3227 3191 if inst.args[0]:
3228 3192 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3229 3193 help_(u, inst.args[0])
3230 3194 else:
3231 3195 u.warn(_("hg: %s\n") % inst.args[1])
3232 3196 help_(u, 'shortlist')
3233 3197 except AmbiguousCommand, inst:
3234 3198 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3235 3199 (inst.args[0], " ".join(inst.args[1])))
3236 3200 except UnknownCommand, inst:
3237 3201 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3238 3202 help_(u, 'shortlist')
3239 3203 except hg.RepoError, inst:
3240 3204 u.warn(_("abort: %s!\n") % inst)
3241 3205 except lock.LockHeld, inst:
3242 3206 if inst.errno == errno.ETIMEDOUT:
3243 3207 reason = _('timed out waiting for lock held by %s') % inst.locker
3244 3208 else:
3245 3209 reason = _('lock held by %s') % inst.locker
3246 3210 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3247 3211 except lock.LockUnavailable, inst:
3248 3212 u.warn(_("abort: could not lock %s: %s\n") %
3249 3213 (inst.desc or inst.filename, inst.strerror))
3250 3214 except revlog.RevlogError, inst:
3251 3215 u.warn(_("abort: %s!\n") % inst)
3252 3216 except util.SignalInterrupt:
3253 3217 u.warn(_("killed!\n"))
3254 3218 except KeyboardInterrupt:
3255 3219 try:
3256 3220 u.warn(_("interrupted!\n"))
3257 3221 except IOError, inst:
3258 3222 if inst.errno == errno.EPIPE:
3259 3223 if u.debugflag:
3260 3224 u.warn(_("\nbroken pipe\n"))
3261 3225 else:
3262 3226 raise
3263 3227 except IOError, inst:
3264 3228 if hasattr(inst, "code"):
3265 3229 u.warn(_("abort: %s\n") % inst)
3266 3230 elif hasattr(inst, "reason"):
3267 3231 u.warn(_("abort: error: %s\n") % inst.reason[1])
3268 3232 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3269 3233 if u.debugflag:
3270 3234 u.warn(_("broken pipe\n"))
3271 3235 elif getattr(inst, "strerror", None):
3272 3236 if getattr(inst, "filename", None):
3273 3237 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3274 3238 else:
3275 3239 u.warn(_("abort: %s\n") % inst.strerror)
3276 3240 else:
3277 3241 raise
3278 3242 except OSError, inst:
3279 3243 if getattr(inst, "filename", None):
3280 3244 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3281 3245 else:
3282 3246 u.warn(_("abort: %s\n") % inst.strerror)
3283 3247 except util.UnexpectedOutput, inst:
3284 3248 u.warn(_("abort: %s") % inst[0])
3285 3249 if not isinstance(inst[1], basestring):
3286 3250 u.warn(" %r\n" % (inst[1],))
3287 3251 elif not inst[1]:
3288 3252 u.warn(_(" empty string\n"))
3289 3253 else:
3290 3254 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3291 3255 except util.Abort, inst:
3292 3256 u.warn(_("abort: %s\n") % inst)
3293 3257 except TypeError, inst:
3294 3258 # was this an argument error?
3295 3259 tb = traceback.extract_tb(sys.exc_info()[2])
3296 3260 if len(tb) > 2: # no
3297 3261 raise
3298 3262 u.debug(inst, "\n")
3299 3263 u.warn(_("%s: invalid arguments\n") % cmd)
3300 3264 help_(u, cmd)
3301 3265 except SystemExit, inst:
3302 3266 # Commands shouldn't sys.exit directly, but give a return code.
3303 3267         # Just in case, catch this and pass the exit code to the caller.
3304 3268 return inst.code
3305 3269 except:
3306 3270 u.warn(_("** unknown exception encountered, details follow\n"))
3307 3271 u.warn(_("** report bug details to "
3308 3272 "http://www.selenic.com/mercurial/bts\n"))
3309 3273 u.warn(_("** or mercurial@selenic.com\n"))
3310 3274 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3311 3275 % version.get_version())
3312 3276 raise
3313 3277
3314 3278 return -1
@@ -1,1971 +1,1865 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 import repo
12 12 demandload(globals(), "appendfile changegroup")
13 13 demandload(globals(), "changelog dirstate filelog manifest context")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "os revlog time util")
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = ('lookup', 'changegroupsubset')
19 19 supported = ('revlogv1', 'store')
20 20
21 21 def __del__(self):
22 22 self.transhandle = None
23 23 def __init__(self, parentui, path=None, create=0):
24 24 repo.repository.__init__(self)
25 25 if not path:
26 26 p = os.getcwd()
27 27 while not os.path.isdir(os.path.join(p, ".hg")):
28 28 oldp = p
29 29 p = os.path.dirname(p)
30 30 if p == oldp:
31 31 raise repo.RepoError(_("There is no Mercurial repository"
32 32 " here (.hg not found)"))
33 33 path = p
34 34
35 35 self.path = os.path.join(path, ".hg")
36 36 self.root = os.path.realpath(path)
37 37 self.origroot = path
38 38 self.opener = util.opener(self.path)
39 39 self.wopener = util.opener(self.root)
40 40
41 41 if not os.path.isdir(self.path):
42 42 if create:
43 43 if not os.path.exists(path):
44 44 os.mkdir(path)
45 45 os.mkdir(self.path)
46 46 os.mkdir(os.path.join(self.path, "store"))
47 47 requirements = ("revlogv1", "store")
48 48 reqfile = self.opener("requires", "w")
49 49 for r in requirements:
50 50 reqfile.write("%s\n" % r)
51 51 reqfile.close()
52 52 # create an invalid changelog
53 53 self.opener("00changelog.i", "a").write(
54 54 '\0\0\0\2' # represents revlogv2
55 55 ' dummy changelog to prevent using the old repo layout'
56 56 )
57 57 else:
58 58 raise repo.RepoError(_("repository %s not found") % path)
59 59 elif create:
60 60 raise repo.RepoError(_("repository %s already exists") % path)
61 61 else:
62 62 # find requirements
63 63 try:
64 64 requirements = self.opener("requires").read().splitlines()
65 65 except IOError, inst:
66 66 if inst.errno != errno.ENOENT:
67 67 raise
68 68 requirements = []
69 69 # check them
70 70 for r in requirements:
71 71 if r not in self.supported:
72 72 raise repo.RepoError(_("requirement '%s' not supported") % r)
73 73
74 74 # setup store
75 75 if "store" in requirements:
76 76 self.encodefn = util.encodefilename
77 77 self.decodefn = util.decodefilename
78 78 self.spath = os.path.join(self.path, "store")
79 79 else:
80 80 self.encodefn = lambda x: x
81 81 self.decodefn = lambda x: x
82 82 self.spath = self.path
83 83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
84 84
85 85 self.ui = ui.ui(parentui=parentui)
86 86 try:
87 87 self.ui.readconfig(self.join("hgrc"), self.root)
88 88 except IOError:
89 89 pass
90 90
91 91 v = self.ui.configrevlog()
92 92 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
93 93 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
94 94 fl = v.get('flags', None)
95 95 flags = 0
96 96 if fl != None:
97 97 for x in fl.split():
98 98 flags |= revlog.flagstr(x)
99 99 elif self.revlogv1:
100 100 flags = revlog.REVLOG_DEFAULT_FLAGS
101 101
102 102 v = self.revlogversion | flags
103 103 self.manifest = manifest.manifest(self.sopener, v)
104 104 self.changelog = changelog.changelog(self.sopener, v)
105 105
106 106 fallback = self.ui.config('ui', 'fallbackencoding')
107 107 if fallback:
108 108 util._fallbackencoding = fallback
109 109
110 110 # the changelog might not have the inline index flag
111 111 # on. If the format of the changelog is the same as found in
112 112 # .hgrc, apply any flags found in the .hgrc as well.
113 113 # Otherwise, just version from the changelog
114 114 v = self.changelog.version
115 115 if v == self.revlogversion:
116 116 v |= flags
117 117 self.revlogversion = v
118 118
119 119 self.tagscache = None
120 120 self.branchcache = None
121 121 self.nodetagscache = None
122 122 self.encodepats = None
123 123 self.decodepats = None
124 124 self.transhandle = None
125 125
126 126 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
127 127
128 128 def url(self):
129 129 return 'file:' + self.root
130 130
131 131 def hook(self, name, throw=False, **args):
132 132 def callhook(hname, funcname):
133 133 '''call python hook. hook is callable object, looked up as
134 134 name in python module. if callable returns "true", hook
135 135 fails, else passes. if hook raises exception, treated as
136 136 hook failure. exception propagates if throw is "true".
137 137
138 138 reason for "true" meaning "hook failed" is so that
139 139 unmodified commands (e.g. mercurial.commands.update) can
140 140 be run as hooks without wrappers to convert return values.'''
141 141
142 142 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
143 143 d = funcname.rfind('.')
144 144 if d == -1:
145 145 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
146 146 % (hname, funcname))
147 147 modname = funcname[:d]
148 148 try:
149 149 obj = __import__(modname)
150 150 except ImportError:
151 151 try:
152 152 # extensions are loaded with hgext_ prefix
153 153 obj = __import__("hgext_%s" % modname)
154 154 except ImportError:
155 155 raise util.Abort(_('%s hook is invalid '
156 156 '(import of "%s" failed)') %
157 157 (hname, modname))
158 158 try:
159 159 for p in funcname.split('.')[1:]:
160 160 obj = getattr(obj, p)
161 161 except AttributeError, err:
162 162 raise util.Abort(_('%s hook is invalid '
163 163 '("%s" is not defined)') %
164 164 (hname, funcname))
165 165 if not callable(obj):
166 166 raise util.Abort(_('%s hook is invalid '
167 167 '("%s" is not callable)') %
168 168 (hname, funcname))
169 169 try:
170 170 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
171 171 except (KeyboardInterrupt, util.SignalInterrupt):
172 172 raise
173 173 except Exception, exc:
174 174 if isinstance(exc, util.Abort):
175 175 self.ui.warn(_('error: %s hook failed: %s\n') %
176 176 (hname, exc.args[0]))
177 177 else:
178 178 self.ui.warn(_('error: %s hook raised an exception: '
179 179 '%s\n') % (hname, exc))
180 180 if throw:
181 181 raise
182 182 self.ui.print_exc()
183 183 return True
184 184 if r:
185 185 if throw:
186 186 raise util.Abort(_('%s hook failed') % hname)
187 187 self.ui.warn(_('warning: %s hook failed\n') % hname)
188 188 return r
189 189
190 190 def runhook(name, cmd):
191 191 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
192 192 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
193 193 r = util.system(cmd, environ=env, cwd=self.root)
194 194 if r:
195 195 desc, r = util.explain_exit(r)
196 196 if throw:
197 197 raise util.Abort(_('%s hook %s') % (name, desc))
198 198 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
199 199 return r
200 200
201 201 r = False
202 202 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
203 203 if hname.split(".", 1)[0] == name and cmd]
204 204 hooks.sort()
205 205 for hname, cmd in hooks:
206 206 if cmd.startswith('python:'):
207 207 r = callhook(hname, cmd[7:].strip()) or r
208 208 else:
209 209 r = runhook(hname, cmd) or r
210 210 return r
211 211
212 212 tag_disallowed = ':\r\n'
213 213
214 214 def tag(self, name, node, message, local, user, date):
215 215 '''tag a revision with a symbolic name.
216 216
217 217 if local is True, the tag is stored in a per-repository file.
218 218 otherwise, it is stored in the .hgtags file, and a new
219 219 changeset is committed with the change.
220 220
221 221 keyword arguments:
222 222
223 223 local: whether to store tag in non-version-controlled file
224 224 (default False)
225 225
226 226 message: commit message to use if committing
227 227
228 228 user: name of user to use if committing
229 229
230 230 date: date tuple to use if committing'''
231 231
232 232 for c in self.tag_disallowed:
233 233 if c in name:
234 234 raise util.Abort(_('%r cannot be used in a tag name') % c)
235 235
236 236 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
237 237
238 238 if local:
239 239 # local tags are stored in the current charset
240 240 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
241 241 self.hook('tag', node=hex(node), tag=name, local=local)
242 242 return
243 243
244 244 for x in self.status()[:5]:
245 245 if '.hgtags' in x:
246 246 raise util.Abort(_('working copy of .hgtags is changed '
247 247 '(please commit .hgtags manually)'))
248 248
249 249 # committed tags are stored in UTF-8
250 250 line = '%s %s\n' % (hex(node), util.fromlocal(name))
251 251 self.wfile('.hgtags', 'ab').write(line)
252 252 if self.dirstate.state('.hgtags') == '?':
253 253 self.add(['.hgtags'])
254 254
255 255 self.commit(['.hgtags'], message, user, date)
256 256 self.hook('tag', node=hex(node), tag=name, local=local)
257 257
258 258 def tags(self):
259 259 '''return a mapping of tag to node'''
260 260 if not self.tagscache:
261 261 self.tagscache = {}
262 262
263 263 def parsetag(line, context):
264 264 if not line:
265 265 return
266 266                 s = line.split(" ", 1)
267 267 if len(s) != 2:
268 268 self.ui.warn(_("%s: cannot parse entry\n") % context)
269 269 return
270 270 node, key = s
271 271 key = util.tolocal(key.strip()) # stored in UTF-8
272 272 try:
273 273 bin_n = bin(node)
274 274 except TypeError:
275 275 self.ui.warn(_("%s: node '%s' is not well formed\n") %
276 276 (context, node))
277 277 return
278 278 if bin_n not in self.changelog.nodemap:
279 279 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
280 280 (context, key))
281 281 return
282 282 self.tagscache[key] = bin_n
283 283
284 284 # read the tags file from each head, ending with the tip,
285 285 # and add each tag found to the map, with "newer" ones
286 286 # taking precedence
287 287 f = None
288 288 for rev, node, fnode in self._hgtagsnodes():
289 289 f = (f and f.filectx(fnode) or
290 290 self.filectx('.hgtags', fileid=fnode))
291 291 count = 0
292 292 for l in f.data().splitlines():
293 293 count += 1
294 294 parsetag(l, _("%s, line %d") % (str(f), count))
295 295
296 296 try:
297 297 f = self.opener("localtags")
298 298 count = 0
299 299 for l in f:
300 300 # localtags are stored in the local character set
301 301 # while the internal tag table is stored in UTF-8
302 302 l = util.fromlocal(l)
303 303 count += 1
304 304 parsetag(l, _("localtags, line %d") % count)
305 305 except IOError:
306 306 pass
307 307
308 308 self.tagscache['tip'] = self.changelog.tip()
309 309
310 310 return self.tagscache
311 311
312 312 def _hgtagsnodes(self):
313 313 heads = self.heads()
314 314 heads.reverse()
315 315 last = {}
316 316 ret = []
317 317 for node in heads:
318 318 c = self.changectx(node)
319 319 rev = c.rev()
320 320 try:
321 321 fnode = c.filenode('.hgtags')
322 322 except repo.LookupError:
323 323 continue
324 324 ret.append((rev, node, fnode))
325 325 if fnode in last:
326 326 ret[last[fnode]] = None
327 327 last[fnode] = len(ret) - 1
328 328 return [item for item in ret if item]
329 329
330 330 def tagslist(self):
331 331 '''return a list of tags ordered by revision'''
332 332 l = []
333 333 for t, n in self.tags().items():
334 334 try:
335 335 r = self.changelog.rev(n)
336 336 except:
337 337 r = -2 # sort to the beginning of the list if unknown
338 338 l.append((r, t, n))
339 339 l.sort()
340 340 return [(t, n) for r, t, n in l]
341 341
342 342 def nodetags(self, node):
343 343 '''return the tags associated with a node'''
344 344 if not self.nodetagscache:
345 345 self.nodetagscache = {}
346 346 for t, n in self.tags().items():
347 347 self.nodetagscache.setdefault(n, []).append(t)
348 348 return self.nodetagscache.get(node, [])
349 349
350 350 def _branchtags(self):
351 351 partial, last, lrev = self._readbranchcache()
352 352
353 353 tiprev = self.changelog.count() - 1
354 354 if lrev != tiprev:
355 355 self._updatebranchcache(partial, lrev+1, tiprev+1)
356 356 self._writebranchcache(partial, self.changelog.tip(), tiprev)
357 357
358 358 return partial
359 359
360 360 def branchtags(self):
361 361 if self.branchcache is not None:
362 362 return self.branchcache
363 363
364 364 self.branchcache = {} # avoid recursion in changectx
365 365 partial = self._branchtags()
366 366
367 367 # the branch cache is stored on disk as UTF-8, but in the local
368 368 # charset internally
369 369 for k, v in partial.items():
370 370 self.branchcache[util.tolocal(k)] = v
371 371 return self.branchcache
372 372
373 373 def _readbranchcache(self):
374 374 partial = {}
375 375 try:
376 376 f = self.opener("branches.cache")
377 377 lines = f.read().split('\n')
378 378 f.close()
379 379 last, lrev = lines.pop(0).rstrip().split(" ", 1)
380 380 last, lrev = bin(last), int(lrev)
381 381 if not (lrev < self.changelog.count() and
382 382 self.changelog.node(lrev) == last): # sanity check
383 383 # invalidate the cache
384 384 raise ValueError('Invalid branch cache: unknown tip')
385 385 for l in lines:
386 386 if not l: continue
387 387 node, label = l.rstrip().split(" ", 1)
388 388 partial[label] = bin(node)
389 389 except (KeyboardInterrupt, util.SignalInterrupt):
390 390 raise
391 391 except Exception, inst:
392 392 if self.ui.debugflag:
393 393 self.ui.warn(str(inst), '\n')
394 394 partial, last, lrev = {}, nullid, nullrev
395 395 return partial, last, lrev
396 396
397 397 def _writebranchcache(self, branches, tip, tiprev):
398 398 try:
399 399 f = self.opener("branches.cache", "w")
400 400 f.write("%s %s\n" % (hex(tip), tiprev))
401 401 for label, node in branches.iteritems():
402 402 f.write("%s %s\n" % (hex(node), label))
403 403 except IOError:
404 404 pass
405 405
406 406 def _updatebranchcache(self, partial, start, end):
407 407 for r in xrange(start, end):
408 408 c = self.changectx(r)
409 409 b = c.branch()
410 410 if b:
411 411 partial[b] = c.node()
412 412
413 413 def lookup(self, key):
414 414 if key == '.':
415 415 key = self.dirstate.parents()[0]
416 416 if key == nullid:
417 417 raise repo.RepoError(_("no revision checked out"))
418 418 elif key == 'null':
419 419 return nullid
420 420 n = self.changelog._match(key)
421 421 if n:
422 422 return n
423 423 if key in self.tags():
424 424 return self.tags()[key]
425 425 if key in self.branchtags():
426 426 return self.branchtags()[key]
427 427 n = self.changelog._partialmatch(key)
428 428 if n:
429 429 return n
430 430 raise repo.RepoError(_("unknown revision '%s'") % key)
431 431
432 432 def dev(self):
433 433 return os.lstat(self.path).st_dev
434 434
435 435 def local(self):
436 436 return True
437 437
438 438 def join(self, f):
439 439 return os.path.join(self.path, f)
440 440
441 441 def sjoin(self, f):
442 442 f = self.encodefn(f)
443 443 return os.path.join(self.spath, f)
444 444
445 445 def wjoin(self, f):
446 446 return os.path.join(self.root, f)
447 447
448 448 def file(self, f):
449 449 if f[0] == '/':
450 450 f = f[1:]
451 451 return filelog.filelog(self.sopener, f, self.revlogversion)
452 452
453 453 def changectx(self, changeid=None):
454 454 return context.changectx(self, changeid)
455 455
456 456 def workingctx(self):
457 457 return context.workingctx(self)
458 458
459 459 def parents(self, changeid=None):
460 460 '''
461 461 get list of changectxs for parents of changeid or working directory
462 462 '''
463 463 if changeid is None:
464 464 pl = self.dirstate.parents()
465 465 else:
466 466 n = self.changelog.lookup(changeid)
467 467 pl = self.changelog.parents(n)
468 468 if pl[1] == nullid:
469 469 return [self.changectx(pl[0])]
470 470 return [self.changectx(pl[0]), self.changectx(pl[1])]
471 471
472 472 def filectx(self, path, changeid=None, fileid=None):
473 473 """changeid can be a changeset revision, node, or tag.
474 474 fileid can be a file revision or node."""
475 475 return context.filectx(self, path, changeid, fileid)
476 476
477 477 def getcwd(self):
478 478 return self.dirstate.getcwd()
479 479
480 480 def wfile(self, f, mode='r'):
481 481 return self.wopener(f, mode)
482 482
483 483 def wread(self, filename):
484 484 if self.encodepats == None:
485 485 l = []
486 486 for pat, cmd in self.ui.configitems("encode"):
487 487 mf = util.matcher(self.root, "", [pat], [], [])[1]
488 488 l.append((mf, cmd))
489 489 self.encodepats = l
490 490
491 491 data = self.wopener(filename, 'r').read()
492 492
493 493 for mf, cmd in self.encodepats:
494 494 if mf(filename):
495 495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
496 496 data = util.filter(data, cmd)
497 497 break
498 498
499 499 return data
500 500
501 501 def wwrite(self, filename, data, fd=None):
502 502 if self.decodepats == None:
503 503 l = []
504 504 for pat, cmd in self.ui.configitems("decode"):
505 505 mf = util.matcher(self.root, "", [pat], [], [])[1]
506 506 l.append((mf, cmd))
507 507 self.decodepats = l
508 508
509 509 for mf, cmd in self.decodepats:
510 510 if mf(filename):
511 511 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
512 512 data = util.filter(data, cmd)
513 513 break
514 514
515 515 if fd:
516 516 return fd.write(data)
517 517 return self.wopener(filename, 'w').write(data)
518 518
519 519 def transaction(self):
520 520 tr = self.transhandle
521 521 if tr != None and tr.running():
522 522 return tr.nest()
523 523
524 524 # save dirstate for rollback
525 525 try:
526 526 ds = self.opener("dirstate").read()
527 527 except IOError:
528 528 ds = ""
529 529 self.opener("journal.dirstate", "w").write(ds)
530 530
531 531 renames = [(self.sjoin("journal"), self.sjoin("undo")),
532 532 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
533 533 tr = transaction.transaction(self.ui.warn, self.sopener,
534 534 self.sjoin("journal"),
535 535 aftertrans(renames))
536 536 self.transhandle = tr
537 537 return tr
538 538
539 539 def recover(self):
540 540 l = self.lock()
541 541 if os.path.exists(self.sjoin("journal")):
542 542 self.ui.status(_("rolling back interrupted transaction\n"))
543 543 transaction.rollback(self.sopener, self.sjoin("journal"))
544 544 self.reload()
545 545 return True
546 546 else:
547 547 self.ui.warn(_("no interrupted transaction available\n"))
548 548 return False
549 549
550 550 def rollback(self, wlock=None):
551 551 if not wlock:
552 552 wlock = self.wlock()
553 553 l = self.lock()
554 554 if os.path.exists(self.sjoin("undo")):
555 555 self.ui.status(_("rolling back last transaction\n"))
556 556 transaction.rollback(self.sopener, self.sjoin("undo"))
557 557 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
558 558 self.reload()
559 559 self.wreload()
560 560 else:
561 561 self.ui.warn(_("no rollback information available\n"))
562 562
563 563 def wreload(self):
564 564 self.dirstate.read()
565 565
566 566 def reload(self):
567 567 self.changelog.load()
568 568 self.manifest.load()
569 569 self.tagscache = None
570 570 self.nodetagscache = None
571 571
572 572 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
573 573 desc=None):
574 574 try:
575 575 l = lock.lock(lockname, 0, releasefn, desc=desc)
576 576 except lock.LockHeld, inst:
577 577 if not wait:
578 578 raise
579 579 self.ui.warn(_("waiting for lock on %s held by %r\n") %
580 580 (desc, inst.locker))
581 581 # default to 600 seconds timeout
582 582 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
583 583 releasefn, desc=desc)
584 584 if acquirefn:
585 585 acquirefn()
586 586 return l
587 587
588 588 def lock(self, wait=1):
589 589 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
590 590 desc=_('repository %s') % self.origroot)
591 591
592 592 def wlock(self, wait=1):
593 593 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
594 594 self.wreload,
595 595 desc=_('working directory of %s') % self.origroot)
596 596
597 597 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
598 598 """
599 599 commit an individual file as part of a larger transaction
600 600 """
601 601
602 602 t = self.wread(fn)
603 603 fl = self.file(fn)
604 604 fp1 = manifest1.get(fn, nullid)
605 605 fp2 = manifest2.get(fn, nullid)
606 606
607 607 meta = {}
608 608 cp = self.dirstate.copied(fn)
609 609 if cp:
610 610 meta["copy"] = cp
611 611 if not manifest2: # not a branch merge
612 612 meta["copyrev"] = hex(manifest1.get(cp, nullid))
613 613 fp2 = nullid
614 614 elif fp2 != nullid: # copied on remote side
615 615 meta["copyrev"] = hex(manifest1.get(cp, nullid))
616 616 elif fp1 != nullid: # copied on local side, reversed
617 617 meta["copyrev"] = hex(manifest2.get(cp))
618 618 fp2 = nullid
619 619 else: # directory rename
620 620 meta["copyrev"] = hex(manifest1.get(cp, nullid))
621 621 self.ui.debug(_(" %s: copy %s:%s\n") %
622 622 (fn, cp, meta["copyrev"]))
623 623 fp1 = nullid
624 624 elif fp2 != nullid:
625 625 # is one parent an ancestor of the other?
626 626 fpa = fl.ancestor(fp1, fp2)
627 627 if fpa == fp1:
628 628 fp1, fp2 = fp2, nullid
629 629 elif fpa == fp2:
630 630 fp2 = nullid
631 631
632 632 # is the file unmodified from the parent? report existing entry
633 633 if fp2 == nullid and not fl.cmp(fp1, t):
634 634 return fp1
635 635
636 636 changelist.append(fn)
637 637 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
638 638
639 639 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
640 640 if p1 is None:
641 641 p1, p2 = self.dirstate.parents()
642 642 return self.commit(files=files, text=text, user=user, date=date,
643 643 p1=p1, p2=p2, wlock=wlock)
644 644
645 645 def commit(self, files=None, text="", user=None, date=None,
646 646 match=util.always, force=False, lock=None, wlock=None,
647 647 force_editor=False, p1=None, p2=None, extra={}):
648 648
649 649 commit = []
650 650 remove = []
651 651 changed = []
652 652 use_dirstate = (p1 is None) # not rawcommit
653 653 extra = extra.copy()
654 654
655 655 if use_dirstate:
656 656 if files:
657 657 for f in files:
658 658 s = self.dirstate.state(f)
659 659 if s in 'nmai':
660 660 commit.append(f)
661 661 elif s == 'r':
662 662 remove.append(f)
663 663 else:
664 664 self.ui.warn(_("%s not tracked!\n") % f)
665 665 else:
666 666 changes = self.status(match=match)[:5]
667 667 modified, added, removed, deleted, unknown = changes
668 668 commit = modified + added
669 669 remove = removed
670 670 else:
671 671 commit = files
672 672
673 673 if use_dirstate:
674 674 p1, p2 = self.dirstate.parents()
675 675 update_dirstate = True
676 676 else:
677 677 p1, p2 = p1, p2 or nullid
678 678 update_dirstate = (self.dirstate.parents()[0] == p1)
679 679
680 680 c1 = self.changelog.read(p1)
681 681 c2 = self.changelog.read(p2)
682 682 m1 = self.manifest.read(c1[0]).copy()
683 683 m2 = self.manifest.read(c2[0])
684 684
685 685 if use_dirstate:
686 686 branchname = self.workingctx().branch()
687 687 try:
688 688 branchname = branchname.decode('UTF-8').encode('UTF-8')
689 689 except UnicodeDecodeError:
690 690 raise util.Abort(_('branch name not in UTF-8!'))
691 691 else:
692 692 branchname = ""
693 693
694 694 if use_dirstate:
695 695 oldname = c1[5].get("branch", "") # stored in UTF-8
696 696 if not commit and not remove and not force and p2 == nullid and \
697 697 branchname == oldname:
698 698 self.ui.status(_("nothing changed\n"))
699 699 return None
700 700
701 701 xp1 = hex(p1)
702 702 if p2 == nullid: xp2 = ''
703 703 else: xp2 = hex(p2)
704 704
705 705 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
706 706
707 707 if not wlock:
708 708 wlock = self.wlock()
709 709 if not lock:
710 710 lock = self.lock()
711 711 tr = self.transaction()
712 712
713 713 # check in files
714 714 new = {}
715 715 linkrev = self.changelog.count()
716 716 commit.sort()
717 717 for f in commit:
718 718 self.ui.note(f + "\n")
719 719 try:
720 720 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
721 721 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
722 722 except IOError:
723 723 if use_dirstate:
724 724 self.ui.warn(_("trouble committing %s!\n") % f)
725 725 raise
726 726 else:
727 727 remove.append(f)
728 728
729 729 # update manifest
730 730 m1.update(new)
731 731 remove.sort()
732 732
733 733 for f in remove:
734 734 if f in m1:
735 735 del m1[f]
736 736 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
737 737
738 738 # add changeset
739 739 new = new.keys()
740 740 new.sort()
741 741
742 742 user = user or self.ui.username()
743 743 if not text or force_editor:
744 744 edittext = []
745 745 if text:
746 746 edittext.append(text)
747 747 edittext.append("")
748 748 edittext.append("HG: user: %s" % user)
749 749 if p2 != nullid:
750 750 edittext.append("HG: branch merge")
751 751 edittext.extend(["HG: changed %s" % f for f in changed])
752 752 edittext.extend(["HG: removed %s" % f for f in remove])
753 753 if not changed and not remove:
754 754 edittext.append("HG: no files changed")
755 755 edittext.append("")
756 756 # run editor in the repository root
757 757 olddir = os.getcwd()
758 758 os.chdir(self.root)
759 759 text = self.ui.edit("\n".join(edittext), user)
760 760 os.chdir(olddir)
761 761
762 762 lines = [line.rstrip() for line in text.rstrip().splitlines()]
763 763 while lines and not lines[0]:
764 764 del lines[0]
765 765 if not lines:
766 766 return None
767 767 text = '\n'.join(lines)
768 768 if branchname:
769 769 extra["branch"] = branchname
770 770 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
771 771 user, date, extra)
772 772 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
773 773 parent2=xp2)
774 774 tr.close()
775 775
776 776 if use_dirstate or update_dirstate:
777 777 self.dirstate.setparents(n)
778 778 if use_dirstate:
779 779 self.dirstate.update(new, "n")
780 780 self.dirstate.forget(remove)
781 781
782 782 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
783 783 return n
784 784
785 785 def walk(self, node=None, files=[], match=util.always, badmatch=None):
786 786 '''
787 787 walk recursively through the directory tree or a given
788 788 changeset, finding all files matched by the match
789 789 function
790 790
791 791 results are yielded in a tuple (src, filename), where src
792 792 is one of:
793 793 'f' the file was found in the directory tree
794 794 'm' the file was only in the dirstate and not in the tree
795 795 'b' file was not found and matched badmatch
796 796 '''
797 797
798 798 if node:
799 799 fdict = dict.fromkeys(files)
800 800 for fn in self.manifest.read(self.changelog.read(node)[0]):
801 801 for ffn in fdict:
802 802 # match if the file is the exact name or a directory
803 803 if ffn == fn or fn.startswith("%s/" % ffn):
804 804 del fdict[ffn]
805 805 break
806 806 if match(fn):
807 807 yield 'm', fn
808 808 for fn in fdict:
809 809 if badmatch and badmatch(fn):
810 810 if match(fn):
811 811 yield 'b', fn
812 812 else:
813 813 self.ui.warn(_('%s: No such file in rev %s\n') % (
814 814 util.pathto(self.getcwd(), fn), short(node)))
815 815 else:
816 816 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
817 817 yield src, fn
818 818
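A small usage sketch, assuming repo is a localrepository instance; with node=None the walk covers the working directory:

    for src, fn in repo.walk():
        # src is 'f' (found in the tree), 'm' (dirstate only) or 'b' (badmatch)
        print src, fn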
819 819 def status(self, node1=None, node2=None, files=[], match=util.always,
820 820 wlock=None, list_ignored=False, list_clean=False):
821 821 """return status of files between two nodes or node and working directory
822 822
823 823 If node1 is None, use the first dirstate parent instead.
824 824 If node2 is None, compare node1 with working directory.
825 825 """
826 826
827 827 def fcmp(fn, mf):
828 828 t1 = self.wread(fn)
829 829 return self.file(fn).cmp(mf.get(fn, nullid), t1)
830 830
831 831 def mfmatches(node):
832 832 change = self.changelog.read(node)
833 833 mf = self.manifest.read(change[0]).copy()
834 834 for fn in mf.keys():
835 835 if not match(fn):
836 836 del mf[fn]
837 837 return mf
838 838
839 839 modified, added, removed, deleted, unknown = [], [], [], [], []
840 840 ignored, clean = [], []
841 841
842 842 compareworking = False
843 843 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
844 844 compareworking = True
845 845
846 846 if not compareworking:
847 847 # read the manifest from node1 before the manifest from node2,
848 848 # so that we'll hit the manifest cache if we're going through
849 849 # all the revisions in parent->child order.
850 850 mf1 = mfmatches(node1)
851 851
852 852 # are we comparing the working directory?
853 853 if not node2:
854 854 if not wlock:
855 855 try:
856 856 wlock = self.wlock(wait=0)
857 857 except lock.LockException:
858 858 wlock = None
859 859 (lookup, modified, added, removed, deleted, unknown,
860 860 ignored, clean) = self.dirstate.status(files, match,
861 861 list_ignored, list_clean)
862 862
863 863 # are we comparing working dir against its parent?
864 864 if compareworking:
865 865 if lookup:
866 866 # do a full compare of any files that might have changed
867 867 mf2 = mfmatches(self.dirstate.parents()[0])
868 868 for f in lookup:
869 869 if fcmp(f, mf2):
870 870 modified.append(f)
871 871 else:
872 872 clean.append(f)
873 873 if wlock is not None:
874 874 self.dirstate.update([f], "n")
875 875 else:
876 876 # we are comparing working dir against non-parent
877 877 # generate a pseudo-manifest for the working dir
878 878 # XXX: create it in dirstate.py ?
879 879 mf2 = mfmatches(self.dirstate.parents()[0])
880 880 for f in lookup + modified + added:
881 881 mf2[f] = ""
882 882 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
883 883 for f in removed:
884 884 if f in mf2:
885 885 del mf2[f]
886 886 else:
887 887 # we are comparing two revisions
888 888 mf2 = mfmatches(node2)
889 889
890 890 if not compareworking:
891 891 # flush lists from dirstate before comparing manifests
892 892 modified, added, clean = [], [], []
893 893
894 894 # make sure to sort the files so we talk to the disk in a
895 895 # reasonable order
896 896 mf2keys = mf2.keys()
897 897 mf2keys.sort()
898 898 for fn in mf2keys:
899 899 if mf1.has_key(fn):
900 900 if mf1.flags(fn) != mf2.flags(fn) or \
901 901 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
902 902 modified.append(fn)
903 903 elif list_clean:
904 904 clean.append(fn)
905 905 del mf1[fn]
906 906 else:
907 907 added.append(fn)
908 908
909 909 removed = mf1.keys()
910 910
911 911 # sort and return results:
912 912 for l in modified, added, removed, deleted, unknown, ignored, clean:
913 913 l.sort()
914 914 return (modified, added, removed, deleted, unknown, ignored, clean)
915 915
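A usage sketch for the method above; note that the ignored and clean lists stay empty unless the corresponding list_* flags are passed:

    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(list_ignored=True, list_clean=True)
    print "%d modified, %d added, %d clean" % (len(modified), len(added), len(clean))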
916 916 def add(self, list, wlock=None):
917 917 if not wlock:
918 918 wlock = self.wlock()
919 919 for f in list:
920 920 p = self.wjoin(f)
921 921 if not os.path.exists(p):
922 922 self.ui.warn(_("%s does not exist!\n") % f)
923 923 elif not os.path.isfile(p):
924 924 self.ui.warn(_("%s not added: only files supported currently\n")
925 925 % f)
926 926 elif self.dirstate.state(f) in 'an':
927 927 self.ui.warn(_("%s already tracked!\n") % f)
928 928 else:
929 929 self.dirstate.update([f], "a")
930 930
931 931 def forget(self, list, wlock=None):
932 932 if not wlock:
933 933 wlock = self.wlock()
934 934 for f in list:
935 935 if self.dirstate.state(f) not in 'ai':
936 936 self.ui.warn(_("%s not added!\n") % f)
937 937 else:
938 938 self.dirstate.forget([f])
939 939
940 940 def remove(self, list, unlink=False, wlock=None):
941 941 if unlink:
942 942 for f in list:
943 943 try:
944 944 util.unlink(self.wjoin(f))
945 945 except OSError, inst:
946 946 if inst.errno != errno.ENOENT:
947 947 raise
948 948 if not wlock:
949 949 wlock = self.wlock()
950 950 for f in list:
951 951 p = self.wjoin(f)
952 952 if os.path.exists(p):
953 953 self.ui.warn(_("%s still exists!\n") % f)
954 954 elif self.dirstate.state(f) == 'a':
955 955 self.dirstate.forget([f])
956 956 elif f not in self.dirstate:
957 957 self.ui.warn(_("%s not tracked!\n") % f)
958 958 else:
959 959 self.dirstate.update([f], "r")
960 960
961 961 def undelete(self, list, wlock=None):
962 962 p = self.dirstate.parents()[0]
963 963 mn = self.changelog.read(p)[0]
964 964 m = self.manifest.read(mn)
965 965 if not wlock:
966 966 wlock = self.wlock()
967 967 for f in list:
968 968 if self.dirstate.state(f) not in "r":
969 969                 self.ui.warn(_("%s not removed!\n") % f)
970 970 else:
971 971 t = self.file(f).read(m[f])
972 972 self.wwrite(f, t)
973 973 util.set_exec(self.wjoin(f), m.execf(f))
974 974 self.dirstate.update([f], "n")
975 975
976 976 def copy(self, source, dest, wlock=None):
977 977 p = self.wjoin(dest)
978 978 if not os.path.exists(p):
979 979 self.ui.warn(_("%s does not exist!\n") % dest)
980 980 elif not os.path.isfile(p):
981 981 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
982 982 else:
983 983 if not wlock:
984 984 wlock = self.wlock()
985 985 if self.dirstate.state(dest) == '?':
986 986 self.dirstate.update([dest], "a")
987 987 self.dirstate.copy(source, dest)
988 988
989 989 def heads(self, start=None):
990 990 heads = self.changelog.heads(start)
991 991 # sort the output in rev descending order
992 992 heads = [(-self.changelog.rev(h), h) for h in heads]
993 993 heads.sort()
994 994 return [n for (r, n) in heads]
995 995
996 # branchlookup returns a dict giving a list of branches for
997 # each head. A branch is defined as the tag of a node or
998 # the branch of the node's parents. If a node has multiple
999 # branch tags, tags are eliminated if they are visible from other
1000 # branch tags.
1001 #
1002 # So, for this graph: a->b->c->d->e
1003 #                      \         /
1004 #                       aa -----/
1005 # a has tag 2.6.12
1006 # d has tag 2.6.13
1007 # e would have branch tags for 2.6.12 and 2.6.13.  Because the node
1008 # tagged 2.6.12 can be reached from the node tagged 2.6.13, 2.6.12 is
1009 # eliminated from the list.
1010 #
1011 # It is possible that more than one head will have the same branch tag.
1012 # callers need to check the result for multiple heads under the same
1013 # branch tag if that is a problem for them (ie checkout of a specific
1014 # branch).
1015 #
1016 # passing in a specific branch will limit the depth of the search
1017 # through the parents. It won't limit the branches returned in the
1018 # result though.
1019 def branchlookup(self, heads=None, branch=None):
1020 if not heads:
1021 heads = self.heads()
1022 headt = [ h for h in heads ]
1023 chlog = self.changelog
1024 branches = {}
1025 merges = []
1026 seenmerge = {}
1027
1028 # traverse the tree once for each head, recording in the branches
1029 # dict which tags are visible from this head. The branches
1030 # dict also records which tags are visible from each tag
1031 # while we traverse.
1032 while headt or merges:
1033 if merges:
1034 n, found = merges.pop()
1035 visit = [n]
1036 else:
1037 h = headt.pop()
1038 visit = [h]
1039 found = [h]
1040 seen = {}
1041 while visit:
1042 n = visit.pop()
1043 if n in seen:
1044 continue
1045 pp = chlog.parents(n)
1046 tags = self.nodetags(n)
1047 if tags:
1048 for x in tags:
1049 if x == 'tip':
1050 continue
1051 for f in found:
1052 branches.setdefault(f, {})[n] = 1
1053 branches.setdefault(n, {})[n] = 1
1054 break
1055 if n not in found:
1056 found.append(n)
1057 if branch in tags:
1058 continue
1059 seen[n] = 1
1060 if pp[1] != nullid and n not in seenmerge:
1061 merges.append((pp[1], [x for x in found]))
1062 seenmerge[n] = 1
1063 if pp[0] != nullid:
1064 visit.append(pp[0])
1065 # traverse the branches dict, eliminating branch tags from each
1066 # head that are visible from another branch tag for that head.
1067 out = {}
1068 viscache = {}
1069 for h in heads:
1070 def visible(node):
1071 if node in viscache:
1072 return viscache[node]
1073 ret = {}
1074 visit = [node]
1075 while visit:
1076 x = visit.pop()
1077 if x in viscache:
1078 ret.update(viscache[x])
1079 elif x not in ret:
1080 ret[x] = 1
1081 if x in branches:
1082 visit[len(visit):] = branches[x].keys()
1083 viscache[node] = ret
1084 return ret
1085 if h not in branches:
1086 continue
1087 # O(n^2), but somewhat limited. This only searches the
1088 # tags visible from a specific head, not all the tags in the
1089 # whole repo.
1090 for b in branches[h]:
1091 vis = False
1092 for bb in branches[h].keys():
1093 if b != bb:
1094 if b in visible(bb):
1095 vis = True
1096 break
1097 if not vis:
1098 l = out.setdefault(h, [])
1099 l[len(l):] = self.nodetags(b)
1100 return out
1101
1102 996 def branches(self, nodes):
1103 997 if not nodes:
1104 998 nodes = [self.changelog.tip()]
1105 999 b = []
1106 1000 for n in nodes:
1107 1001 t = n
1108 1002 while 1:
1109 1003 p = self.changelog.parents(n)
1110 1004 if p[1] != nullid or p[0] == nullid:
1111 1005 b.append((t, n, p[0], p[1]))
1112 1006 break
1113 1007 n = p[0]
1114 1008 return b
1115 1009
1116 1010 def between(self, pairs):
1117 1011 r = []
1118 1012
1119 1013 for top, bottom in pairs:
1120 1014 n, l, i = top, [], 0
1121 1015 f = 1
1122 1016
1123 1017 while n != bottom:
1124 1018 p = self.changelog.parents(n)[0]
1125 1019 if i == f:
1126 1020 l.append(n)
1127 1021 f = f * 2
1128 1022 n = p
1129 1023 i += 1
1130 1024
1131 1025 r.append(l)
1132 1026
1133 1027 return r
1134 1028
1135 1029 def findincoming(self, remote, base=None, heads=None, force=False):
1136 1030 """Return list of roots of the subsets of missing nodes from remote
1137 1031
1138 1032 If base dict is specified, assume that these nodes and their parents
1139 1033 exist on the remote side and that no child of a node of base exists
1140 1034 in both remote and self.
1141 1035         Furthermore, base will be updated to include the nodes that exist
1142 1036         in both self and remote but whose children do not exist in both.
1143 1037 If a list of heads is specified, return only nodes which are heads
1144 1038 or ancestors of these heads.
1145 1039
1146 1040 All the ancestors of base are in self and in remote.
1147 1041 All the descendants of the list returned are missing in self.
1148 1042 (and so we know that the rest of the nodes are missing in remote, see
1149 1043 outgoing)
1150 1044 """
1151 1045 m = self.changelog.nodemap
1152 1046 search = []
1153 1047 fetch = {}
1154 1048 seen = {}
1155 1049 seenbranch = {}
1156 1050 if base == None:
1157 1051 base = {}
1158 1052
1159 1053 if not heads:
1160 1054 heads = remote.heads()
1161 1055
1162 1056 if self.changelog.tip() == nullid:
1163 1057 base[nullid] = 1
1164 1058 if heads != [nullid]:
1165 1059 return [nullid]
1166 1060 return []
1167 1061
1168 1062 # assume we're closer to the tip than the root
1169 1063 # and start by examining the heads
1170 1064 self.ui.status(_("searching for changes\n"))
1171 1065
1172 1066 unknown = []
1173 1067 for h in heads:
1174 1068 if h not in m:
1175 1069 unknown.append(h)
1176 1070 else:
1177 1071 base[h] = 1
1178 1072
1179 1073 if not unknown:
1180 1074 return []
1181 1075
1182 1076 req = dict.fromkeys(unknown)
1183 1077 reqcnt = 0
1184 1078
1185 1079 # search through remote branches
1186 1080 # a 'branch' here is a linear segment of history, with four parts:
1187 1081 # head, root, first parent, second parent
1188 1082 # (a branch always has two parents (or none) by definition)
1189 1083 unknown = remote.branches(unknown)
1190 1084 while unknown:
1191 1085 r = []
1192 1086 while unknown:
1193 1087 n = unknown.pop(0)
1194 1088 if n[0] in seen:
1195 1089 continue
1196 1090
1197 1091 self.ui.debug(_("examining %s:%s\n")
1198 1092 % (short(n[0]), short(n[1])))
1199 1093 if n[0] == nullid: # found the end of the branch
1200 1094 pass
1201 1095 elif n in seenbranch:
1202 1096 self.ui.debug(_("branch already found\n"))
1203 1097 continue
1204 1098 elif n[1] and n[1] in m: # do we know the base?
1205 1099 self.ui.debug(_("found incomplete branch %s:%s\n")
1206 1100 % (short(n[0]), short(n[1])))
1207 1101 search.append(n) # schedule branch range for scanning
1208 1102 seenbranch[n] = 1
1209 1103 else:
1210 1104 if n[1] not in seen and n[1] not in fetch:
1211 1105 if n[2] in m and n[3] in m:
1212 1106 self.ui.debug(_("found new changeset %s\n") %
1213 1107 short(n[1]))
1214 1108 fetch[n[1]] = 1 # earliest unknown
1215 1109 for p in n[2:4]:
1216 1110 if p in m:
1217 1111 base[p] = 1 # latest known
1218 1112
1219 1113 for p in n[2:4]:
1220 1114 if p not in req and p not in m:
1221 1115 r.append(p)
1222 1116 req[p] = 1
1223 1117 seen[n[0]] = 1
1224 1118
1225 1119 if r:
1226 1120 reqcnt += 1
1227 1121 self.ui.debug(_("request %d: %s\n") %
1228 1122 (reqcnt, " ".join(map(short, r))))
1229 1123 for p in xrange(0, len(r), 10):
1230 1124 for b in remote.branches(r[p:p+10]):
1231 1125 self.ui.debug(_("received %s:%s\n") %
1232 1126 (short(b[0]), short(b[1])))
1233 1127 unknown.append(b)
1234 1128
1235 1129 # do binary search on the branches we found
1236 1130 while search:
1237 1131 n = search.pop(0)
1238 1132 reqcnt += 1
1239 1133 l = remote.between([(n[0], n[1])])[0]
1240 1134 l.append(n[1])
1241 1135 p = n[0]
1242 1136 f = 1
1243 1137 for i in l:
1244 1138 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1245 1139 if i in m:
1246 1140 if f <= 2:
1247 1141 self.ui.debug(_("found new branch changeset %s\n") %
1248 1142 short(p))
1249 1143 fetch[p] = 1
1250 1144 base[i] = 1
1251 1145 else:
1252 1146 self.ui.debug(_("narrowed branch search to %s:%s\n")
1253 1147 % (short(p), short(i)))
1254 1148 search.append((p, i))
1255 1149 break
1256 1150 p, f = i, f * 2
1257 1151
1258 1152 # sanity check our fetch list
1259 1153 for f in fetch.keys():
1260 1154 if f in m:
1261 1155                 raise repo.RepoError(_("already have changeset ") + short(f))
1262 1156
1263 1157 if base.keys() == [nullid]:
1264 1158 if force:
1265 1159 self.ui.warn(_("warning: repository is unrelated\n"))
1266 1160 else:
1267 1161 raise util.Abort(_("repository is unrelated"))
1268 1162
1269 1163 self.ui.debug(_("found new changesets starting at ") +
1270 1164 " ".join([short(f) for f in fetch]) + "\n")
1271 1165
1272 1166 self.ui.debug(_("%d total queries\n") % reqcnt)
1273 1167
1274 1168 return fetch.keys()
1275 1169
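A hedged sketch of how a caller uses this discovery step; remote stands for a peer repository object such as one returned by hg.repository():

    base = {}
    missing_roots = repo.findincoming(remote, base=base)
    # missing_roots are the roots of the changesets we lack locally;
    # base now maps nodes known to be present on both sides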
1276 1170 def findoutgoing(self, remote, base=None, heads=None, force=False):
1277 1171 """Return list of nodes that are roots of subsets not in remote
1278 1172
1279 1173 If base dict is specified, assume that these nodes and their parents
1280 1174 exist on the remote side.
1281 1175 If a list of heads is specified, return only nodes which are heads
1282 1176 or ancestors of these heads, and return a second element which
1283 1177 contains all remote heads which get new children.
1284 1178 """
1285 1179 if base == None:
1286 1180 base = {}
1287 1181 self.findincoming(remote, base, heads, force=force)
1288 1182
1289 1183 self.ui.debug(_("common changesets up to ")
1290 1184 + " ".join(map(short, base.keys())) + "\n")
1291 1185
1292 1186 remain = dict.fromkeys(self.changelog.nodemap)
1293 1187
1294 1188 # prune everything remote has from the tree
1295 1189 del remain[nullid]
1296 1190 remove = base.keys()
1297 1191 while remove:
1298 1192 n = remove.pop(0)
1299 1193 if n in remain:
1300 1194 del remain[n]
1301 1195 for p in self.changelog.parents(n):
1302 1196 remove.append(p)
1303 1197
1304 1198 # find every node whose parents have been pruned
1305 1199 subset = []
1306 1200 # find every remote head that will get new children
1307 1201 updated_heads = {}
1308 1202 for n in remain:
1309 1203 p1, p2 = self.changelog.parents(n)
1310 1204 if p1 not in remain and p2 not in remain:
1311 1205 subset.append(n)
1312 1206 if heads:
1313 1207 if p1 in heads:
1314 1208 updated_heads[p1] = True
1315 1209 if p2 in heads:
1316 1210 updated_heads[p2] = True
1317 1211
1318 1212 # this is the set of all roots we have to push
1319 1213 if heads:
1320 1214 return subset, updated_heads.keys()
1321 1215 else:
1322 1216 return subset
1323 1217
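A matching sketch for the outgoing direction; remote is a peer repository as above:

    subset, updated_heads = repo.findoutgoing(remote, heads=remote.heads())
    # subset: roots of everything a push would send
    # updated_heads: remote heads that would gain new children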
1324 1218 def pull(self, remote, heads=None, force=False, lock=None):
1325 1219 mylock = False
1326 1220 if not lock:
1327 1221 lock = self.lock()
1328 1222 mylock = True
1329 1223
1330 1224 try:
1331 1225 fetch = self.findincoming(remote, force=force)
1332 1226 if fetch == [nullid]:
1333 1227 self.ui.status(_("requesting all changes\n"))
1334 1228
1335 1229 if not fetch:
1336 1230 self.ui.status(_("no changes found\n"))
1337 1231 return 0
1338 1232
1339 1233 if heads is None:
1340 1234 cg = remote.changegroup(fetch, 'pull')
1341 1235 else:
1342 1236 if 'changegroupsubset' not in remote.capabilities:
1343 1237                     raise util.Abort(_("Partial pull cannot be done because the other repository doesn't support changegroupsubset."))
1344 1238 cg = remote.changegroupsubset(fetch, heads, 'pull')
1345 1239 return self.addchangegroup(cg, 'pull', remote.url())
1346 1240 finally:
1347 1241 if mylock:
1348 1242 lock.release()
1349 1243
1350 1244 def push(self, remote, force=False, revs=None):
1351 1245 # there are two ways to push to remote repo:
1352 1246 #
1353 1247 # addchangegroup assumes local user can lock remote
1354 1248 # repo (local filesystem, old ssh servers).
1355 1249 #
1356 1250 # unbundle assumes local user cannot lock remote repo (new ssh
1357 1251 # servers, http servers).
1358 1252
1359 1253 if remote.capable('unbundle'):
1360 1254 return self.push_unbundle(remote, force, revs)
1361 1255 return self.push_addchangegroup(remote, force, revs)
1362 1256
1363 1257 def prepush(self, remote, force, revs):
1364 1258 base = {}
1365 1259 remote_heads = remote.heads()
1366 1260 inc = self.findincoming(remote, base, remote_heads, force=force)
1367 1261
1368 1262 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1369 1263 if revs is not None:
1370 1264 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1371 1265 else:
1372 1266 bases, heads = update, self.changelog.heads()
1373 1267
1374 1268 if not bases:
1375 1269 self.ui.status(_("no changes found\n"))
1376 1270 return None, 1
1377 1271 elif not force:
1378 1272 # check if we're creating new remote heads
1379 1273 # to be a remote head after push, node must be either
1380 1274 # - unknown locally
1381 1275 # - a local outgoing head descended from update
1382 1276 # - a remote head that's known locally and not
1383 1277 # ancestral to an outgoing head
1384 1278
1385 1279 warn = 0
1386 1280
1387 1281 if remote_heads == [nullid]:
1388 1282 warn = 0
1389 1283 elif not revs and len(heads) > len(remote_heads):
1390 1284 warn = 1
1391 1285 else:
1392 1286 newheads = list(heads)
1393 1287 for r in remote_heads:
1394 1288 if r in self.changelog.nodemap:
1395 1289 desc = self.changelog.heads(r)
1396 1290 l = [h for h in heads if h in desc]
1397 1291 if not l:
1398 1292 newheads.append(r)
1399 1293 else:
1400 1294 newheads.append(r)
1401 1295 if len(newheads) > len(remote_heads):
1402 1296 warn = 1
1403 1297
1404 1298 if warn:
1405 1299 self.ui.warn(_("abort: push creates new remote branches!\n"))
1406 1300 self.ui.status(_("(did you forget to merge?"
1407 1301 " use push -f to force)\n"))
1408 1302 return None, 1
1409 1303 elif inc:
1410 1304 self.ui.warn(_("note: unsynced remote changes!\n"))
1411 1305
1412 1306
1413 1307 if revs is None:
1414 1308 cg = self.changegroup(update, 'push')
1415 1309 else:
1416 1310 cg = self.changegroupsubset(update, revs, 'push')
1417 1311 return cg, remote_heads
1418 1312
1419 1313 def push_addchangegroup(self, remote, force, revs):
1420 1314 lock = remote.lock()
1421 1315
1422 1316 ret = self.prepush(remote, force, revs)
1423 1317 if ret[0] is not None:
1424 1318 cg, remote_heads = ret
1425 1319 return remote.addchangegroup(cg, 'push', self.url())
1426 1320 return ret[1]
1427 1321
1428 1322 def push_unbundle(self, remote, force, revs):
1429 1323 # local repo finds heads on server, finds out what revs it
1430 1324 # must push. once revs transferred, if server finds it has
1431 1325 # different heads (someone else won commit/push race), server
1432 1326 # aborts.
1433 1327
1434 1328 ret = self.prepush(remote, force, revs)
1435 1329 if ret[0] is not None:
1436 1330 cg, remote_heads = ret
1437 1331 if force: remote_heads = ['force']
1438 1332 return remote.unbundle(cg, remote_heads, 'push')
1439 1333 return ret[1]
1440 1334
1441 1335 def changegroupinfo(self, nodes):
1442 1336 self.ui.note(_("%d changesets found\n") % len(nodes))
1443 1337 if self.ui.debugflag:
1444 1338 self.ui.debug(_("List of changesets:\n"))
1445 1339 for node in nodes:
1446 1340 self.ui.debug("%s\n" % hex(node))
1447 1341
1448 1342 def changegroupsubset(self, bases, heads, source):
1449 1343 """This function generates a changegroup consisting of all the nodes
1450 1344 that are descendents of any of the bases, and ancestors of any of
1451 1345 the heads.
1452 1346
1453 1347 It is fairly complex as determining which filenodes and which
1454 1348 manifest nodes need to be included for the changeset to be complete
1455 1349 is non-trivial.
1456 1350
1457 1351 Another wrinkle is doing the reverse, figuring out which changeset in
1458 1352 the changegroup a particular filenode or manifestnode belongs to."""
1459 1353
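        # Hypothetical caller, with bases and heads given as binary changelog nodes:
        #   cg = repo.changegroupsubset([base_node], [head_node], 'pull')
        # The object returned is what addchangegroup() consumes on the receiving
        # side (see pull() above).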
1460 1354 self.hook('preoutgoing', throw=True, source=source)
1461 1355
1462 1356 # Set up some initial variables
1463 1357 # Make it easy to refer to self.changelog
1464 1358 cl = self.changelog
1465 1359 # msng is short for missing - compute the list of changesets in this
1466 1360 # changegroup.
1467 1361 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1468 1362 self.changegroupinfo(msng_cl_lst)
1469 1363 # Some bases may turn out to be superfluous, and some heads may be
1470 1364 # too. nodesbetween will return the minimal set of bases and heads
1471 1365 # necessary to re-create the changegroup.
1472 1366
1473 1367 # Known heads are the list of heads that it is assumed the recipient
1474 1368 # of this changegroup will know about.
1475 1369 knownheads = {}
1476 1370 # We assume that all parents of bases are known heads.
1477 1371 for n in bases:
1478 1372 for p in cl.parents(n):
1479 1373 if p != nullid:
1480 1374 knownheads[p] = 1
1481 1375 knownheads = knownheads.keys()
1482 1376 if knownheads:
1483 1377 # Now that we know what heads are known, we can compute which
1484 1378 # changesets are known. The recipient must know about all
1485 1379 # changesets required to reach the known heads from the null
1486 1380 # changeset.
1487 1381 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1488 1382 junk = None
1489 1383 # Transform the list into an ersatz set.
1490 1384 has_cl_set = dict.fromkeys(has_cl_set)
1491 1385 else:
1492 1386 # If there were no known heads, the recipient cannot be assumed to
1493 1387 # know about any changesets.
1494 1388 has_cl_set = {}
1495 1389
1496 1390 # Make it easy to refer to self.manifest
1497 1391 mnfst = self.manifest
1498 1392 # We don't know which manifests are missing yet
1499 1393 msng_mnfst_set = {}
1500 1394 # Nor do we know which filenodes are missing.
1501 1395 msng_filenode_set = {}
1502 1396
1503 1397 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1504 1398 junk = None
1505 1399
1506 1400 # A changeset always belongs to itself, so the changenode lookup
1507 1401 # function for a changenode is identity.
1508 1402 def identity(x):
1509 1403 return x
1510 1404
1511 1405 # A function generating function. Sets up an environment for the
1512 1406 # inner function.
1513 1407 def cmp_by_rev_func(revlog):
1514 1408 # Compare two nodes by their revision number in the environment's
1515 1409 # revision history. Since the revision number both represents the
1516 1410 # most efficient order to read the nodes in, and represents a
1517 1411 # topological sorting of the nodes, this function is often useful.
1518 1412 def cmp_by_rev(a, b):
1519 1413 return cmp(revlog.rev(a), revlog.rev(b))
1520 1414 return cmp_by_rev
1521 1415
1522 1416 # If we determine that a particular file or manifest node must be a
1523 1417 # node that the recipient of the changegroup will already have, we can
1524 1418 # also assume the recipient will have all the parents. This function
1525 1419 # prunes them from the set of missing nodes.
1526 1420 def prune_parents(revlog, hasset, msngset):
1527 1421 haslst = hasset.keys()
1528 1422 haslst.sort(cmp_by_rev_func(revlog))
1529 1423 for node in haslst:
1530 1424 parentlst = [p for p in revlog.parents(node) if p != nullid]
1531 1425 while parentlst:
1532 1426 n = parentlst.pop()
1533 1427 if n not in hasset:
1534 1428 hasset[n] = 1
1535 1429 p = [p for p in revlog.parents(n) if p != nullid]
1536 1430 parentlst.extend(p)
1537 1431 for n in hasset:
1538 1432 msngset.pop(n, None)
1539 1433
1540 1434 # This is a function generating function used to set up an environment
1541 1435 # for the inner function to execute in.
1542 1436 def manifest_and_file_collector(changedfileset):
1543 1437 # This is an information gathering function that gathers
1544 1438 # information from each changeset node that goes out as part of
1545 1439 # the changegroup. The information gathered is a list of which
1546 1440 # manifest nodes are potentially required (the recipient may
1547 1441 # already have them) and total list of all files which were
1548 1442 # changed in any changeset in the changegroup.
1549 1443 #
1550 1444 # We also remember the first changenode we saw any manifest
1551 1445 # referenced by so we can later determine which changenode 'owns'
1552 1446 # the manifest.
1553 1447 def collect_manifests_and_files(clnode):
1554 1448 c = cl.read(clnode)
1555 1449 for f in c[3]:
1556 1450 # This is to make sure we only have one instance of each
1557 1451 # filename string for each filename.
1558 1452 changedfileset.setdefault(f, f)
1559 1453 msng_mnfst_set.setdefault(c[0], clnode)
1560 1454 return collect_manifests_and_files
1561 1455
1562 1456 # Figure out which manifest nodes (of the ones we think might be part
1563 1457 # of the changegroup) the recipient must know about and remove them
1564 1458 # from the changegroup.
1565 1459 def prune_manifests():
1566 1460 has_mnfst_set = {}
1567 1461 for n in msng_mnfst_set:
1568 1462 # If a 'missing' manifest thinks it belongs to a changenode
1569 1463 # the recipient is assumed to have, obviously the recipient
1570 1464 # must have that manifest.
1571 1465 linknode = cl.node(mnfst.linkrev(n))
1572 1466 if linknode in has_cl_set:
1573 1467 has_mnfst_set[n] = 1
1574 1468 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1575 1469
1576 1470 # Use the information collected in collect_manifests_and_files to say
1577 1471 # which changenode any manifestnode belongs to.
1578 1472 def lookup_manifest_link(mnfstnode):
1579 1473 return msng_mnfst_set[mnfstnode]
1580 1474
1581 1475 # A function generating function that sets up the initial environment
1582 1476 # for the inner function.
1583 1477 def filenode_collector(changedfiles):
1584 1478 next_rev = [0]
1585 1479 # This gathers information from each manifestnode included in the
1586 1480 # changegroup about which filenodes the manifest node references
1587 1481 # so we can include those in the changegroup too.
1588 1482 #
1589 1483 # It also remembers which changenode each filenode belongs to. It
1590 1484 # does this by assuming that a filenode belongs to the changenode
1591 1485 # that the first manifest referencing it belongs to.
1592 1486 def collect_msng_filenodes(mnfstnode):
1593 1487 r = mnfst.rev(mnfstnode)
1594 1488 if r == next_rev[0]:
1595 1489 # If the last rev we looked at was the one just previous,
1596 1490 # we only need to see a diff.
1597 1491 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1598 1492 # For each line in the delta
1599 1493 for dline in delta.splitlines():
1600 1494 # get the filename and filenode for that line
1601 1495 f, fnode = dline.split('\0')
1602 1496 fnode = bin(fnode[:40])
1603 1497 f = changedfiles.get(f, None)
1604 1498 # And if the file is in the list of files we care
1605 1499 # about.
1606 1500 if f is not None:
1607 1501 # Get the changenode this manifest belongs to
1608 1502 clnode = msng_mnfst_set[mnfstnode]
1609 1503 # Create the set of filenodes for the file if
1610 1504 # there isn't one already.
1611 1505 ndset = msng_filenode_set.setdefault(f, {})
1612 1506 # And set the filenode's changelog node to the
1613 1507 # manifest's if it hasn't been set already.
1614 1508 ndset.setdefault(fnode, clnode)
1615 1509 else:
1616 1510 # Otherwise we need a full manifest.
1617 1511 m = mnfst.read(mnfstnode)
1618 1512 # For every file we care about.
1619 1513 for f in changedfiles:
1620 1514 fnode = m.get(f, None)
1621 1515 # If it's in the manifest
1622 1516 if fnode is not None:
1623 1517 # See comments above.
1624 1518 clnode = msng_mnfst_set[mnfstnode]
1625 1519 ndset = msng_filenode_set.setdefault(f, {})
1626 1520 ndset.setdefault(fnode, clnode)
1627 1521 # Remember the revision we hope to see next.
1628 1522 next_rev[0] = r + 1
1629 1523 return collect_msng_filenodes
1630 1524
1631 1525 # We have a list of filenodes we think we need for a file; let's remove
1632 1526 # all those we know the recipient must have.
1633 1527 def prune_filenodes(f, filerevlog):
1634 1528 msngset = msng_filenode_set[f]
1635 1529 hasset = {}
1636 1530 # If a 'missing' filenode thinks it belongs to a changenode we
1637 1531 # assume the recipient must have, then the recipient must have
1638 1532 # that filenode.
1639 1533 for n in msngset:
1640 1534 clnode = cl.node(filerevlog.linkrev(n))
1641 1535 if clnode in has_cl_set:
1642 1536 hasset[n] = 1
1643 1537 prune_parents(filerevlog, hasset, msngset)
1644 1538
1645 1539 # A function generating function that sets up a context for the
1646 1540 # inner function.
1647 1541 def lookup_filenode_link_func(fname):
1648 1542 msngset = msng_filenode_set[fname]
1649 1543 # Lookup the changenode the filenode belongs to.
1650 1544 def lookup_filenode_link(fnode):
1651 1545 return msngset[fnode]
1652 1546 return lookup_filenode_link
1653 1547
1654 1548 # Now that we have all these utility functions to help out and
1655 1549 # logically divide up the task, generate the group.
1656 1550 def gengroup():
1657 1551 # The set of changed files starts empty.
1658 1552 changedfiles = {}
1659 1553 # Create a changenode group generator that will call our functions
1660 1554 # back to lookup the owning changenode and collect information.
1661 1555 group = cl.group(msng_cl_lst, identity,
1662 1556 manifest_and_file_collector(changedfiles))
1663 1557 for chnk in group:
1664 1558 yield chnk
1665 1559
1666 1560 # The list of manifests has been collected by the generator
1667 1561 # calling our functions back.
1668 1562 prune_manifests()
1669 1563 msng_mnfst_lst = msng_mnfst_set.keys()
1670 1564 # Sort the manifestnodes by revision number.
1671 1565 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1672 1566 # Create a generator for the manifestnodes that calls our lookup
1673 1567 # and data collection functions back.
1674 1568 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1675 1569 filenode_collector(changedfiles))
1676 1570 for chnk in group:
1677 1571 yield chnk
1678 1572
1679 1573 # These are no longer needed, dereference and toss the memory for
1680 1574 # them.
1681 1575 msng_mnfst_lst = None
1682 1576 msng_mnfst_set.clear()
1683 1577
1684 1578 changedfiles = changedfiles.keys()
1685 1579 changedfiles.sort()
1686 1580 # Go through all our files in order sorted by name.
1687 1581 for fname in changedfiles:
1688 1582 filerevlog = self.file(fname)
1689 1583 # Toss out the filenodes that the recipient isn't really
1690 1584 # missing.
1691 1585 if msng_filenode_set.has_key(fname):
1692 1586 prune_filenodes(fname, filerevlog)
1693 1587 msng_filenode_lst = msng_filenode_set[fname].keys()
1694 1588 else:
1695 1589 msng_filenode_lst = []
1696 1590 # If any filenodes are left, generate the group for them,
1697 1591 # otherwise don't bother.
1698 1592 if len(msng_filenode_lst) > 0:
1699 1593 yield changegroup.genchunk(fname)
1700 1594 # Sort the filenodes by their revision #
1701 1595 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1702 1596 # Create a group generator and only pass in a changenode
1703 1597 # lookup function as we need to collect no information
1704 1598 # from filenodes.
1705 1599 group = filerevlog.group(msng_filenode_lst,
1706 1600 lookup_filenode_link_func(fname))
1707 1601 for chnk in group:
1708 1602 yield chnk
1709 1603 if msng_filenode_set.has_key(fname):
1710 1604 # Don't need this anymore, toss it to free memory.
1711 1605 del msng_filenode_set[fname]
1712 1606 # Signal that no more groups are left.
1713 1607 yield changegroup.closechunk()
1714 1608
1715 1609 if msng_cl_lst:
1716 1610 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1717 1611
1718 1612 return util.chunkbuffer(gengroup())
1719 1613
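In other words, changegroupsubset() ships every changeset that is an ancestor of a requested head but not already reachable from the parents of the bases. A toy illustration of that set arithmetic over a plain dict-based DAG (the graph and names are made up, not Mercurial data structures):

    # node -> list of parent nodes
    dag = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['b']}

    def ancestors(dag, nodes):
        seen, stack = set(), list(nodes)
        while stack:
            n = stack.pop()
            if n not in seen:
                seen.add(n)
                stack.extend(dag[n])
        return seen

    # bases = ['b'], heads = ['c', 'd']: the recipient is assumed to know
    # everything reachable from b's parents, so only b, c and d are missing.
    missing = ancestors(dag, ['c', 'd']) - ancestors(dag, ['a'])
    print(sorted(missing))    # ['b', 'c', 'd']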
1720 1614 def changegroup(self, basenodes, source):
1721 1615 """Generate a changegroup of all nodes that we have that a recipient
1722 1616 doesn't.
1723 1617
1724 1618 This is much easier than the previous function as we can assume that
1725 1619 the recipient has any changenode we aren't sending them."""
1726 1620
1727 1621 self.hook('preoutgoing', throw=True, source=source)
1728 1622
1729 1623 cl = self.changelog
1730 1624 nodes = cl.nodesbetween(basenodes, None)[0]
1731 1625 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1732 1626 self.changegroupinfo(nodes)
1733 1627
1734 1628 def identity(x):
1735 1629 return x
1736 1630
1737 1631 def gennodelst(revlog):
1738 1632 for r in xrange(0, revlog.count()):
1739 1633 n = revlog.node(r)
1740 1634 if revlog.linkrev(n) in revset:
1741 1635 yield n
1742 1636
1743 1637 def changed_file_collector(changedfileset):
1744 1638 def collect_changed_files(clnode):
1745 1639 c = cl.read(clnode)
1746 1640 for fname in c[3]:
1747 1641 changedfileset[fname] = 1
1748 1642 return collect_changed_files
1749 1643
1750 1644 def lookuprevlink_func(revlog):
1751 1645 def lookuprevlink(n):
1752 1646 return cl.node(revlog.linkrev(n))
1753 1647 return lookuprevlink
1754 1648
1755 1649 def gengroup():
1756 1650 # construct a list of all changed files
1757 1651 changedfiles = {}
1758 1652
1759 1653 for chnk in cl.group(nodes, identity,
1760 1654 changed_file_collector(changedfiles)):
1761 1655 yield chnk
1762 1656 changedfiles = changedfiles.keys()
1763 1657 changedfiles.sort()
1764 1658
1765 1659 mnfst = self.manifest
1766 1660 nodeiter = gennodelst(mnfst)
1767 1661 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1768 1662 yield chnk
1769 1663
1770 1664 for fname in changedfiles:
1771 1665 filerevlog = self.file(fname)
1772 1666 nodeiter = gennodelst(filerevlog)
1773 1667 nodeiter = list(nodeiter)
1774 1668 if nodeiter:
1775 1669 yield changegroup.genchunk(fname)
1776 1670 lookup = lookuprevlink_func(filerevlog)
1777 1671 for chnk in filerevlog.group(nodeiter, lookup):
1778 1672 yield chnk
1779 1673
1780 1674 yield changegroup.closechunk()
1781 1675
1782 1676 if nodes:
1783 1677 self.hook('outgoing', node=hex(nodes[0]), source=source)
1784 1678
1785 1679 return util.chunkbuffer(gengroup())
1786 1680
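Both generators hand their output to util.chunkbuffer() as a stream of chunks produced by changegroup.genchunk() and terminated by changegroup.closechunk(). A sketch of that framing, under the assumption that a chunk is a 4-byte big-endian length (counting the header itself) followed by the payload, with a zero length closing each group:

    import struct

    def genchunk(data):
        return struct.pack(">l", len(data) + 4) + data   # length includes the 4-byte header

    def closechunk():
        return struct.pack(">l", 0)                      # empty chunk terminates a group

    def chunkiter(read):
        # read(n) -> n bytes; yields chunk payloads until the zero terminator
        while True:
            l = struct.unpack(">l", read(4))[0]
            if l <= 4:
                break
            yield read(l - 4)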
1787 1681 def addchangegroup(self, source, srctype, url):
1788 1682 """add changegroup to repo.
1789 1683
1790 1684 return values:
1791 1685 - nothing changed or no source: 0
1792 1686 - more heads than before: 1+added heads (2..n)
1793 1687 - fewer heads than before: -1-removed heads (-2..-n)
1794 1688 - number of heads stays the same: 1
1795 1689 """
1796 1690 def csmap(x):
1797 1691 self.ui.debug(_("add changeset %s\n") % short(x))
1798 1692 return cl.count()
1799 1693
1800 1694 def revmap(x):
1801 1695 return cl.rev(x)
1802 1696
1803 1697 if not source:
1804 1698 return 0
1805 1699
1806 1700 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1807 1701
1808 1702 changesets = files = revisions = 0
1809 1703
1810 1704 tr = self.transaction()
1811 1705
1812 1706 # write changelog data to temp files so concurrent readers will not see
1813 1707 # inconsistent view
1814 1708 cl = None
1815 1709 try:
1816 1710 cl = appendfile.appendchangelog(self.sopener,
1817 1711 self.changelog.version)
1818 1712
1819 1713 oldheads = len(cl.heads())
1820 1714
1821 1715 # pull off the changeset group
1822 1716 self.ui.status(_("adding changesets\n"))
1823 1717 cor = cl.count() - 1
1824 1718 chunkiter = changegroup.chunkiter(source)
1825 1719 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1826 1720 raise util.Abort(_("received changelog group is empty"))
1827 1721 cnr = cl.count() - 1
1828 1722 changesets = cnr - cor
1829 1723
1830 1724 # pull off the manifest group
1831 1725 self.ui.status(_("adding manifests\n"))
1832 1726 chunkiter = changegroup.chunkiter(source)
1833 1727 # no need to check for empty manifest group here:
1834 1728 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1835 1729 # no new manifest will be created and the manifest group will
1836 1730 # be empty during the pull
1837 1731 self.manifest.addgroup(chunkiter, revmap, tr)
1838 1732
1839 1733 # process the files
1840 1734 self.ui.status(_("adding file changes\n"))
1841 1735 while 1:
1842 1736 f = changegroup.getchunk(source)
1843 1737 if not f:
1844 1738 break
1845 1739 self.ui.debug(_("adding %s revisions\n") % f)
1846 1740 fl = self.file(f)
1847 1741 o = fl.count()
1848 1742 chunkiter = changegroup.chunkiter(source)
1849 1743 if fl.addgroup(chunkiter, revmap, tr) is None:
1850 1744 raise util.Abort(_("received file revlog group is empty"))
1851 1745 revisions += fl.count() - o
1852 1746 files += 1
1853 1747
1854 1748 cl.writedata()
1855 1749 finally:
1856 1750 if cl:
1857 1751 cl.cleanup()
1858 1752
1859 1753 # make changelog see real files again
1860 1754 self.changelog = changelog.changelog(self.sopener,
1861 1755 self.changelog.version)
1862 1756 self.changelog.checkinlinesize(tr)
1863 1757
1864 1758 newheads = len(self.changelog.heads())
1865 1759 heads = ""
1866 1760 if oldheads and newheads != oldheads:
1867 1761 heads = _(" (%+d heads)") % (newheads - oldheads)
1868 1762
1869 1763 self.ui.status(_("added %d changesets"
1870 1764 " with %d changes to %d files%s\n")
1871 1765 % (changesets, revisions, files, heads))
1872 1766
1873 1767 if changesets > 0:
1874 1768 self.hook('pretxnchangegroup', throw=True,
1875 1769 node=hex(self.changelog.node(cor+1)), source=srctype,
1876 1770 url=url)
1877 1771
1878 1772 tr.close()
1879 1773
1880 1774 if changesets > 0:
1881 1775 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1882 1776 source=srctype, url=url)
1883 1777
1884 1778 for i in xrange(cor + 1, cnr + 1):
1885 1779 self.hook("incoming", node=hex(self.changelog.node(i)),
1886 1780 source=srctype, url=url)
1887 1781
1888 1782 # never return 0 here:
1889 1783 if newheads < oldheads:
1890 1784 return newheads - oldheads - 1
1891 1785 else:
1892 1786 return newheads - oldheads + 1
1893 1787
1894 1788
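The docstring of addchangegroup() above packs three outcomes into a single integer built around the head-count delta. A small helper showing how a caller could decode that convention (purely illustrative, not part of Mercurial):

    def describe_addchangegroup_result(ret):
        if ret == 0:
            return "no changes or no source"
        if ret == 1:
            return "changes added, head count unchanged"
        if ret > 1:
            return "%d new head(s)" % (ret - 1)
        return "%d head(s) removed" % (-ret - 1)    # ret is -1 minus the removed heads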
1895 1789 def stream_in(self, remote):
1896 1790 fp = remote.stream_out()
1897 1791 l = fp.readline()
1898 1792 try:
1899 1793 resp = int(l)
1900 1794 except ValueError:
1901 1795 raise util.UnexpectedOutput(
1902 1796 _('Unexpected response from remote server:'), l)
1903 1797 if resp == 1:
1904 1798 raise util.Abort(_('operation forbidden by server'))
1905 1799 elif resp == 2:
1906 1800 raise util.Abort(_('locking the remote repository failed'))
1907 1801 elif resp != 0:
1908 1802 raise util.Abort(_('the server sent an unknown error code'))
1909 1803 self.ui.status(_('streaming all changes\n'))
1910 1804 l = fp.readline()
1911 1805 try:
1912 1806 total_files, total_bytes = map(int, l.split(' ', 1))
1913 1807 except (ValueError, TypeError):
1914 1808 raise util.UnexpectedOutput(
1915 1809 _('Unexpected response from remote server:'), l)
1916 1810 self.ui.status(_('%d files to transfer, %s of data\n') %
1917 1811 (total_files, util.bytecount(total_bytes)))
1918 1812 start = time.time()
1919 1813 for i in xrange(total_files):
1920 1814 # XXX doesn't support '\n' or '\r' in filenames
1921 1815 l = fp.readline()
1922 1816 try:
1923 1817 name, size = l.split('\0', 1)
1924 1818 size = int(size)
1925 1819 except (ValueError, TypeError):
1926 1820 raise util.UnexpectedOutput(
1927 1821 _('Unexpected response from remote server:'), l)
1928 1822 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1929 1823 ofp = self.sopener(name, 'w')
1930 1824 for chunk in util.filechunkiter(fp, limit=size):
1931 1825 ofp.write(chunk)
1932 1826 ofp.close()
1933 1827 elapsed = time.time() - start
1934 1828 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1935 1829 (util.bytecount(total_bytes), elapsed,
1936 1830 util.bytecount(total_bytes / elapsed)))
1937 1831 self.reload()
1938 1832 return len(self.heads()) + 1
1939 1833
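stream_in() consumes the stream_out wire format line by line: a numeric status code, a "total_files total_bytes" line, then for each file a "name\0size" header followed by exactly size bytes of raw revlog data. A minimal stand-alone parser for that layout (fp is any file-like object; the store_file callback name is invented):

    def read_stream(fp, store_file):
        if int(fp.readline()) != 0:                     # non-zero code: server refused
            raise ValueError("stream request rejected")
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        for dummy in range(total_files):
            name, size = fp.readline().split('\0', 1)
            size = int(size)
            store_file(name, fp.read(size))             # caller decides where the data goes
        return total_files, total_bytes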
1940 1834 def clone(self, remote, heads=[], stream=False):
1941 1835 '''clone remote repository.
1942 1836
1943 1837 keyword arguments:
1944 1838 heads: list of revs to clone (forces use of pull)
1945 1839 stream: use streaming clone if possible'''
1946 1840
1947 1841 # now, all clients that can request uncompressed clones can
1948 1842 # read repo formats supported by all servers that can serve
1949 1843 # them.
1950 1844
1951 1845 # if revlog format changes, client will have to check version
1952 1846 # and format flags on "stream" capability, and use
1953 1847 # uncompressed only if compatible.
1954 1848
1955 1849 if stream and not heads and remote.capable('stream'):
1956 1850 return self.stream_in(remote)
1957 1851 return self.pull(remote, heads)
1958 1852
1959 1853 # used to avoid circular references so destructors work
1960 1854 def aftertrans(files):
1961 1855 renamefiles = [tuple(t) for t in files]
1962 1856 def a():
1963 1857 for src, dest in renamefiles:
1964 1858 util.rename(src, dest)
1965 1859 return a
1966 1860
1967 1861 def instance(ui, path, create):
1968 1862 return localrepository(ui, util.drop_scheme('file', path), create)
1969 1863
1970 1864 def islocal(path):
1971 1865 return True
@@ -1,495 +1,498 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "errno util os tempfile")
12 12
13 13 def filemerge(repo, fw, fo, wctx, mctx):
14 14 """perform a 3-way merge in the working directory
15 15
16 16 fw = filename in the working directory
17 17 fo = filename in other parent
18 18 wctx, mctx = working and merge changecontexts
19 19 """
20 20
21 21 def temp(prefix, ctx):
22 22 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
23 23 (fd, name) = tempfile.mkstemp(prefix=pre)
24 24 f = os.fdopen(fd, "wb")
25 25 repo.wwrite(ctx.path(), ctx.data(), f)
26 26 f.close()
27 27 return name
28 28
29 29 fcm = wctx.filectx(fw)
30 30 fco = mctx.filectx(fo)
31 31
32 32 if not fco.cmp(fcm.data()): # files identical?
33 33 return None
34 34
35 35 fca = fcm.ancestor(fco)
36 36 if not fca:
37 37 fca = repo.filectx(fw, fileid=nullrev)
38 38 a = repo.wjoin(fw)
39 39 b = temp("base", fca)
40 40 c = temp("other", fco)
41 41
42 42 if fw != fo:
43 43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 44 else:
45 45 repo.ui.status(_("merging %s\n") % fw)
46 46
47 47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48 48
49 49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 50 or "hgmerge")
51 51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 52 environ={'HG_FILE': fw,
53 53 'HG_MY_NODE': str(wctx.parents()[0]),
54 54 'HG_OTHER_NODE': str(mctx)})
55 55 if r:
56 56 repo.ui.warn(_("merging %s failed!\n") % fw)
57 57
58 58 os.unlink(b)
59 59 os.unlink(c)
60 60 return r
61 61
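filemerge() above resolves a conflict by writing the base and other versions to temporary files and invoking an external program (from $HGMERGE, ui.merge, or the hgmerge fallback) with the working copy, base and other paths as its three arguments; a non-zero exit status marks the file unresolved. A rough stand-alone equivalent of that call, with hypothetical paths:

    import os, subprocess

    def run_merge_tool(local, base, other, filename):
        cmd = os.environ.get("HGMERGE", "hgmerge")       # same fallback order, minus ui.merge
        env = dict(os.environ, HG_FILE=filename)
        return subprocess.call([cmd, local, base, other], env=env)
        # 0 means merged cleanly, anything else leaves the file in conflict

    # run_merge_tool("foo.c", "/tmp/foo.c~base.1234", "/tmp/foo.c~other.1234", "foo.c")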
62 62 def checkunknown(wctx, mctx):
63 63 "check for collisions between unknown files and files in mctx"
64 64 man = mctx.manifest()
65 65 for f in wctx.unknown():
66 66 if f in man:
67 67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 68 raise util.Abort(_("untracked local file '%s' differs"\
69 69 " from remote version") % f)
70 70
71 71 def checkcollision(mctx):
72 72 "check for case folding collisions in the destination context"
73 73 folded = {}
74 74 for fn in mctx.manifest():
75 75 fold = fn.lower()
76 76 if fold in folded:
77 77 raise util.Abort(_("case-folding collision between %s and %s")
78 78 % (fn, folded[fold]))
79 79 folded[fold] = fn
80 80
81 81 def forgetremoved(wctx, mctx):
82 82 """
83 83 Forget removed files
84 84
85 85 If we're jumping between revisions (as opposed to merging), and if
86 86 neither the working directory nor the target rev has the file,
87 87 then we need to remove it from the dirstate, to prevent the
88 88 dirstate from listing the file when it is no longer in the
89 89 manifest.
90 90 """
91 91
92 92 action = []
93 93 man = mctx.manifest()
94 94 for f in wctx.deleted() + wctx.removed():
95 95 if f not in man:
96 96 action.append((f, "f"))
97 97
98 98 return action
99 99
100 100 def findcopies(repo, m1, m2, ma, limit):
101 101 """
102 102 Find moves and copies between m1 and m2 back to limit linkrev
103 103 """
104 104
105 105 def findold(fctx):
106 106 "find files that path was copied from, back to linkrev limit"
107 107 old = {}
108 108 orig = fctx.path()
109 109 visit = [fctx]
110 110 while visit:
111 111 fc = visit.pop()
112 112 if fc.path() != orig and fc.path() not in old:
113 113 old[fc.path()] = 1
114 114 if fc.rev() < limit:
115 115 continue
116 116 visit += fc.parents()
117 117
118 118 old = old.keys()
119 119 old.sort()
120 120 return old
121 121
122 122 def nonoverlap(d1, d2, d3):
123 123 "Return list of elements in d1 not in d2 or d3"
124 124 l = [d for d in d1 if d not in d3 and d not in d2]
125 125 l.sort()
126 126 return l
127 127
128 128 def checkcopies(c, man):
129 129 '''check possible copies for filectx c'''
130 130 for of in findold(c):
131 131 if of not in man:
132 132 return
133 133 c2 = ctx(of, man[of])
134 134 ca = c.ancestor(c2)
135 135 if not ca: # unrelated
136 136 return
137 137 if ca.path() == c.path() or ca.path() == c2.path():
138 138 fullcopy[c.path()] = of
139 139 if c == ca or c2 == ca: # no merge needed, ignore copy
140 140 return
141 141 copy[c.path()] = of
142 142
143 143 def dirs(files):
144 144 d = {}
145 145 for f in files:
146 146 d[os.path.dirname(f)] = True
147 147 return d
148 148
149 149 if not repo.ui.configbool("merge", "followcopies", True):
150 150 return {}
151 151
152 152 # avoid silly behavior for update from empty dir
153 153 if not m1 or not m2 or not ma:
154 154 return {}
155 155
156 156 dcopies = repo.dirstate.copies()
157 157 copy = {}
158 158 fullcopy = {}
159 159 u1 = nonoverlap(m1, m2, ma)
160 160 u2 = nonoverlap(m2, m1, ma)
161 161 ctx = util.cachefunc(lambda f, n: repo.filectx(f, fileid=n[:20]))
162 162
163 163 for f in u1:
164 164 checkcopies(ctx(dcopies.get(f, f), m1[f]), m2)
165 165
166 166 for f in u2:
167 167 checkcopies(ctx(f, m2[f]), m1)
168 168
169 169 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
170 170 return copy
171 171
172 172 # generate a directory move map
173 173 d1, d2 = dirs(m1), dirs(m2)
174 174 invalid = {}
175 175 dirmove = {}
176 176
177 177 for dst, src in fullcopy.items():
178 178 dsrc, ddst = os.path.dirname(src), os.path.dirname(dst)
179 179 if dsrc in invalid:
180 180 continue
181 181 elif (dsrc in d1 and ddst in d1) or (dsrc in d2 and ddst in d2):
182 182 invalid[dsrc] = True
183 183 elif dsrc in dirmove and dirmove[dsrc] != ddst:
184 184 invalid[dsrc] = True
185 185 del dirmove[dsrc]
186 186 else:
187 187 dirmove[dsrc] = ddst
188 188
189 189 del d1, d2, invalid
190 190
191 191 if not dirmove:
192 192 return copy
193 193
194 194 # check unaccounted nonoverlapping files
195 195 for f in u1 + u2:
196 196 if f not in fullcopy:
197 197 d = os.path.dirname(f)
198 198 if d in dirmove:
199 199 copy[f] = dirmove[d] + "/" + os.path.basename(f)
200 200
201 201 return copy
202 202
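The last loop in findcopies() extends detected directory renames to files that have no individual copy record: if a file unique to one side lives in a directory that was moved wholesale, it is assumed to have moved with it. A toy illustration with made-up paths:

    import os

    dirmove = {"src/old": "src/new"}        # src/old/ was detected as renamed to src/new/
    copy = {}
    for f in ["src/old/readme.txt"]:        # file with no per-file copy information
        d = os.path.dirname(f)
        if d in dirmove:
            copy[f] = dirmove[d] + "/" + os.path.basename(f)
    print(copy)    # {'src/old/readme.txt': 'src/new/readme.txt'}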
203 203 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
204 204 """
205 205 Merge p1 and p2 with ancestor ma and generate merge action list
206 206
207 207 overwrite = whether we clobber working files
208 208 partial = function to filter file lists
209 209 """
210 210
211 211 repo.ui.note(_("resolving manifests\n"))
212 212 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
213 213 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
214 214
215 215 m1 = p1.manifest()
216 216 m2 = p2.manifest()
217 217 ma = pa.manifest()
218 218 backwards = (pa == p2)
219 219 action = []
220 220 copy = {}
221 221
222 222 def fmerge(f, f2=None, fa=None):
223 223 """merge executable flags"""
224 224 if not f2:
225 225 f2 = f
226 226 fa = f
227 227 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
228 228 return ((a^b) | (a^c)) ^ a
229 229
230 230 def act(msg, m, f, *args):
231 231 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
232 232 action.append((f, m) + args)
233 233
234 234 if not (backwards or overwrite):
235 235 copy = findcopies(repo, m1, m2, ma, pa.rev())
236 236 copied = dict.fromkeys(copy.values())
237 237
238 238 # Compare manifests
239 239 for f, n in m1.iteritems():
240 240 if partial and not partial(f):
241 241 continue
242 242 if f in m2:
243 243 # are files different?
244 244 if n != m2[f]:
245 245 a = ma.get(f, nullid)
246 246 # are both different from the ancestor?
247 247 if not overwrite and n != a and m2[f] != a:
248 248 act("versions differ", "m", f, f, f, fmerge(f), False)
249 249 # are we clobbering?
250 250 # is remote's version newer?
251 251 # or are we going back in time and clean?
252 252 elif overwrite or m2[f] != a or (backwards and not n[20:]):
253 253 act("remote is newer", "g", f, m2.execf(f))
254 254 # local is newer, not overwrite, check mode bits
255 255 elif fmerge(f) != m1.execf(f):
256 256 act("update permissions", "e", f, m2.execf(f))
257 257 # contents same, check mode bits
258 258 elif m1.execf(f) != m2.execf(f):
259 259 if overwrite or fmerge(f) != m1.execf(f):
260 260 act("update permissions", "e", f, m2.execf(f))
261 261 elif f in copied:
262 262 continue
263 263 elif f in copy:
264 264 f2 = copy[f]
265 265 if f2 not in m2: # directory rename
266 266 act("remote renamed directory to " + f2, "d",
267 267 f, None, f2, m1.execf(f))
268 268 elif f2 in m1: # case 2 A,B/B/B
269 269 act("local copied to " + f2, "m",
270 270 f, f2, f, fmerge(f, f2, f2), False)
271 271 else: # case 4,21 A/B/B
272 272 act("local moved to " + f2, "m",
273 273 f, f2, f, fmerge(f, f2, f2), False)
274 274 elif f in ma:
275 275 if n != ma[f] and not overwrite:
276 276 if repo.ui.prompt(
277 277 (_(" local changed %s which remote deleted\n") % f) +
278 278 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
279 279 act("prompt delete", "r", f)
280 280 else:
281 281 act("other deleted", "r", f)
282 282 else:
283 283 # file is created on branch or in working directory
284 284 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
285 285 act("remote deleted", "r", f)
286 286
287 287 for f, n in m2.iteritems():
288 288 if partial and not partial(f):
289 289 continue
290 290 if f in m1:
291 291 continue
292 292 if f in copied:
293 293 continue
294 294 if f in copy:
295 295 f2 = copy[f]
296 296 if f2 not in m1: # directory rename
297 297 act("local renamed directory to " + f2, "d",
298 298 None, f, f2, m2.execf(f))
299 299 elif f2 in m2: # rename case 1, A/A,B/A
300 300 act("remote copied to " + f, "m",
301 301 f2, f, f, fmerge(f2, f, f2), False)
302 302 else: # case 3,20 A/B/A
303 303 act("remote moved to " + f, "m",
304 304 f2, f, f, fmerge(f2, f, f2), True)
305 305 elif f in ma:
306 306 if overwrite or backwards:
307 307 act("recreating", "g", f, m2.execf(f))
308 308 elif n != ma[f]:
309 309 if repo.ui.prompt(
310 310 (_("remote changed %s which local deleted\n") % f) +
311 311 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
312 312 act("prompt recreating", "g", f, m2.execf(f))
313 313 else:
314 314 act("remote created", "g", f, m2.execf(f))
315 315
316 316 return action
317 317
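The fmerge() helper near the top of manifestmerge() combines the executable bits with ((a^b) | (a^c)) ^ a: keep the ancestor's flag unless a side changed it, in which case the changed side wins. An exhaustive check of that identity:

    for a in (0, 1):                # ancestor's executable bit
        for b in (0, 1):            # local bit
            for c in (0, 1):        # remote bit
                merged = ((a ^ b) | (a ^ c)) ^ a
                # whichever side differs from the ancestor wins; if neither does,
                # the ancestor's value (equal to both sides) is kept
                assert merged == (b if b != a else c)
    print("fmerge flag rule holds for all eight combinations")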
318 318 def applyupdates(repo, action, wctx, mctx):
319 319 "apply the merge action list to the working directory"
320 320
321 321 updated, merged, removed, unresolved = 0, 0, 0, 0
322 322 action.sort()
323 323 for a in action:
324 324 f, m = a[:2]
325 325 if f and f[0] == "/":
326 326 continue
327 327 if m == "r": # remove
328 328 repo.ui.note(_("removing %s\n") % f)
329 329 util.audit_path(f)
330 330 try:
331 331 util.unlink(repo.wjoin(f))
332 332 except OSError, inst:
333 333 if inst.errno != errno.ENOENT:
334 334 repo.ui.warn(_("update failed to remove %s: %s!\n") %
335 335 (f, inst.strerror))
336 336 removed += 1
337 337 elif m == "m": # merge
338 338 f2, fd, flag, move = a[2:]
339 339 r = filemerge(repo, f, f2, wctx, mctx)
340 340 if r > 0:
341 341 unresolved += 1
342 342 else:
343 343 if r is None:
344 344 updated += 1
345 345 else:
346 346 merged += 1
347 347 if f != fd:
348 348 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
349 349 repo.wwrite(fd, repo.wread(f))
350 350 if move:
351 351 repo.ui.debug(_("removing %s\n") % f)
352 352 os.unlink(repo.wjoin(f))
353 353 util.set_exec(repo.wjoin(fd), flag)
354 354 elif m == "g": # get
355 355 flag = a[2]
356 356 repo.ui.note(_("getting %s\n") % f)
357 357 t = mctx.filectx(f).data()
358 358 repo.wwrite(f, t)
359 359 util.set_exec(repo.wjoin(f), flag)
360 360 updated += 1
361 361 elif m == "d": # directory rename
362 362 f2, fd, flag = a[2:]
363 363 if f:
364 364 repo.ui.note(_("moving %s to %s\n") % (f, fd))
365 365 t = wctx.filectx(f).data()
366 366 repo.wwrite(fd, t)
367 367 util.set_exec(repo.wjoin(fd), flag)
368 368 util.unlink(repo.wjoin(f))
369 369 if f2:
370 370 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
371 371 t = mctx.filectx(f2).data()
372 372 repo.wwrite(fd, t)
373 373 util.set_exec(repo.wjoin(fd), flag)
374 374 updated += 1
375 375 elif m == "e": # exec
376 376 flag = a[2]
377 377 util.set_exec(repo.wjoin(f), flag)
378 378
379 379 return updated, merged, removed, unresolved
380 380
381 381 def recordupdates(repo, action, branchmerge):
382 382 "record merge actions to the dirstate"
383 383
384 384 for a in action:
385 385 f, m = a[:2]
386 386 if m == "r": # remove
387 387 if branchmerge:
388 388 repo.dirstate.update([f], 'r')
389 389 else:
390 390 repo.dirstate.forget([f])
391 391 elif m == "f": # forget
392 392 repo.dirstate.forget([f])
393 393 elif m == "g": # get
394 394 if branchmerge:
395 395 repo.dirstate.update([f], 'n', st_mtime=-1)
396 396 else:
397 397 repo.dirstate.update([f], 'n')
398 398 elif m == "m": # merge
399 399 f2, fd, flag, move = a[2:]
400 400 if branchmerge:
401 401 # We've done a branch merge, mark this file as merged
402 402 # so that we properly record the merger later
403 403 repo.dirstate.update([fd], 'm')
404 404 if f != f2: # copy/rename
405 405 if move:
406 406 repo.dirstate.update([f], 'r')
407 407 if f != fd:
408 408 repo.dirstate.copy(f, fd)
409 409 else:
410 410 repo.dirstate.copy(f2, fd)
411 411 else:
412 412 # We've update-merged a locally modified file, so
413 413 # we set the dirstate to emulate a normal checkout
414 414 # of that file some time in the past. Thus our
415 415 # merge will appear as a normal local file
416 416 # modification.
417 417 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
418 418 if move:
419 419 repo.dirstate.forget([f])
420 420 elif m == "d": # directory rename
421 421 f2, fd, flag = a[2:]
422 422 if branchmerge:
423 423 repo.dirstate.update([fd], 'a')
424 424 if f:
425 425 repo.dirstate.update([f], 'r')
426 426 repo.dirstate.copy(f, fd)
427 427 if f2:
428 428 repo.dirstate.copy(f2, fd)
429 429 else:
430 430 repo.dirstate.update([fd], 'n')
431 431 if f:
432 432 repo.dirstate.forget([f])
433 433
434 434 def update(repo, node, branchmerge, force, partial, wlock):
435 435 """
436 436 Perform a merge between the working directory and the given node
437 437
438 438 branchmerge = whether to merge between branches
439 439 force = whether to force branch merging or file overwriting
440 440 partial = a function to filter file lists (dirstate not updated)
441 441 wlock = working dir lock, if already held
442 442 """
443 443
444 if node is None:
445 node = "tip"
446
444 447 if not wlock:
445 448 wlock = repo.wlock()
446 449
447 450 overwrite = force and not branchmerge
448 451 forcemerge = force and branchmerge
449 452 wc = repo.workingctx()
450 453 pl = wc.parents()
451 454 p1, p2 = pl[0], repo.changectx(node)
452 455 pa = p1.ancestor(p2)
453 456 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
454 457
455 458 ### check phase
456 459 if not overwrite and len(pl) > 1:
457 460 raise util.Abort(_("outstanding uncommitted merges"))
458 461 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
459 462 if branchmerge:
460 463 raise util.Abort(_("there is nothing to merge, just use "
461 464 "'hg update' or look at 'hg heads'"))
462 465 elif not (overwrite or branchmerge):
463 466 raise util.Abort(_("update spans branches, use 'hg merge' "
464 467 "or 'hg update -C' to lose changes"))
465 468 if branchmerge and not forcemerge:
466 469 if wc.files():
467 470 raise util.Abort(_("outstanding uncommitted changes"))
468 471
469 472 ### calculate phase
470 473 action = []
471 474 if not force:
472 475 checkunknown(wc, p2)
473 476 if not util.checkfolding(repo.path):
474 477 checkcollision(p2)
475 478 if not branchmerge:
476 479 action += forgetremoved(wc, p2)
477 480 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
478 481
479 482 ### apply phase
480 483 if not branchmerge: # just jump to the new rev
481 484 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
482 485 if not partial:
483 486 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
484 487
485 488 stats = applyupdates(repo, action, wc, p2)
486 489
487 490 if not partial:
488 491 recordupdates(repo, action, branchmerge)
489 492 repo.dirstate.setparents(fp1, fp2)
490 493 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
491 494 if not branchmerge:
492 495 repo.opener("branch", "w").write(p2.branch() + "\n")
493 496
494 497 return stats
495 498
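update() above drives both a plain checkout and a branch merge through the same check/calculate/apply/record phases; only the branchmerge and force flags differ. The two typical invocations, sketched as the surrounding commands might issue them (repo and othernode are placeholders; keyword form used for readability):

    # jump the working directory to a revision (hg update)
    stats = update(repo, "tip", branchmerge=False, force=False, partial=None, wlock=None)

    # merge another head into the working directory (hg merge)
    stats = update(repo, othernode, branchmerge=True, force=False, partial=None, wlock=None)

    updated, merged, removed, unresolved = stats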
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed