##// END OF EJS Templates
use repo[changeid] to get a changectx
Matt Mackall -
r6747:f6c00b17 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,124 +1,124 b''
1 1 # acl.py - changeset access control for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # this hook makes it possible to allow or deny access to parts of a repo when
9 9 # taking incoming changesets.
10 10 #
11 11 # authorization is against local user name on system where hook is
12 12 # run, not committer of original changeset (since that is easy to
13 13 # spoof).
14 14 #
15 15 # acl hook is best to use if you use hgsh to set up restricted shells
16 16 # for authenticated users to only push to / pull from. not safe if
17 17 # user has interactive shell access, because they can disable hook.
18 18 # also not safe if remote users share one local account, because then
19 19 # no way to tell remote users apart.
20 20 #
21 21 # to use, configure acl extension in hgrc like this:
22 22 #
23 23 # [extensions]
24 24 # hgext.acl =
25 25 #
26 26 # [hooks]
27 27 # pretxnchangegroup.acl = python:hgext.acl.hook
28 28 #
29 29 # [acl]
30 30 # sources = serve # check if source of incoming changes in this list
31 31 # # ("serve" == ssh or http, "push", "pull", "bundle")
32 32 #
33 33 # allow and deny lists have subtree pattern (default syntax is glob)
34 34 # on left, user names on right. deny list checked before allow list.
35 35 #
36 36 # [acl.allow]
37 37 # # if acl.allow not present, all users allowed by default
38 38 # # empty acl.allow = no users allowed
39 39 # docs/** = doc_writer
40 40 # .hgtags = release_engineer
41 41 #
42 42 # [acl.deny]
43 43 # # if acl.deny not present, no users denied by default
44 44 # # empty acl.deny = all users allowed
45 45 # glob pattern = user4, user5
46 46 # ** = user6
47 47
48 48 from mercurial.i18n import _
49 49 from mercurial.node import bin, short
50 50 from mercurial import util
51 51 import getpass
52 52
class checker(object):
    '''acl checker.

    Evaluates the [acl.allow]/[acl.deny] hgrc sections against the local
    (authenticated) system user and the files touched by a changeset.
    '''

    def buildmatch(self, key):
        '''return tuple of (match function, list enabled).

        key is an hgrc section name ('acl.allow' or 'acl.deny'); a pattern
        from that section applies when the current user appears in the
        comma- or space-separated user list on the right-hand side.
        '''
        if not self.ui.has_section(key):
            self.ui.debug(_('acl: %s not enabled\n') % key)
            return None, False

        thisuser = self.getuser()
        # keep only the patterns whose user list mentions this user
        pats = [pat for pat, users in self.ui.configitems(key)
                if thisuser in users.replace(',', ' ').split()]
        self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
                      (key, len(pats), thisuser))
        if pats:
            match = util.matcher(self.repo.root, names=pats)[1]
        else:
            # section present but no patterns for this user: match nothing
            match = util.never
        return match, True

    def getuser(self):
        '''return name of authenticated user.'''
        return self.user

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo
        # local system user, not the changeset committer (easy to spoof)
        self.user = getpass.getuser()
        cfg = self.ui.config('acl', 'config')
        if cfg:
            # optional external file holding the allow/deny sections
            self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
        self.allow, self.allowable = self.buildmatch('acl.allow')
        self.deny, self.deniable = self.buildmatch('acl.deny')

    def skipsource(self, source):
        '''true if incoming changes from this source should be skipped.'''
        ok_sources = self.ui.config('acl', 'sources', 'serve').split()
        return source not in ok_sources

    def check(self, node):
        '''return if access allowed, raise exception if not.

        The deny list is checked before the allow list.
        '''
        files = self.repo[node].files()
        if self.deniable:
            for f in files:
                if self.deny(f):
                    self.ui.debug(_('acl: user %s denied on %s\n') %
                                  (self.getuser(), f))
                    raise util.Abort(_('acl: access denied for changeset %s') %
                                     short(node))
        if self.allowable:
            for f in files:
                if not self.allow(f):
                    self.ui.debug(_('acl: user %s not allowed on %s\n') %
                                  (self.getuser(), f))
                    raise util.Abort(_('acl: access denied for changeset %s') %
                                     short(node))
        self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
110 110
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    '''pretxnchangegroup entry point: validate every incoming changeset
    against the acl configuration, aborting the transaction on denial.'''
    if hooktype != 'pretxnchangegroup':
        raise util.Abort(_('config error - hook type "%s" cannot stop '
                           'incoming changesets') % hooktype)

    acl = checker(ui, repo)
    if acl.skipsource(source):
        ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
        return

    # 'node' is the first changeset added by the changegroup; check it
    # and every changeset that came in after it.
    cl = repo.changelog
    rev = cl.rev(bin(node))
    last = cl.count()
    while rev < last:
        acl.check(cl.node(rev))
        rev += 1
@@ -1,311 +1,311 b''
1 1 # bugzilla.py - bugzilla integration for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to update comments of bugzilla bugs when changesets
9 9 # that refer to bugs by id are seen. this hook does not change bug
10 10 # status, only comments.
11 11 #
12 12 # to configure, add items to '[bugzilla]' section of hgrc.
13 13 #
14 14 # to use, configure bugzilla extension and enable like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.bugzilla =
18 18 #
19 19 # [hooks]
20 20 # # run bugzilla hook on every change pulled or pushed in here
21 21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 22 #
23 23 # config items:
24 24 #
25 25 # section name is 'bugzilla'.
26 26 # [bugzilla]
27 27 #
28 28 # REQUIRED:
29 29 # host = bugzilla # mysql server where bugzilla database lives
30 30 # password = ** # user's password
31 31 # version = 2.16 # version of bugzilla installed
32 32 #
33 33 # OPTIONAL:
34 34 # bzuser = ... # fallback bugzilla user name to record comments with
35 35 # db = bugs # database to connect to
36 36 # notify = ... # command to run to get bugzilla to send mail
37 37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 38 # strip = 0 # number of slashes to strip for url paths
39 39 # style = ... # style file to use when formatting comments
40 40 # template = ... # template to use when formatting comments
41 41 # timeout = 5 # database connection timeout (seconds)
42 42 # user = bugs # user to connect to database as
43 43 # [web]
44 44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 45 #
46 46 # if hg committer names are not same as bugzilla user names, use
47 47 # "usermap" feature to map from committer email to bugzilla user name.
48 48 # usermap can be in hgrc or separate config file.
49 49 #
50 50 # [bugzilla]
51 51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 52 # [usermap]
53 53 # committer_email = bugzilla_user_name
54 54
55 55 from mercurial.i18n import _
56 56 from mercurial.node import short
57 57 from mercurial import cmdutil, templater, util
58 58 import re, time
59 59
60 60 MySQLdb = None
61 61
def buglist(ids):
    '''render a sequence of bug ids as a SQL "in" list, e.g. "(1,2,3)".'''
    return '(%s)' % ','.join([str(i) for i in ids])
64 64
class bugzilla_2_16(object):
    '''support for bugzilla version 2.16.

    Talks directly to the bugzilla MySQL database: comments are inserted
    into the longdescs table and the external notify command is run to
    make bugzilla send mail.
    '''

    def __init__(self, ui):
        self.ui = ui
        host = self.ui.config('bugzilla', 'host', 'localhost')
        user = self.ui.config('bugzilla', 'user', 'bugs')
        passwd = self.ui.config('bugzilla', 'password')
        db = self.ui.config('bugzilla', 'db', 'bugs')
        timeout = int(self.ui.config('bugzilla', 'timeout', 5))
        usermap = self.ui.config('bugzilla', 'usermap')
        if usermap:
            # committer -> bugzilla user mappings from a separate cfg file
            self.ui.readsections(usermap, 'usermap')
        self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
                     (host, db, user, '*' * len(passwd)))
        self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
                                    db=db, connect_timeout=timeout)
        self.cursor = self.conn.cursor()
        # field id of the "longdesc" (comment) field, needed when writing
        # bugs_activity rows in add_comment
        self.run('select fieldid from fielddefs where name = "longdesc"')
        ids = self.cursor.fetchall()
        if len(ids) != 1:
            raise util.Abort(_('unknown database schema'))
        self.longdesc_id = ids[0][0]
        # cache of bugzilla user name -> numeric user id
        self.user_ids = {}

    def run(self, *args, **kwargs):
        '''run a query.'''
        self.ui.note(_('query: %s %s\n') % (args, kwargs))
        try:
            self.cursor.execute(*args, **kwargs)
        except MySQLdb.MySQLError, err:
            self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
            raise

    def filter_real_bug_ids(self, ids):
        '''filter not-existing bug ids from list.

        Returns the sorted subset of ids present in the bugs table.
        '''
        self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
        ids = [c[0] for c in self.cursor.fetchall()]
        ids.sort()
        return ids

    def filter_unknown_bug_ids(self, node, ids):
        '''filter bug ids from list that already refer to this changeset.'''

        # a bug already "knows" the changeset if any of its comments
        # contains the changeset's short hash
        self.run('''select bug_id from longdescs where
                    bug_id in %s and thetext like "%%%s%%"''' %
                 (buglist(ids), short(node)))
        unknown = dict.fromkeys(ids)
        for (id,) in self.cursor.fetchall():
            self.ui.status(_('bug %d already knows about changeset %s\n') %
                           (id, short(node)))
            unknown.pop(id, None)
        ids = unknown.keys()
        ids.sort()
        return ids

    def notify(self, ids):
        '''tell bugzilla to send mail.'''

        self.ui.status(_('telling bugzilla to send mail:\n'))
        for id in ids:
            self.ui.status(_(' bug %s\n') % id)
            # the notify command is run once per bug, with the bug id
            # substituted into the configured command line
            cmd = self.ui.config('bugzilla', 'notify',
                                 'cd /var/www/html/bugzilla && '
                                 './processmail %s nobody@nowhere.com') % id
            fp = util.popen('(%s) 2>&1' % cmd)
            out = fp.read()
            ret = fp.close()
            if ret:
                self.ui.warn(out)
                raise util.Abort(_('bugzilla notify command %s') %
                                 util.explain_exit(ret)[0])
        self.ui.status(_('done\n'))

    def get_user_id(self, user):
        '''look up numeric bugzilla user id.

        Raises KeyError if the user cannot be found.
        '''
        try:
            return self.user_ids[user]
        except KeyError:
            try:
                # user may already be a numeric id
                userid = int(user)
            except ValueError:
                self.ui.note(_('looking up user %s\n') % user)
                self.run('''select userid from profiles
                            where login_name like %s''', user)
                all = self.cursor.fetchall()
                if len(all) != 1:
                    raise KeyError(user)
                userid = int(all[0][0])
            self.user_ids[user] = userid
            return userid

    def map_committer(self, user):
        '''map name of committer to bugzilla user name.'''
        # case-insensitive lookup in the [usermap] section; falls back
        # to the committer name itself when no mapping exists
        for committer, bzuser in self.ui.configitems('usermap'):
            if committer.lower() == user.lower():
                return bzuser
        return user

    def add_comment(self, bugid, text, committer):
        '''add comment to bug. try adding comment as committer of
        changeset, otherwise as default bugzilla user.'''
        user = self.map_committer(committer)
        try:
            userid = self.get_user_id(user)
        except KeyError:
            try:
                # fall back to the configured bzuser, if any
                defaultuser = self.ui.config('bugzilla', 'bzuser')
                if not defaultuser:
                    raise util.Abort(_('cannot find bugzilla user id for %s') %
                                     user)
                userid = self.get_user_id(defaultuser)
            except KeyError:
                raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
                                 (user, defaultuser))
        now = time.strftime('%Y-%m-%d %H:%M:%S')
        # insert the comment itself, then record the comment activity
        self.run('''insert into longdescs
                    (bug_id, who, bug_when, thetext)
                    values (%s, %s, %s, %s)''',
                 (bugid, userid, now, text))
        self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
                    values (%s, %s, %s, %s)''',
                 (bugid, userid, now, self.longdesc_id))
188 188
class bugzilla(object):
    '''front-end that dispatches to the version-specific access class.'''

    # supported versions of bugzilla. different versions have
    # different schemas.
    _versions = {
        '2.16': bugzilla_2_16,
        }

    # default regexp recognizing references like "bug 123",
    # "bugs 1, 2 and 3", "bug #45" in commit messages
    _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
                       r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')

    # shared, lazily-created connection object (class attribute so it is
    # reused across instances)
    _bz = None

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def bz(self):
        '''return object that knows how to talk to bugzilla version in
        use.'''

        if bugzilla._bz is None:
            bzversion = self.ui.config('bugzilla', 'version')
            try:
                bzclass = bugzilla._versions[bzversion]
            except KeyError:
                raise util.Abort(_('bugzilla version %s not supported') %
                                 bzversion)
            bugzilla._bz = bzclass(self.ui)
        return bugzilla._bz

    def __getattr__(self, key):
        # delegate everything else to the version-specific implementation
        return getattr(self.bz(), key)

    # compiled lazily from config on first find_bug_ids call
    _bug_re = None
    _split_re = None

    def find_bug_ids(self, ctx):
        '''find valid bug ids that are referred to in changeset
        comments and that do not already have references to this
        changeset.'''

        if bugzilla._bug_re is None:
            bugzilla._bug_re = re.compile(
                self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
                re.IGNORECASE)
            bugzilla._split_re = re.compile(r'\D+')
        start = 0
        ids = {}
        while True:
            m = bugzilla._bug_re.search(ctx.description(), start)
            if not m:
                break
            start = m.end()
            # split the matched group on non-digit runs to get the ids;
            # the dict deduplicates repeated mentions
            for id in bugzilla._split_re.split(m.group(1)):
                if not id: continue
                ids[int(id)] = 1
        ids = ids.keys()
        if ids:
            ids = self.filter_real_bug_ids(ids)
        if ids:
            ids = self.filter_unknown_bug_ids(ctx.node(), ids)
        return ids

    def update(self, bugid, ctx):
        '''update bugzilla bug with reference to changeset.'''

        def webroot(root):
            '''strip leading prefix of repo root and turn into
            url-safe path.'''
            count = int(self.ui.config('bugzilla', 'strip', 0))
            root = util.pconvert(root)
            while count > 0:
                c = root.find('/')
                if c == -1:
                    break
                root = root[c+1:]
                count -= 1
            return root

        mapfile = self.ui.config('bugzilla', 'style')
        tmpl = self.ui.config('bugzilla', 'template')
        t = cmdutil.changeset_templater(self.ui, self.repo,
                                        False, mapfile, False)
        if not mapfile and not tmpl:
            # built-in default comment template
            tmpl = _('changeset {node|short} in repo {root} refers '
                     'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
        if tmpl:
            tmpl = templater.parsestring(tmpl, quoted=False)
            t.use_template(tmpl)
        # render the comment into a ui buffer instead of the terminal
        self.ui.pushbuffer()
        t.show(changenode=ctx.node(), changes=ctx.changeset(),
               bug=str(bugid),
               hgweb=self.ui.config('web', 'baseurl'),
               root=self.repo.root,
               webroot=webroot(self.repo.root))
        data = self.ui.popbuffer()
        self.add_comment(bugid, data, util.email(ctx.user()))
286 286
def hook(ui, repo, hooktype, node=None, **kwargs):
    '''add comment to bugzilla for each changeset that refers to a
    bugzilla bug id. only add a comment once per bug, so same change
    seen multiple times does not fill bug with duplicate data.'''
    # import lazily (and publish via the module global) so the extension
    # can be loaded on systems without MySQLdb installed
    try:
        import MySQLdb as mysql
        global MySQLdb
        MySQLdb = mysql
    except ImportError, err:
        raise util.Abort(_('python mysql support not available: %s') % err)

    # the hook must be attached to a hook type that supplies a changeset
    # id (e.g. incoming)
    if node is None:
        raise util.Abort(_('hook type %s does not pass a changeset id') %
                         hooktype)
    try:
        bz = bugzilla(ui, repo)
        ctx = repo[node]
        ids = bz.find_bug_ids(ctx)
        if ids:
            for id in ids:
                bz.update(id, ctx)
            # send mail only after all comments are in place
            bz.notify(ids)
    except MySQLdb.MySQLError, err:
        raise util.Abort(_('database error: %s') % err[1])
311 311
@@ -1,41 +1,41 b''
1 1 # Mercurial extension to provide the 'hg children' command
2 2 #
3 3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
4 4 # Author(s):
5 5 # Thomas Arendsen Hein <thomas@intevation.de>
6 6 #
7 7 # This software may be used and distributed according to the terms
8 8 # of the GNU General Public License, incorporated herein by reference.
9 9
10 10 from mercurial import cmdutil
11 11 from mercurial.commands import templateopts
12 12 from mercurial.i18n import _
13 13
14 14
def children(ui, repo, file_=None, **opts):
    """show the children of the given or working dir revision

    Print the children of the working directory's revisions.
    If a revision is given via --rev, the children of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (after the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    # resolve the starting context: file-based lookup when a file is
    # given, plain revision lookup otherwise
    if file_:
        ctx = repo.filectx(file_, changeid=rev)
    else:
        ctx = repo[rev]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for child in ctx.children():
        displayer.show(changenode=child.node())
33 33
34 34
# command table wiring 'hg children' into the dispatcher:
# command name -> (callable, option list, synopsis)
cmdtable = {
    "children":
        (children,
         [('r', 'rev', '', _('show children of the specified rev')),
         ] + templateopts,
         _('hg children [-r REV] [FILE]')),
}
@@ -1,290 +1,290 b''
1 1 # hg backend for convert extension
2 2
3 3 # Notes for hg->hg conversion:
4 4 #
5 5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 6 # of commit messages, but new versions do. Changesets created by
7 7 # those older versions, then converted, may thus have different
8 8 # hashes for changesets that are otherwise identical.
9 9 #
10 10 # * By default, the source revision is stored in the converted
11 11 # revision. This will cause the converted revision to have a
12 12 # different identity than the source. To avoid this, use the
13 13 # following option: "--config convert.hg.saverev=false"
14 14
15 15
16 16 import os, time
17 17 from mercurial.i18n import _
18 18 from mercurial.repo import RepoError
19 19 from mercurial.node import bin, hex, nullid
20 20 from mercurial import hg, revlog, util, context
21 21
22 22 from common import NoRepo, commit, converter_source, converter_sink
23 23
class mercurial_sink(converter_sink):
    '''conversion sink writing into a (possibly new) Mercurial repository.'''

    def __init__(self, ui, path):
        converter_sink.__init__(self, ui, path)
        self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
        self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
        self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
        self.lastbranch = None
        if os.path.isdir(path) and len(os.listdir(path)) > 0:
            # non-empty directory: must already be a local hg repo
            try:
                self.repo = hg.repository(self.ui, path)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repo') % path)
            except RepoError, err:
                ui.print_exc()
                raise NoRepo(err.args[0])
        else:
            # empty or missing: create the destination repository
            try:
                ui.status(_('initializing destination %s repository\n') % path)
                self.repo = hg.repository(self.ui, path, create=True)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repo') % path)
                self.created.append(path)
            except RepoError, err:
                ui.print_exc()
                raise NoRepo("could not create hg repo %s as sink" % path)
        self.lock = None
        self.wlock = None
        self.filemapmode = False

    def before(self):
        '''acquire the working-dir and repo locks for the conversion run.'''
        self.ui.debug(_('run hg sink pre-conversion action\n'))
        self.wlock = self.repo.wlock()
        self.lock = self.repo.lock()

    def after(self):
        '''drop lock references (releases them when garbage collected).'''
        self.ui.debug(_('run hg sink post-conversion action\n'))
        self.lock = None
        self.wlock = None

    def revmapfile(self):
        '''path of the file recording source->dest revision mappings.'''
        return os.path.join(self.path, ".hg", "shamap")

    def authorfile(self):
        '''path of the file recording author name mappings.'''
        return os.path.join(self.path, ".hg", "authormap")

    def getheads(self):
        '''return hex nodes of all heads in the destination repo.'''
        h = self.repo.changelog.heads()
        return [ hex(x) for x in h ]

    def setbranch(self, branch, pbranches):
        '''switch to (and possibly create) the per-branch clone for branch.

        Only active in clonebranches mode; pbranches lists the (node,
        branch) pairs of the parents being converted.
        '''
        if not self.clonebranches:
            return

        setbranch = (branch != self.lastbranch)
        self.lastbranch = branch
        if not branch:
            branch = 'default'
        pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
        pbranch = pbranches and pbranches[0][1] or 'default'

        branchpath = os.path.join(self.path, branch)
        if setbranch:
            # re-open (or create) the clone dedicated to this branch;
            # locks must be released and re-acquired around the switch
            self.after()
            try:
                self.repo = hg.repository(self.ui, branchpath)
            except:
                self.repo = hg.repository(self.ui, branchpath, create=True)
            self.before()

        # pbranches may bring revisions from other branches (merge parents)
        # Make sure we have them, or pull them.
        missings = {}
        for b in pbranches:
            try:
                self.repo.lookup(b[0])
            except:
                missings.setdefault(b[1], []).append(b[0])

        if missings:
            self.after()
            for pbranch, heads in missings.iteritems():
                pbranchpath = os.path.join(self.path, pbranch)
                prepo = hg.repository(self.ui, pbranchpath)
                self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
                self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
            self.before()

    def putcommit(self, files, copies, parents, commit, source):
        '''create a changeset in the sink from converted commit data.

        Returns the hex node of the new changeset (or, in filemap mode,
        the parent when the commit turned out to be empty and was rolled
        back).
        '''

        files = dict(files)
        def getfilectx(repo, memctx, f):
            # fetch file data/mode from the source on demand
            v = files[f]
            data = source.getfile(f, v)
            e = source.getmode(f, v)
            return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f))

        # deduplicate parents while preserving order
        pl = []
        for p in parents:
            if p not in pl:
                pl.append(p)
        parents = pl
        nparents = len(parents)
        if self.filemapmode and nparents == 1:
            # remember the parent manifest so an unchanged commit can be
            # detected (and rolled back) below
            m1node = self.repo.changelog.read(bin(parents[0]))[0]
            parent = parents[0]

        # pad to at least two parents (nullid placeholders)
        if len(parents) < 2: parents.append("0" * 40)
        if len(parents) < 2: parents.append("0" * 40)
        p2 = parents.pop(0)

        text = commit.desc
        extra = commit.extra.copy()
        if self.branchnames and commit.branch:
            extra['branch'] = commit.branch
        if commit.rev:
            extra['convert_revision'] = commit.rev

        # an octopus merge (>2 parents) is converted into a chain of
        # two-parent merges; follow-up merges get a fixup description
        while parents:
            p1 = p2
            p2 = parents.pop(0)
            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx,
                                 commit.author, commit.date, extra)
            a = self.repo.commitctx(ctx)
            text = "(octopus merge fixup)\n"
            p2 = hex(self.repo.changelog.tip())

        if self.filemapmode and nparents == 1:
            man = self.repo.manifest
            mnode = self.repo.changelog.read(bin(p2))[0]
            if not man.cmp(m1node, man.revision(mnode)):
                # manifest unchanged: drop the empty commit
                self.repo.rollback()
                return parent
        return p2

    def puttags(self, tags):
        '''write the tags dict into .hgtags on the tags branch.

        Returns the new tip's hex node, or None if nothing changed.
        '''
        try:
            parentctx = self.repo[self.tagsbranch]
            tagparent = parentctx.node()
        except RepoError, inst:
            # tags branch does not exist yet; start from the null revision
            parentctx = None
            tagparent = nullid

        try:
            old = parentctx.filectx(".hgtags").data()
            oldlines = old.splitlines(1)
            oldlines.sort()
        except:
            # no parent ctx or no .hgtags file yet
            oldlines = []

        newlines = [("%s %s\n" % (tags[tag], tag)) for tag in tags.keys()]
        newlines.sort()

        if newlines == oldlines:
            return None
        data = "".join(newlines)

        def getfilectx(repo, memctx, f):
            return context.memfilectx(f, data, False, False, None)

        self.ui.status("updating tags\n")
        date = "%s 0" % int(time.mktime(time.gmtime()))
        extra = {'branch': self.tagsbranch}
        ctx = context.memctx(self.repo, (tagparent, None), "update tags",
                             [".hgtags"], getfilectx, "convert-repo", date,
                             extra)
        self.repo.commitctx(ctx)
        return hex(self.repo.changelog.tip())

    def setfilemapmode(self, active):
        '''enable/disable filemap mode (empty commits get rolled back).'''
        self.filemapmode = active
194 194
class mercurial_source(converter_source):
    '''conversion source reading from a local Mercurial repository.'''

    def __init__(self, ui, path, rev=None):
        converter_source.__init__(self, ui, path, rev)
        # whether to record the source revision hash in converted commits
        self.saverev = ui.configbool('convert', 'hg.saverev', True)
        try:
            self.repo = hg.repository(self.ui, path)
            # try to provoke an exception if this isn't really a hg
            # repo, but some other bogus compatible-looking url
            if not self.repo.local():
                raise RepoError()
        except RepoError:
            ui.print_exc()
            raise NoRepo("%s is not a local Mercurial repo" % path)
        # one-entry changectx cache (see changectx below)
        self.lastrev = None
        self.lastctx = None
        self._changescache = None
        self.convertfp = None

    def changectx(self, rev):
        '''return the changectx for rev, caching the last lookup.'''
        if self.lastrev != rev:
            self.lastctx = self.repo[rev]
            self.lastrev = rev
        return self.lastctx

    def getheads(self):
        '''return hex heads to convert: the --rev limit or all repo heads.'''
        if self.rev:
            return [hex(self.repo[self.rev].node())]
        else:
            return [hex(node) for node in self.repo.heads()]

    def getfile(self, name, rev):
        '''return contents of file name at rev; IOError if missing.'''
        try:
            return self.changectx(rev)[name].data()
        except revlog.LookupError, err:
            raise IOError(err)

    def getmode(self, name, rev):
        '''return mode flags for file name at rev ('x', 'l', 'xl' or '').'''
        m = self.changectx(rev).manifest()
        return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')

    def getchanges(self, rev):
        '''return (sorted (name, rev) pairs changed in rev, copy map).'''
        ctx = self.changectx(rev)
        if self._changescache and self._changescache[0] == rev:
            # reuse the status computed by a prior getchangedfiles call
            m, a, r = self._changescache[1]
        else:
            m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
        changes = [(name, rev) for name in m + a + r]
        changes.sort()
        return (changes, self.getcopies(ctx, m + a))

    def getcopies(self, ctx, files):
        '''return {dest: source} map for files copied/renamed in ctx.'''
        copies = {}
        for name in files:
            try:
                copies[name] = ctx.filectx(name).renamed()[0]
            except TypeError:
                # renamed() returned None: the file is not a copy
                pass
        return copies

    def getcommit(self, rev):
        '''return the commit metadata object for rev.'''
        ctx = self.changectx(rev)
        parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
        if self.saverev:
            crev = rev
        else:
            crev = None
        return commit(author=ctx.user(), date=util.datestr(ctx.date()),
                      desc=ctx.description(), rev=crev, parents=parents,
                      branch=ctx.branch(), extra=ctx.extra())

    def gettags(self):
        '''return {tag: hex node}, excluding the implicit 'tip' tag.'''
        tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
        return dict([(name, hex(node)) for name, node in tags])

    def getchangedfiles(self, rev, i):
        '''return files changed in rev relative to its i-th parent.'''
        ctx = self.changectx(rev)
        i = i or 0
        changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]

        if i == 0:
            # cache for a likely subsequent getchanges(rev) call
            self._changescache = (rev, changes)

        return changes[0] + changes[1] + changes[2]

    def converted(self, rev, destrev):
        '''append the source->dest revision mapping to .hg/shamap.'''
        if self.convertfp is None:
            self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
                                  'a')
        self.convertfp.write('%s %s\n' % (destrev, rev))
        self.convertfp.flush()

    def before(self):
        self.ui.debug(_('run hg source pre-conversion action\n'))

    def after(self):
        self.ui.debug(_('run hg source post-conversion action\n'))
@@ -1,251 +1,251 b''
1 1 # extdiff.py - external diff program support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''
9 9 The `extdiff' Mercurial extension allows you to use external programs
10 10 to compare revisions, or revision with working dir. The external diff
11 11 programs are called with a configurable set of options and two
12 12 non-option arguments: paths to directories containing snapshots of
13 13 files to compare.
14 14
15 15 To enable this extension:
16 16
17 17 [extensions]
18 18 hgext.extdiff =
19 19
20 20 The `extdiff' extension also allows to configure new diff commands, so
21 21 you do not need to type "hg extdiff -p kdiff3" always.
22 22
23 23 [extdiff]
24 24 # add new command that runs GNU diff(1) in 'context diff' mode
25 25 cdiff = gdiff -Nprc5
26 26 ## or the old way:
27 27 #cmd.cdiff = gdiff
28 28 #opts.cdiff = -Nprc5
29 29
30 30 # add new command called vdiff, runs kdiff3
31 31 vdiff = kdiff3
32 32
33 33 # add new command called meld, runs meld (no need to name twice)
34 34 meld =
35 35
36 36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
37 37 #(see http://www.vim.org/scripts/script.php?script_id=102)
38 38 # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
39 39 # your .vimrc
40 40 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
41 41
42 42 You can use -I/-X and list of file or directory names like normal
43 43 "hg diff" command. The `extdiff' extension makes snapshots of only
44 44 needed files, so running the external diff program will actually be
45 45 pretty fast (at least faster than having to compare the entire tree).
46 46 '''
47 47
48 48 from mercurial.i18n import _
49 49 from mercurial.node import short
50 50 from mercurial import cmdutil, util, commands
51 51 import os, shlex, shutil, tempfile
52 52
53 53 def snapshot_node(ui, repo, files, node, tmproot):
54 54 '''snapshot files as of some revision'''
55 mf = repo.changectx(node).manifest()
56 55 dirname = os.path.basename(repo.root)
57 56 if dirname == "":
58 57 dirname = "root"
59 58 dirname = '%s.%s' % (dirname, short(node))
60 59 base = os.path.join(tmproot, dirname)
61 60 os.mkdir(base)
62 61 ui.note(_('making snapshot of %d files from rev %s\n') %
63 62 (len(files), short(node)))
63 ctx = repo[node]
64 64 for fn in files:
65 if not fn in mf:
65 wfn = util.pconvert(fn)
66 if not wfn in ctx:
66 67 # skipping new file after a merge ?
67 68 continue
68 wfn = util.pconvert(fn)
69 69 ui.note(' %s\n' % wfn)
70 70 dest = os.path.join(base, wfn)
71 71 destdir = os.path.dirname(dest)
72 72 if not os.path.isdir(destdir):
73 73 os.makedirs(destdir)
74 data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
74 data = repo.wwritedata(wfn, ctx[wfn].data())
75 75 open(dest, 'wb').write(data)
76 76 return dirname
77 77
78 78
79 79 def snapshot_wdir(ui, repo, files, tmproot):
80 80 '''snapshot files from working directory.
81 81 if not using snapshot, -I/-X does not work and recursive diff
82 82 in tools like kdiff3 and meld displays too many files.'''
83 83 repo_root = repo.root
84 84
85 85 dirname = os.path.basename(repo_root)
86 86 if dirname == "":
87 87 dirname = "root"
88 88 base = os.path.join(tmproot, dirname)
89 89 os.mkdir(base)
90 90 ui.note(_('making snapshot of %d files from working dir\n') %
91 91 (len(files)))
92 92
93 93 fns_and_mtime = []
94 94
95 95 for fn in files:
96 96 wfn = util.pconvert(fn)
97 97 ui.note(' %s\n' % wfn)
98 98 dest = os.path.join(base, wfn)
99 99 destdir = os.path.dirname(dest)
100 100 if not os.path.isdir(destdir):
101 101 os.makedirs(destdir)
102 102
103 103 fp = open(dest, 'wb')
104 104 for chunk in util.filechunkiter(repo.wopener(wfn)):
105 105 fp.write(chunk)
106 106 fp.close()
107 107
108 108 fns_and_mtime.append((dest, os.path.join(repo_root, fn),
109 109 os.path.getmtime(dest)))
110 110
111 111
112 112 return dirname, fns_and_mtime
113 113
114 114
115 115 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
116 116 '''Do the actuall diff:
117 117
118 118 - copy to a temp structure if diffing 2 internal revisions
119 119 - copy to a temp structure if diffing working revision with
120 120 another one and more than 1 file is changed
121 121 - just invoke the diff for a single file in the working dir
122 122 '''
123 123 node1, node2 = cmdutil.revpair(repo, opts['rev'])
124 124 matcher = cmdutil.match(repo, pats, opts)
125 125 modified, added, removed, deleted, unknown = repo.status(
126 126 node1, node2, matcher)[:5]
127 127 if not (modified or added or removed):
128 128 return 0
129 129
130 130 tmproot = tempfile.mkdtemp(prefix='extdiff.')
131 131 dir2root = ''
132 132 try:
133 133 # Always make a copy of node1
134 134 dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
135 135 changes = len(modified) + len(removed) + len(added)
136 136
137 137 fns_and_mtime = []
138 138
139 139 # If node2 in not the wc or there is >1 change, copy it
140 140 if node2:
141 141 dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
142 142 elif changes > 1:
143 143 #we only actually need to get the files to copy back to the working
144 144 #dir in this case (because the other cases are: diffing 2 revisions
145 145 #or single file -- in which case the file is already directly passed
146 146 #to the diff tool).
147 147 dir2, fns_and_mtime = snapshot_wdir(ui, repo, modified + added, tmproot)
148 148 else:
149 149 # This lets the diff tool open the changed file directly
150 150 dir2 = ''
151 151 dir2root = repo.root
152 152
153 153 # If only one change, diff the files instead of the directories
154 154 if changes == 1 :
155 155 if len(modified):
156 156 dir1 = os.path.join(dir1, util.localpath(modified[0]))
157 157 dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
158 158 elif len(removed) :
159 159 dir1 = os.path.join(dir1, util.localpath(removed[0]))
160 160 dir2 = os.devnull
161 161 else:
162 162 dir1 = os.devnull
163 163 dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
164 164
165 165 cmdline = ('%s %s %s %s' %
166 166 (util.shellquote(diffcmd), ' '.join(diffopts),
167 167 util.shellquote(dir1), util.shellquote(dir2)))
168 168 ui.debug('running %r in %s\n' % (cmdline, tmproot))
169 169 util.system(cmdline, cwd=tmproot)
170 170
171 171 for copy_fn, working_fn, mtime in fns_and_mtime:
172 172 if os.path.getmtime(copy_fn) != mtime:
173 173 ui.debug('File changed while diffing. '
174 174 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
175 175 util.copyfile(copy_fn, working_fn)
176 176
177 177 return 1
178 178 finally:
179 179 ui.note(_('cleaning up temp directory\n'))
180 180 shutil.rmtree(tmproot)
181 181
182 182 def extdiff(ui, repo, *pats, **opts):
183 183 '''use external program to diff repository (or selected files)
184 184
185 185 Show differences between revisions for the specified files, using
186 186 an external program. The default program used is diff, with
187 187 default options "-Npru".
188 188
189 189 To select a different program, use the -p option. The program
190 190 will be passed the names of two directories to compare. To pass
191 191 additional options to the program, use the -o option. These will
192 192 be passed before the names of the directories to compare.
193 193
194 194 When two revision arguments are given, then changes are
195 195 shown between those revisions. If only one revision is
196 196 specified then that revision is compared to the working
197 197 directory, and, when no revisions are specified, the
198 198 working directory files are compared to its parent.'''
199 199 program = opts['program'] or 'diff'
200 200 if opts['program']:
201 201 option = opts['option']
202 202 else:
203 203 option = opts['option'] or ['-Npru']
204 204 return dodiff(ui, repo, program, option, pats, opts)
205 205
206 206 cmdtable = {
207 207 "extdiff":
208 208 (extdiff,
209 209 [('p', 'program', '', _('comparison program to run')),
210 210 ('o', 'option', [], _('pass option to comparison program')),
211 211 ('r', 'rev', [], _('revision')),
212 212 ] + commands.walkopts,
213 213 _('hg extdiff [OPT]... [FILE]...')),
214 214 }
215 215
216 216 def uisetup(ui):
217 217 for cmd, path in ui.configitems('extdiff'):
218 218 if cmd.startswith('cmd.'):
219 219 cmd = cmd[4:]
220 220 if not path: path = cmd
221 221 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
222 222 diffopts = diffopts and [diffopts] or []
223 223 elif cmd.startswith('opts.'):
224 224 continue
225 225 else:
226 226 # command = path opts
227 227 if path:
228 228 diffopts = shlex.split(path)
229 229 path = diffopts.pop(0)
230 230 else:
231 231 path, diffopts = cmd, []
232 232 def save(cmd, path, diffopts):
233 233 '''use closure to save diff command to use'''
234 234 def mydiff(ui, repo, *pats, **opts):
235 235 return dodiff(ui, repo, path, diffopts, pats, opts)
236 236 mydiff.__doc__ = '''use %(path)s to diff repository (or selected files)
237 237
238 238 Show differences between revisions for the specified
239 239 files, using the %(path)s program.
240 240
241 241 When two revision arguments are given, then changes are
242 242 shown between those revisions. If only one revision is
243 243 specified then that revision is compared to the working
244 244 directory, and, when no revisions are specified, the
245 245 working directory files are compared to its parent.''' % {
246 246 'path': util.uirepr(path),
247 247 }
248 248 return mydiff
249 249 cmdtable[cmd] = (save(cmd, path, diffopts),
250 250 cmdtable['extdiff'][1][1:],
251 251 _('hg %s [OPTION]... [FILE]...') % cmd)
@@ -1,357 +1,357 b''
1 1 # Minimal support for git commands on an hg repository
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 '''browsing the repository in a graphical way
8 8
9 9 The hgk extension allows browsing the history of a repository in a
10 10 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
11 11 not distributed with Mercurial.)
12 12
13 13 hgk consists of two parts: a Tcl script that does the displaying and
14 14 querying of information, and an extension to mercurial named hgk.py,
15 15 which provides hooks for hgk to get information. hgk can be found in
16 16 the contrib directory, and hgk.py can be found in the hgext directory.
17 17
18 18 To load the hgext.py extension, add it to your .hgrc file (you have
19 19 to use your global $HOME/.hgrc file, not one in a repository). You
20 20 can specify an absolute path:
21 21
22 22 [extensions]
23 23 hgk=/usr/local/lib/hgk.py
24 24
25 25 Mercurial can also scan the default python library path for a file
26 26 named 'hgk.py' if you set hgk empty:
27 27
28 28 [extensions]
29 29 hgk=
30 30
31 31 The hg view command will launch the hgk Tcl script. For this command
32 32 to work, hgk must be in your search path. Alternately, you can
33 33 specify the path to hgk in your .hgrc file:
34 34
35 35 [hgk]
36 36 path=/location/of/hgk
37 37
38 38 hgk can make use of the extdiff extension to visualize revisions.
39 39 Assuming you had already configured extdiff vdiff command, just add:
40 40
41 41 [hgk]
42 42 vdiff=vdiff
43 43
44 44 Revisions context menu will now display additional entries to fire
45 45 vdiff on hovered and selected revisions.'''
46 46
47 47 import os
48 48 from mercurial import commands, util, patch, revlog, cmdutil
49 49 from mercurial.node import nullid, nullrev, short
50 50
51 51 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
52 52 """diff trees from two commits"""
53 53 def __difftree(repo, node1, node2, files=[]):
54 54 assert node2 is not None
55 mmap = repo.changectx(node1).manifest()
56 mmap2 = repo.changectx(node2).manifest()
55 mmap = repo[node1].manifest()
56 mmap2 = repo[node2].manifest()
57 57 m = cmdutil.match(repo, files)
58 58 status = repo.status(node1, node2, match=m)[:5]
59 59 modified, added, removed, deleted, unknown = status
60 60
61 61 empty = short(nullid)
62 62
63 63 for f in modified:
64 64 # TODO get file permissions
65 65 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
66 66 (short(mmap[f]), short(mmap2[f]), f, f))
67 67 for f in added:
68 68 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
69 69 (empty, short(mmap2[f]), f, f))
70 70 for f in removed:
71 71 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
72 72 (short(mmap[f]), empty, f, f))
73 73 ##
74 74
75 75 while True:
76 76 if opts['stdin']:
77 77 try:
78 78 line = raw_input().split(' ')
79 79 node1 = line[0]
80 80 if len(line) > 1:
81 81 node2 = line[1]
82 82 else:
83 83 node2 = None
84 84 except EOFError:
85 85 break
86 86 node1 = repo.lookup(node1)
87 87 if node2:
88 88 node2 = repo.lookup(node2)
89 89 else:
90 90 node2 = node1
91 91 node1 = repo.changelog.parents(node1)[0]
92 92 if opts['patch']:
93 93 if opts['pretty']:
94 94 catcommit(ui, repo, node2, "")
95 95 m = cmdutil.match(repo, files)
96 96 patch.diff(repo, node1, node2, match=m,
97 97 opts=patch.diffopts(ui, {'git': True}))
98 98 else:
99 99 __difftree(repo, node1, node2, files=files)
100 100 if not opts['stdin']:
101 101 break
102 102
103 103 def catcommit(ui, repo, n, prefix, ctx=None):
104 104 nlprefix = '\n' + prefix;
105 105 if ctx is None:
106 ctx = repo.changectx(n)
106 ctx = repo[n]
107 107 (p1, p2) = ctx.parents()
108 108 ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
109 109 if p1: ui.write("parent %s\n" % short(p1.node()))
110 110 if p2: ui.write("parent %s\n" % short(p2.node()))
111 111 date = ctx.date()
112 112 description = ctx.description().replace("\0", "")
113 113 lines = description.splitlines()
114 114 if lines and lines[-1].startswith('committer:'):
115 115 committer = lines[-1].split(': ')[1].rstrip()
116 116 else:
117 117 committer = ctx.user()
118 118
119 119 ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
120 120 ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
121 121 ui.write("revision %d\n" % ctx.rev())
122 122 ui.write("branch %s\n\n" % ctx.branch())
123 123
124 124 if prefix != "":
125 125 ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
126 126 else:
127 127 ui.write(description + "\n")
128 128 if prefix:
129 129 ui.write('\0')
130 130
131 131 def base(ui, repo, node1, node2):
132 132 """Output common ancestor information"""
133 133 node1 = repo.lookup(node1)
134 134 node2 = repo.lookup(node2)
135 135 n = repo.changelog.ancestor(node1, node2)
136 136 ui.write(short(n) + "\n")
137 137
138 138 def catfile(ui, repo, type=None, r=None, **opts):
139 139 """cat a specific revision"""
140 140 # in stdin mode, every line except the commit is prefixed with two
141 141 # spaces. This way the our caller can find the commit without magic
142 142 # strings
143 143 #
144 144 prefix = ""
145 145 if opts['stdin']:
146 146 try:
147 147 (type, r) = raw_input().split(' ');
148 148 prefix = " "
149 149 except EOFError:
150 150 return
151 151
152 152 else:
153 153 if not type or not r:
154 154 ui.warn("cat-file: type or revision not supplied\n")
155 155 commands.help_(ui, 'cat-file')
156 156
157 157 while r:
158 158 if type != "commit":
159 159 ui.warn("aborting hg cat-file only understands commits\n")
160 160 return 1;
161 161 n = repo.lookup(r)
162 162 catcommit(ui, repo, n, prefix)
163 163 if opts['stdin']:
164 164 try:
165 165 (type, r) = raw_input().split(' ');
166 166 except EOFError:
167 167 break
168 168 else:
169 169 break
170 170
171 171 # git rev-tree is a confusing thing. You can supply a number of
172 172 # commit sha1s on the command line, and it walks the commit history
173 173 # telling you which commits are reachable from the supplied ones via
174 174 # a bitmask based on arg position.
175 175 # you can specify a commit to stop at by starting the sha1 with ^
176 176 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
177 177 def chlogwalk():
178 178 count = repo.changelog.count()
179 179 i = count
180 180 l = [0] * 100
181 181 chunk = 100
182 182 while True:
183 183 if chunk > i:
184 184 chunk = i
185 185 i = 0
186 186 else:
187 187 i -= chunk
188 188
189 189 for x in xrange(0, chunk):
190 190 if i + x >= count:
191 191 l[chunk - x:] = [0] * (chunk - x)
192 192 break
193 193 if full != None:
194 l[x] = repo.changectx(i + x)
194 l[x] = repo[i + x]
195 195 l[x].changeset() # force reading
196 196 else:
197 197 l[x] = 1
198 198 for x in xrange(chunk-1, -1, -1):
199 199 if l[x] != 0:
200 200 yield (i + x, full != None and l[x] or None)
201 201 if i == 0:
202 202 break
203 203
204 204 # calculate and return the reachability bitmask for sha
205 205 def is_reachable(ar, reachable, sha):
206 206 if len(ar) == 0:
207 207 return 1
208 208 mask = 0
209 209 for i in xrange(len(ar)):
210 210 if sha in reachable[i]:
211 211 mask |= 1 << i
212 212
213 213 return mask
214 214
215 215 reachable = []
216 216 stop_sha1 = []
217 217 want_sha1 = []
218 218 count = 0
219 219
220 220 # figure out which commits they are asking for and which ones they
221 221 # want us to stop on
222 222 for i in xrange(len(args)):
223 223 if args[i].startswith('^'):
224 224 s = repo.lookup(args[i][1:])
225 225 stop_sha1.append(s)
226 226 want_sha1.append(s)
227 227 elif args[i] != 'HEAD':
228 228 want_sha1.append(repo.lookup(args[i]))
229 229
230 230 # calculate the graph for the supplied commits
231 231 for i in xrange(len(want_sha1)):
232 232 reachable.append({});
233 233 n = want_sha1[i];
234 234 visit = [n];
235 235 reachable[i][n] = 1
236 236 while visit:
237 237 n = visit.pop(0)
238 238 if n in stop_sha1:
239 239 continue
240 240 for p in repo.changelog.parents(n):
241 241 if p not in reachable[i]:
242 242 reachable[i][p] = 1
243 243 visit.append(p)
244 244 if p in stop_sha1:
245 245 continue
246 246
247 247 # walk the repository looking for commits that are in our
248 248 # reachability graph
249 249 for i, ctx in chlogwalk():
250 250 n = repo.changelog.node(i)
251 251 mask = is_reachable(want_sha1, reachable, n)
252 252 if mask:
253 253 parentstr = ""
254 254 if parents:
255 255 pp = repo.changelog.parents(n)
256 256 if pp[0] != nullid:
257 257 parentstr += " " + short(pp[0])
258 258 if pp[1] != nullid:
259 259 parentstr += " " + short(pp[1])
260 260 if not full:
261 261 ui.write("%s%s\n" % (short(n), parentstr))
262 262 elif full == "commit":
263 263 ui.write("%s%s\n" % (short(n), parentstr))
264 264 catcommit(ui, repo, n, ' ', ctx)
265 265 else:
266 266 (p1, p2) = repo.changelog.parents(n)
267 267 (h, h1, h2) = map(short, (n, p1, p2))
268 268 (i1, i2) = map(repo.changelog.rev, (p1, p2))
269 269
270 270 date = ctx.date()[0]
271 271 ui.write("%s %s:%s" % (date, h, mask))
272 272 mask = is_reachable(want_sha1, reachable, p1)
273 273 if i1 != nullrev and mask > 0:
274 274 ui.write("%s:%s " % (h1, mask)),
275 275 mask = is_reachable(want_sha1, reachable, p2)
276 276 if i2 != nullrev and mask > 0:
277 277 ui.write("%s:%s " % (h2, mask))
278 278 ui.write("\n")
279 279 if maxnr and count >= maxnr:
280 280 break
281 281 count += 1
282 282
283 283 def revparse(ui, repo, *revs, **opts):
284 284 """Parse given revisions"""
285 285 def revstr(rev):
286 286 if rev == 'HEAD':
287 287 rev = 'tip'
288 288 return revlog.hex(repo.lookup(rev))
289 289
290 290 for r in revs:
291 291 revrange = r.split(':', 1)
292 292 ui.write('%s\n' % revstr(revrange[0]))
293 293 if len(revrange) == 2:
294 294 ui.write('^%s\n' % revstr(revrange[1]))
295 295
296 296 # git rev-list tries to order things by date, and has the ability to stop
297 297 # at a given commit without walking the whole repo. TODO add the stop
298 298 # parameter
299 299 def revlist(ui, repo, *revs, **opts):
300 300 """print revisions"""
301 301 if opts['header']:
302 302 full = "commit"
303 303 else:
304 304 full = None
305 305 copy = [x for x in revs]
306 306 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
307 307
308 308 def config(ui, repo, **opts):
309 309 """print extension options"""
310 310 def writeopt(name, value):
311 311 ui.write('k=%s\nv=%s\n' % (name, value))
312 312
313 313 writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
314 314
315 315
316 316 def view(ui, repo, *etc, **opts):
317 317 "start interactive history viewer"
318 318 os.chdir(repo.root)
319 319 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
320 320 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
321 321 ui.debug("running %s\n" % cmd)
322 322 util.system(cmd)
323 323
324 324 cmdtable = {
325 325 "^view":
326 326 (view,
327 327 [('l', 'limit', '', 'limit number of changes displayed')],
328 328 'hg view [-l LIMIT] [REVRANGE]'),
329 329 "debug-diff-tree":
330 330 (difftree,
331 331 [('p', 'patch', None, 'generate patch'),
332 332 ('r', 'recursive', None, 'recursive'),
333 333 ('P', 'pretty', None, 'pretty'),
334 334 ('s', 'stdin', None, 'stdin'),
335 335 ('C', 'copy', None, 'detect copies'),
336 336 ('S', 'search', "", 'search')],
337 337 'hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...'),
338 338 "debug-cat-file":
339 339 (catfile,
340 340 [('s', 'stdin', None, 'stdin')],
341 341 'hg debug-cat-file [OPTION]... TYPE FILE'),
342 342 "debug-config":
343 343 (config, [], 'hg debug-config'),
344 344 "debug-merge-base":
345 345 (base, [], 'hg debug-merge-base node node'),
346 346 "debug-rev-parse":
347 347 (revparse,
348 348 [('', 'default', '', 'ignored')],
349 349 'hg debug-rev-parse REV'),
350 350 "debug-rev-list":
351 351 (revlist,
352 352 [('H', 'header', None, 'header'),
353 353 ('t', 'topo-order', None, 'topo-order'),
354 354 ('p', 'parents', None, 'parents'),
355 355 ('n', 'max-count', 0, 'max-count')],
356 356 'hg debug-rev-list [options] revs'),
357 357 }
@@ -1,567 +1,567 b''
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a DSCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an audience
15 15 # not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Setup in hgrc:
25 25 #
26 26 # [extensions]
27 27 # # enable extension
28 28 # hgext.keyword =
29 29 #
30 30 # Files to act upon/ignore are specified in the [keyword] section.
31 31 # Customized keyword template mappings in the [keywordmaps] section.
32 32 #
33 33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34 34
35 35 '''keyword expansion in local repositories
36 36
37 37 This extension expands RCS/CVS-like or self-customized $Keywords$
38 38 in tracked text files selected by your configuration.
39 39
40 40 Keywords are only expanded in local repositories and not stored in
41 41 the change history. The mechanism can be regarded as a convenience
42 42 for the current user or for archive distribution.
43 43
44 44 Configuration is done in the [keyword] and [keywordmaps] sections
45 45 of hgrc files.
46 46
47 47 Example:
48 48
49 49 [keyword]
50 50 # expand keywords in every python file except those matching "x*"
51 51 **.py =
52 52 x* = ignore
53 53
54 54 Note: the more specific you are in your filename patterns
55 55 the less you lose speed in huge repos.
56 56
57 57 For [keywordmaps] template mapping and expansion demonstration and
58 58 control run "hg kwdemo".
59 59
60 60 An additional date template filter {date|utcdate} is provided.
61 61
62 62 The default template mappings (view with "hg kwdemo -d") can be replaced
63 63 with customized keywords and templates.
64 64 Again, run "hg kwdemo" to control the results of your config changes.
65 65
66 66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 67 the risk of inadvertedly storing expanded keywords in the change history.
68 68
69 69 To force expansion after enabling it, or a configuration change, run
70 70 "hg kwexpand".
71 71
72 72 Also, when committing with the record extension or using mq's qrecord, be aware
73 73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
74 74 question to update keyword expansions after all changes have been checked in.
75 75
76 76 Expansions spanning more than one line and incremental expansions,
77 77 like CVS' $Log$, are not supported. A keyword template map
78 78 "Log = {desc}" expands to the first line of the changeset description.
79 79 '''
80 80
81 81 from mercurial import commands, cmdutil, dispatch, filelog, revlog
82 82 from mercurial import patch, localrepo, templater, templatefilters, util
83 83 from mercurial.hgweb import webcommands
84 84 from mercurial.node import nullid, hex
85 85 from mercurial.i18n import _
86 86 import re, shutil, tempfile, time
87 87
88 88 commands.optionalrepo += ' kwdemo'
89 89
90 90 # hg commands that do not act on keywords
91 91 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
92 92 ' log outgoing push rename rollback tip'
93 93 ' convert email glog')
94 94
95 95 # hg commands that trigger expansion only when writing to working dir,
96 96 # not when reading filelog, and unexpand when reading from working dir
97 97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
98 98
99 99 def utcdate(date):
100 100 '''Returns hgdate in cvs-like UTC format.'''
101 101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
102 102
103 103 # make keyword tools accessible
104 104 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
105 105
106 106
107 107 class kwtemplater(object):
108 108 '''
109 109 Sets up keyword templates, corresponding keyword regex, and
110 110 provides keyword substitution functions.
111 111 '''
112 112 templates = {
113 113 'Revision': '{node|short}',
114 114 'Author': '{author|user}',
115 115 'Date': '{date|utcdate}',
116 116 'RCSFile': '{file|basename},v',
117 117 'Source': '{root}/{file},v',
118 118 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
119 119 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
120 120 }
121 121
122 122 def __init__(self, ui, repo):
123 123 self.ui = ui
124 124 self.repo = repo
125 125 self.matcher = util.matcher(repo.root,
126 126 inc=kwtools['inc'], exc=kwtools['exc'])[1]
127 127 self.restrict = kwtools['hgcmd'] in restricted.split()
128 128
129 129 kwmaps = self.ui.configitems('keywordmaps')
130 130 if kwmaps: # override default templates
131 131 kwmaps = [(k, templater.parsestring(v, False))
132 132 for (k, v) in kwmaps]
133 133 self.templates = dict(kwmaps)
134 134 escaped = map(re.escape, self.templates.keys())
135 135 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
136 136 self.re_kw = re.compile(kwpat)
137 137
138 138 templatefilters.filters['utcdate'] = utcdate
139 139 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
140 140 False, '', False)
141 141
142 142 def getnode(self, path, fnode):
143 143 '''Derives changenode from file path and filenode.'''
144 144 # used by kwfilelog.read and kwexpand
145 145 c = self.repo.filectx(path, fileid=fnode)
146 146 return c.node()
147 147
148 148 def substitute(self, data, path, node, subfunc):
149 149 '''Replaces keywords in data with expanded template.'''
150 150 def kwsub(mobj):
151 151 kw = mobj.group(1)
152 152 self.ct.use_template(self.templates[kw])
153 153 self.ui.pushbuffer()
154 154 self.ct.show(changenode=node, root=self.repo.root, file=path)
155 155 ekw = templatefilters.firstline(self.ui.popbuffer())
156 156 return '$%s: %s $' % (kw, ekw)
157 157 return subfunc(kwsub, data)
158 158
159 159 def expand(self, path, node, data):
160 160 '''Returns data with keywords expanded.'''
161 161 if not self.restrict and self.matcher(path) and not util.binary(data):
162 162 changenode = self.getnode(path, node)
163 163 return self.substitute(data, path, changenode, self.re_kw.sub)
164 164 return data
165 165
166 166 def iskwfile(self, path, islink):
167 167 '''Returns true if path matches [keyword] pattern
168 168 and is not a symbolic link.
169 169 Caveat: localrepository._link fails on Windows.'''
170 170 return self.matcher(path) and not islink(path)
171 171
172 172 def overwrite(self, node, expand, files):
173 173 '''Overwrites selected files expanding/shrinking keywords.'''
174 174 if node is not None: # commit
175 ctx = self.repo.changectx(node)
175 ctx = self.repo[node]
176 176 mf = ctx.manifest()
177 177 files = [f for f in ctx.files() if f in mf]
178 178 notify = self.ui.debug
179 179 else: # kwexpand/kwshrink
180 ctx = self.repo.changectx('.')
180 ctx = self.repo['.']
181 181 mf = ctx.manifest()
182 182 notify = self.ui.note
183 183 candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
184 184 if candidates:
185 185 self.restrict = True # do not expand when reading
186 186 candidates.sort()
187 187 action = expand and 'expanding' or 'shrinking'
188 188 for f in candidates:
189 189 fp = self.repo.file(f)
190 190 data = fp.read(mf[f])
191 191 if util.binary(data):
192 192 continue
193 193 if expand:
194 194 changenode = node or self.getnode(f, mf[f])
195 195 data, found = self.substitute(data, f, changenode,
196 196 self.re_kw.subn)
197 197 else:
198 198 found = self.re_kw.search(data)
199 199 if found:
200 200 notify(_('overwriting %s %s keywords\n') % (f, action))
201 201 self.repo.wwrite(f, data, mf.flags(f))
202 202 self.repo.dirstate.normal(f)
203 203 self.restrict = False
204 204
205 205 def shrinktext(self, text):
206 206 '''Unconditionally removes all keyword substitutions from text.'''
207 207 return self.re_kw.sub(r'$\1$', text)
208 208
209 209 def shrink(self, fname, text):
210 210 '''Returns text with all keyword substitutions removed.'''
211 211 if self.matcher(fname) and not util.binary(text):
212 212 return self.shrinktext(text)
213 213 return text
214 214
215 215 def shrinklines(self, fname, lines):
216 216 '''Returns lines with keyword substitutions removed.'''
217 217 if self.matcher(fname):
218 218 text = ''.join(lines)
219 219 if not util.binary(text):
220 220 return self.shrinktext(text).splitlines(True)
221 221 return lines
222 222
223 223 def wread(self, fname, data):
224 224 '''If in restricted mode returns data read from wdir with
225 225 keyword substitutions removed.'''
226 226 return self.restrict and self.shrink(fname, data) or data
227 227
class kwfilelog(filelog.filelog):
    '''
    Subclass of filelog to hook into its read, add, cmp methods.
    Keywords are "stored" unexpanded, and processed on reading.
    '''
    def __init__(self, opener, kwt, path):
        super(kwfilelog, self).__init__(opener, path)
        self.kwt = kwt    # shared kwtemplater driving expand/shrink
        self.path = path

    def read(self, node):
        '''Expands keywords when reading filelog.'''
        data = super(kwfilelog, self).read(node)
        return self.kwt.expand(self.path, node, data)

    def add(self, text, meta, tr, link, p1=None, p2=None):
        '''Removes keyword substitutions when adding to filelog.'''
        text = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)

    def cmp(self, node, text):
        '''Removes keyword substitutions for comparison.'''
        text = self.kwt.shrink(self.path, text)
        if self.renamed(node):
            # renames store metadata in the revision text; compare the
            # decoded contents instead of raw revision data
            t2 = super(kwfilelog, self).read(node)
            return t2 != text
        return revlog.revlog.cmp(self, node, text)
255 255
def _status(ui, repo, kwt, *pats, **opts):
    '''Bails out if [keyword] configuration is not active.
    Returns status of working directory.'''
    if kwt:
        matcher = cmdutil.match(repo, pats, opts)
        return repo.status(match=matcher, list_clean=True)
    # no templater was set up: distinguish "patterns can never match here"
    # from "no patterns configured at all" for the error message
    if ui.configitems('keyword'):
        raise util.Abort(_('[keyword] patterns cannot match'))
    raise util.Abort(_('no [keyword] patterns configured'))
265 265
def _kwfwrite(ui, repo, expand, *pats, **opts):
    '''Selects files and passes them to kwtemplater.overwrite.'''
    # refuse to run during a merge or with local modifications, because
    # overwrite() rewrites file contents in the working directory
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    kwt = kwtools['templater']
    status = _status(ui, repo, kwt, *pats, **opts)
    modified, added, removed, deleted, unknown, ignored, clean = status
    if modified or added or removed or deleted:
        raise util.Abort(_('outstanding uncommitted changes'))
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        # only clean files are rewritten; expand selects direction
        kwt.overwrite(None, expand, clean)
    finally:
        del wlock, lock
282 282
283 283
def demo(ui, repo, *args, **opts):
    '''print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps
    and their expansion.

    Extend current configuration by specifying maps as arguments
    and optionally by reading from an additional hgrc file.

    Override current keyword template maps with "default" option.
    '''
    def demostatus(stat):
        # indented status heading
        ui.status(_('\n\t%s\n') % stat)

    def demoitems(section, items):
        # print a config section in hgrc syntax
        ui.write('[%s]\n' % section)
        for k, v in items:
            ui.write('%s = %s\n' % (k, v))

    msg = 'hg keyword config and expansion example'
    kwstatus = 'current'
    fn = 'demo.txt'
    branchname = 'demobranch'
    # run the whole demo in a throwaway repository so the real repo
    # passed in is never modified
    tmpdir = tempfile.mkdtemp('', 'kwdemo.')
    ui.note(_('creating temporary repo at %s\n') % tmpdir)
    repo = localrepo.localrepository(ui, tmpdir, True)
    ui.setconfig('keyword', fn, '')
    if args or opts.get('rcfile'):
        kwstatus = 'custom'
        if opts.get('rcfile'):
            ui.readconfig(opts.get('rcfile'))
    if opts.get('default'):
        kwstatus = 'default'
        kwmaps = kwtemplater.templates
        if ui.configitems('keywordmaps'):
            # override maps from optional rcfile
            for k, v in kwmaps.iteritems():
                ui.setconfig('keywordmaps', k, v)
    elif args:
        # simulate hgrc parsing
        rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
        fp = repo.opener('hgrc', 'w')
        fp.writelines(rcmaps)
        fp.close()
        ui.readconfig(repo.join('hgrc'))
    if not opts.get('default'):
        kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
    uisetup(ui)
    reposetup(ui, repo)
    # NOTE(review): 'extension' stays unbound if no [extensions] entry ends
    # in 'keyword'; the write below would then raise NameError -- presumably
    # unreachable while this extension is loaded, but confirm.
    for k, v in ui.configitems('extensions'):
        if k.endswith('keyword'):
            extension = '%s = %s' % (k, v)
            break
    demostatus('config using %s keyword template maps' % kwstatus)
    ui.write('[extensions]\n%s\n' % extension)
    demoitems('keyword', ui.configitems('keyword'))
    demoitems('keywordmaps', kwmaps.iteritems())
    keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
    repo.wopener(fn, 'w').write(keywords)
    repo.add([fn])
    path = repo.wjoin(fn)
    ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
    ui.note(keywords)
    ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
    # silence branch command if not verbose
    quiet = ui.quiet
    ui.quiet = not ui.verbose
    commands.branch(ui, repo, branchname)
    ui.quiet = quiet
    # disable commit hooks so the demo commit stays side-effect free
    for name, cmd in ui.configitems('hooks'):
        if name.split('.', 1)[0].find('commit') > -1:
            repo.ui.setconfig('hooks', name, '')
            ui.note(_('unhooked all commit hooks\n'))
    ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
    repo.commit(text=msg)
    format = ui.verbose and ' in %s' % path or ''
    demostatus('%s keywords expanded%s' % (kwstatus, format))
    ui.write(repo.wread(fn))
    ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
    shutil.rmtree(tmpdir, ignore_errors=True)
364 364
def expand(ui, repo, *pats, **opts):
    '''expand keywords in working directory

    Run after (re)enabling keyword expansion.

    kwexpand refuses to run if given files contain local changes.
    '''
    # 3rd argument sets expansion to True
    _kwfwrite(ui, repo, True, *pats, **opts)
374 374
def files(ui, repo, *pats, **opts):
    '''print files currently configured for keyword expansion

    Crosscheck which files in working directory are potential targets for
    keyword expansion.
    That is, files matched by [keyword] config patterns but not symlinks.
    '''
    kwt = kwtools['templater']
    status = _status(ui, repo, kwt, *pats, **opts)
    modified, added, removed, deleted, unknown, ignored, clean = status
    files = modified + added + clean
    if opts.get('untracked'):
        files += unknown
    files.sort()
    wctx = repo[None]
    # symlinks cannot carry keywords, so they are filtered out
    islink = lambda p: 'l' in wctx.flags(p)
    kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
    cwd = pats and repo.getcwd() or ''
    # build (statuschar, filelist) pairs: K = expansion candidates,
    # I = matched but excluded files
    kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
    if opts.get('all') or opts.get('ignore'):
        kwfstats += (('I', [f for f in files if f not in kwfiles]),)
    for char, filenames in kwfstats:
        format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
        for f in filenames:
            ui.write(format % repo.pathto(f, cwd))
400 400
def shrink(ui, repo, *pats, **opts):
    '''revert expanded keywords in working directory

    Run before changing/disabling active keywords
    or if you experience problems with "hg import" or "hg merge".

    kwshrink refuses to run if given files contain local changes.
    '''
    # 3rd argument sets expansion to False
    _kwfwrite(ui, repo, False, *pats, **opts)
411 411
412 412
def uisetup(ui):
    '''Collects [keyword] config in kwtools.
    Monkeypatches dispatch._parse if needed.'''

    # split [keyword] patterns into include and exclude lists
    for pat, opt in ui.configitems('keyword'):
        if opt != 'ignore':
            kwtools['inc'].append(pat)
        else:
            kwtools['exc'].append(pat)

    if kwtools['inc']:
        def kwdispatch_parse(ui, args):
            '''Monkeypatch dispatch._parse to obtain running hg command.'''
            cmd, func, args, options, cmdoptions = dispatch_parse(ui, args)
            # remember the command so reposetup can skip expansion for
            # commands listed in nokwcommands
            kwtools['hgcmd'] = cmd
            return cmd, func, args, options, cmdoptions

        dispatch_parse = dispatch._parse
        dispatch._parse = kwdispatch_parse
432 432
def reposetup(ui, repo):
    '''Sets up repo as kwrepo for keyword substitution.
    Overrides file method to return kwfilelog instead of filelog
    if file matches user configuration.
    Wraps commit to overwrite configured files with updated
    keyword substitutions.
    Monkeypatches patch and webcommands.'''

    try:
        # bail out for non-local repos, repos without matching [keyword]
        # patterns, commands that must never expand, repos nested inside
        # a .hg directory, and bundle repos
        if (not repo.local() or not kwtools['inc']
            or kwtools['hgcmd'] in nokwcommands.split()
            or '.hg' in util.splitpath(repo.root)
            or repo._url.startswith('bundle:')):
            return
    except AttributeError:
        pass

    kwtools['templater'] = kwt = kwtemplater(ui, repo)

    class kwrepo(repo.__class__):
        def file(self, f):
            if f[0] == '/':
                f = f[1:]
            return kwfilelog(self.sopener, kwt, f)

        def wread(self, filename):
            data = super(kwrepo, self).wread(filename)
            return kwt.wread(filename, data)

        def commit(self, files=None, text='', user=None, date=None,
                   match=None, force=False, force_editor=False,
                   p1=None, p2=None, extra={}, empty_ok=False):
            wlock = lock = None
            _p1 = _p2 = None
            try:
                wlock = self.wlock()
                lock = self.lock()
                # store and postpone commit hooks, so they run only after
                # keywords in committed files have been overwritten below
                commithooks = {}
                for name, cmd in ui.configitems('hooks'):
                    if name.split('.', 1)[0] == 'commit':
                        commithooks[name] = cmd
                        ui.setconfig('hooks', name, None)
                if commithooks:
                    # store parents for commit hook environment
                    if p1 is None:
                        _p1, _p2 = repo.dirstate.parents()
                    else:
                        _p1, _p2 = p1, p2 or nullid
                    _p1 = hex(_p1)
                    if _p2 == nullid:
                        _p2 = ''
                    else:
                        _p2 = hex(_p2)

                n = super(kwrepo, self).commit(files, text, user, date, match,
                                               force, force_editor, p1, p2,
                                               extra, empty_ok)

                # restore commit hooks
                for name, cmd in commithooks.iteritems():
                    ui.setconfig('hooks', name, cmd)
                if n is not None:
                    # expand keywords in the just-committed files, then run
                    # the postponed hooks against the new changeset
                    kwt.overwrite(n, True, None)
                    repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
                return n
            finally:
                del wlock, lock

    # monkeypatches
    def kwpatchfile_init(self, ui, fname, missing=False):
        '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
        rejects or conflicts due to expanded keywords in working dir.'''
        patchfile_init(self, ui, fname, missing)
        # shrink keywords read from working dir
        self.lines = kwt.shrinklines(self.fname, self.lines)

    def kw_diff(repo, node1=None, node2=None, match=None,
                fp=None, changes=None, opts=None):
        '''Monkeypatch patch.diff to avoid expansion except when
        comparing against working dir.'''
        if node2 is not None:
            kwt.matcher = util.never
        elif node1 is not None and node1 != repo['.'].node():
            kwt.restrict = True
        patch_diff(repo, node1, node2, match, fp, changes, opts)

    def kwweb_annotate(web, req, tmpl):
        '''Wraps webcommands.annotate turning off keyword expansion.'''
        kwt.matcher = util.never
        return webcommands_annotate(web, req, tmpl)

    def kwweb_changeset(web, req, tmpl):
        '''Wraps webcommands.changeset turning off keyword expansion.'''
        kwt.matcher = util.never
        return webcommands_changeset(web, req, tmpl)

    def kwweb_filediff(web, req, tmpl):
        '''Wraps webcommands.filediff turning off keyword expansion.'''
        kwt.matcher = util.never
        return webcommands_filediff(web, req, tmpl)

    repo.__class__ = kwrepo

    # keep references to the originals so the wrappers above can delegate
    patchfile_init = patch.patchfile.__init__
    patch_diff = patch.diff
    webcommands_annotate = webcommands.annotate
    webcommands_changeset = webcommands.changeset
    webcommands_filediff = webcommands.filediff

    patch.patchfile.__init__ = kwpatchfile_init
    patch.diff = kw_diff
    webcommands.annotate = kwweb_annotate
    webcommands.changeset = webcommands.rev = kwweb_changeset
    webcommands.filediff = webcommands.diff = kwweb_filediff
548 548
549 549
# command table: name -> (function, option list, synopsis); picked up
# by Mercurial's extension loader
cmdtable = {
    'kwdemo':
        (demo,
         [('d', 'default', None, _('show default keyword template maps')),
          ('f', 'rcfile', [], _('read maps from rcfile'))],
         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
    'kwexpand': (expand, commands.walkopts,
                 _('hg kwexpand [OPTION]... [FILE]...')),
    'kwfiles':
        (files,
         [('a', 'all', None, _('show keyword status flags of all files')),
          ('i', 'ignore', None, _('show files excluded from expansion')),
          ('u', 'untracked', None, _('additionally show untracked files')),
         ] + commands.walkopts,
         _('hg kwfiles [OPTION]... [FILE]...')),
    'kwshrink': (shrink, commands.walkopts,
                 _('hg kwshrink [OPTION]... [FILE]...')),
}
@@ -1,2458 +1,2458 b''
1 1 # mq.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial.node import bin, hex, short
34 34 from mercurial.repo import RepoError
35 35 from mercurial import commands, cmdutil, hg, patch, revlog, util
36 36 from mercurial import repair
37 37 import os, sys, re, errno
38 38
# qclone works without a local repository
commands.norepo += " qclone"

# Patch names looks like unix-file names.
# They must be joinable with queue directory and result in the patch path.
normname = util.normpath
44 44
class statusentry:
    """One entry of the status file, serialized as '<rev>:<name>'."""

    def __init__(self, rev, name=None):
        if name:
            # explicit pair
            self.rev, self.name = rev, name
        else:
            # parse a '<rev>:<name>' status file line
            parts = rev.split(':', 1)
            if len(parts) == 2:
                self.rev, self.name = parts
            else:
                # malformed line: mark both fields unknown
                self.rev = self.name = None

    def __str__(self):
        return self.rev + ':' + self.name
58 58
59 59 class queue:
    def __init__(self, ui, path, patchdir=None):
        # path is the repository's .hg directory; patches live in
        # path/patches unless patchdir overrides that
        self.basepath = path
        self.path = patchdir or os.path.join(path, "patches")
        self.opener = util.opener(self.path)
        self.ui = ui
        self.applied = []        # statusentry list of applied patches
        self.full_series = []    # raw lines of the series file
        self.applied_dirty = 0
        self.series_dirty = 0
        self.series_path = "series"
        self.status_path = "status"
        self.guards_path = "guards"
        self.active_guards = None    # lazily loaded by active()
        self.guards_dirty = False
        self._diffopts = None        # lazily built by diffopts()

        if os.path.exists(self.join(self.series_path)):
            self.full_series = self.opener(self.series_path).read().splitlines()
        self.parse_series()

        if os.path.exists(self.join(self.status_path)):
            lines = self.opener(self.status_path).read().splitlines()
            self.applied = [statusentry(l) for l in lines]
83 83
    def diffopts(self):
        # lazily build and cache diff options from the ui configuration
        if self._diffopts is None:
            self._diffopts = patch.diffopts(self.ui)
        return self._diffopts
88 88
89 89 def join(self, *p):
90 90 return os.path.join(self.path, *p)
91 91
92 92 def find_series(self, patch):
93 93 pre = re.compile("(\s*)([^#]+)")
94 94 index = 0
95 95 for l in self.full_series:
96 96 m = pre.match(l)
97 97 if m:
98 98 s = m.group(2)
99 99 s = s.rstrip()
100 100 if s == patch:
101 101 return index
102 102 index += 1
103 103 return None
104 104
    # matches ' #+guard' / ' #-guard' annotations appended to series lines
    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')

    def parse_series(self):
        """Rebuild self.series (patch names) and self.series_guards
        (guard lists) from the raw full_series lines, skipping comments
        and blank lines."""
        self.series = []
        self.series_guards = []
        for l in self.full_series:
            h = l.find('#')
            if h == -1:
                patch = l
                comment = ''
            elif h == 0:
                # whole line is a comment
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise util.Abort(_('%s appears more than once in %s') %
                                     (patch, self.join(self.series_path)))
                self.series.append(patch)
                self.series_guards.append(self.guard_re.findall(comment))
127 127
128 128 def check_guard(self, guard):
129 129 if not guard:
130 130 return _('guard cannot be an empty string')
131 131 bad_chars = '# \t\r\n\f'
132 132 first = guard[0]
133 133 for c in '-+':
134 134 if first == c:
135 135 return (_('guard %r starts with invalid character: %r') %
136 136 (guard, c))
137 137 for c in bad_chars:
138 138 if c in guard:
139 139 return _('invalid character in guard %r: %r') % (guard, c)
140 140
    def set_active(self, guards):
        """Validate `guards` and record them as the active guard set."""
        for guard in guards:
            bad = self.check_guard(guard)
            if bad:
                raise util.Abort(bad)
        # de-duplicate, then sort for stable debug output and file contents
        guards = dict.fromkeys(guards).keys()
        guards.sort()
        self.ui.debug('active guards: %s\n' % ' '.join(guards))
        self.active_guards = guards
        self.guards_dirty = True
151 151
    def active(self):
        """Return the active guards, lazily reading the guards file on
        first use; invalid entries are warned about and skipped."""
        if self.active_guards is None:
            self.active_guards = []
            try:
                guards = self.opener(self.guards_path).read().split()
            except IOError, err:
                # a missing guards file just means no guards are active
                if err.errno != errno.ENOENT: raise
                guards = []
            for i, guard in enumerate(guards):
                bad = self.check_guard(guard)
                if bad:
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guards_path), i + 1, bad))
                else:
                    self.active_guards.append(guard)
        return self.active_guards
168 168
    def set_guards(self, idx, guards):
        """Replace the guards of series entry `idx` with `guards`; each
        guard must be '+name' or '-name'."""
        for g in guards:
            if len(g) < 2:
                raise util.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise util.Abort(_('guard %r starts with invalid char') % g)
            bad = self.check_guard(g[1:])
            if bad:
                raise util.Abort(bad)
        # strip existing guard annotations, then append the new ones
        drop = self.guard_re.sub('', self.full_series[idx])
        self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
        self.parse_series()
        self.series_dirty = True
182 182
183 183 def pushable(self, idx):
184 184 if isinstance(idx, str):
185 185 idx = self.series.index(idx)
186 186 patchguards = self.series_guards[idx]
187 187 if not patchguards:
188 188 return True, None
189 189 default = False
190 190 guards = self.active()
191 191 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
192 192 if exactneg:
193 193 return False, exactneg[0]
194 194 pos = [g for g in patchguards if g[0] == '+']
195 195 exactpos = [g for g in pos if g[1:] in guards]
196 196 if pos:
197 197 if exactpos:
198 198 return True, exactpos[0]
199 199 return False, pos
200 200 return True, ''
201 201
    def explain_pushable(self, idx, all_patches=False):
        """Tell the user why series entry `idx` is allowed or skipped.
        Writes to stdout when all_patches is set, warns otherwise; quiet
        unless all_patches or verbose mode."""
        write = all_patches and self.ui.write or self.ui.warn
        if all_patches or self.ui.verbose:
            if isinstance(idx, str):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %r\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %r\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])
226 226
    def save_dirty(self):
        """Flush any modified status/series/guards state back to disk."""
        def write_list(items, path):
            # one item per line
            fp = self.opener(path, 'w')
            for i in items:
                fp.write("%s\n" % i)
            fp.close()
        if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
        if self.series_dirty: write_list(self.full_series, self.series_path)
        if self.guards_dirty: write_list(self.active_guards, self.guards_path)
236 236
    def readheaders(self, patch):
        """Parse the leading text of patch file `patch`.

        Returns (message, comments, user, date, patchfound): message and
        comments are lists of lines, user/date come from hg export
        headers or mail-style tags, patchfound tells whether an actual
        diff body was seen."""
        def eatdiff(lines):
            # strip trailing diff header junk from the collected lines
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        def eatempty(lines):
            # strip trailing blank lines
            while lines:
                l = lines[-1]
                if re.match('\s*$', l):
                    del lines[-1]
                else:
                    break

        pf = self.join(patch)
        message = []
        comments = []
        user = None
        date = None
        format = None
        subject = None
        diffstart = 0    # 0 = no diff yet, 1 = saw '--- ', 2 = real diff

        for line in file(pf):
            line = line.rstrip()
            if line.startswith('diff --git'):
                diffstart = 2
                break
            if diffstart:
                if line.startswith('+++ '):
                    diffstart = 2
                    break
                if line.startswith("--- "):
                    diffstart = 1
                    continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif not line.startswith("# ") and line:
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
                comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, "")
            message.insert(0, subject)
        return (message, comments, user, date, diffstart > 1)
313 313
314 314 def removeundo(self, repo):
315 315 undo = repo.sjoin('undo')
316 316 if not os.path.exists(undo):
317 317 return
318 318 try:
319 319 os.unlink(undo)
320 320 except OSError, inst:
321 321 self.ui.warn('error removing undo: %s\n' % str(inst))
322 322
    def printdiff(self, repo, node1, node2=None, files=None,
                  fp=None, changes=None, opts={}):
        """Write a diff between node1 and node2 (or the working dir),
        using this queue's diff options."""
        m = cmdutil.match(repo, files, opts)
        patch.diff(repo, node1, node2, m, fp, changes, self.diffopts())
327 327
    def mergeone(self, repo, mergeq, head, patch, rev):
        """Bring one patch over from `mergeq`: try a plain apply first,
        fall back to stripping and merging with `rev`.  Returns
        (err, newnode) and rewrites the patch file with the merged
        diff on success."""
        # first try just applying the patch
        (err, n) = self.apply(repo, [ patch ], update_status=False,
                              strict=True, merge=rev)

        if err == 0:
            return (err, n)

        if n is None:
            raise util.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn("patch didn't work out, merging %s\n" % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        self.strip(repo, n, update=False, backup='strip')

        ctx = repo[rev]
        ret = hg.merge(repo, rev)
        if ret:
            raise util.Abort(_("update returned %d") % ret)
        # commit the merge reusing the original description and user
        n = repo.commit(None, ctx.description(), ctx.user(), force=1)
        if n == None:
            raise util.Abort(_("repo commit failed"))
        try:
            message, comments, user, date, patchfound = mergeq.readheaders(patch)
        except:
            raise util.Abort(_("unable to read %s") % patch)

        # regenerate the patch file from the merged result
        patchf = self.opener(patch, "w")
        if comments:
            comments = "\n".join(comments) + '\n\n'
            patchf.write(comments)
        self.printdiff(repo, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)
365 365
    def qparents(self, repo, rev=None):
        """Return the patch-queue parent of `rev`, or of the working
        directory when rev is None."""
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == revlog.nullid:
                return p1
            if len(self.applied) == 0:
                return None
            # merge in progress: the queue parent is the top applied patch
            return revlog.bin(self.applied[-1].rev)
        pp = repo.changelog.parents(rev)
        if pp[1] != revlog.nullid:
            # merge revision: prefer whichever parent is an applied patch
            arevs = [ x.rev for x in self.applied ]
            p0 = revlog.hex(pp[0])
            p1 = revlog.hex(pp[1])
            if p0 in arevs:
                return pp[0]
            if p1 in arevs:
                return pp[1]
        return pp[0]
384 384
    def mergepatch(self, repo, mergeq, series):
        """Merge each pushable patch of `series` from `mergeq` into this
        queue.  Returns (err, headnode)."""
        if len(self.applied) == 0:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
            self.removeundo(repo)
            self.applied.append(statusentry(revlog.hex(n), pname))
            self.applied_dirty = 1

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn("patch %s does not exist\n" % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                self.explain_pushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn("patch %s is not applied\n" % patch)
                return (1, None)
            rev = revlog.bin(info[1])
            (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
            if head:
                self.applied.append(statusentry(revlog.hex(head), patch))
                self.applied_dirty = 1
            if err:
                return (err, head)
        self.save_dirty()
        return (0, head)
423 423
    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: file name of patch
        Returns (success, files touched, fuzz).'''
        files = {}
        try:
            fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
                               files=files)
        except Exception, inst:
            # report and signal failure instead of propagating; caller
            # decides whether to stop the push
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn("patch failed, unable to continue (try -v)\n")
            return (False, files, False)

        return (True, files, fuzz)
438 438
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files={}):
        """Apply `series` under wlock/lock inside one transaction,
        aborting the transaction and invalidating caches on failure.
        Delegates the real work to _apply()."""
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction()
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files)
                tr.close()
                self.save_dirty()
                return ret
            except:
                try:
                    tr.abort()
                finally:
                    # caches may reference rolled-back state
                    repo.invalidate()
                    repo.dirstate.invalidate()
                raise
        finally:
            del tr, lock, wlock
            self.removeundo(repo)
462 462
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files={}):
        """Worker for apply(): apply every pushable patch in `series`,
        committing one changeset per patch.  Returns (err, lastnode).
        NOTE: all_files is a shared mutable default used as an out
        parameter by callers."""
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explain_pushable(patchname, all_patches=True)
                continue
            self.ui.warn("applying %s\n" % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                message, comments, user, date, patchfound = self.readheaders(patchname)
            except:
                self.ui.warn("Unable to read %s\n" % patchname)
                err = 1
                break

            if not message:
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            (patcherr, files, fuzz) = self.patch(repo, pf)
            all_files.update(files)
            patcherr = not patcherr

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.exists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.dirstate.setparents(p1, merge)

            files = patch.updatedir(self.ui, repo, files)
            match = cmdutil.matchfiles(repo, files or [])
            n = repo.commit(files, message, user, date, match=match,
                            force=True)

            if n == None:
                raise util.Abort(_("repo commit failed"))

            if update_status:
                self.applied.append(statusentry(revlog.hex(n), patchname))

            if patcherr:
                # an empty patch counts as applied; rejects stop the push
                if not patchfound:
                    self.ui.warn("patch %s is empty\n" % patchname)
                    err = 0
                else:
                    self.ui.warn("patch failed, rejects left in working dir\n")
                    err = 1
                break

            if fuzz and strict:
                self.ui.warn("fuzz found when applying patch, stopping\n")
                err = 1
                break
        return (err, n)
537 537
538 538 def _clean_series(self, patches):
539 539 indices = [self.find_series(p) for p in patches]
540 540 indices.sort()
541 541 for i in indices[-1::-1]:
542 542 del self.full_series[i]
543 543 self.parse_series()
544 544 self.series_dirty = 1
545 545
    def finish(self, repo, revs):
        """Move the applied patches in `revs` out of mq control so they
        become regular changesets; revs must form a contiguous run
        starting at the bottom of the applied stack."""
        revs.sort()
        firstrev = repo.changelog.rev(revlog.bin(self.applied[0].rev))
        appliedbase = 0
        patches = []
        for rev in revs:
            if rev < firstrev:
                raise util.Abort(_('revision %d is not managed') % rev)
            # each rev must match the next patch from the bottom of the stack
            base = revlog.bin(self.applied[appliedbase].rev)
            node = repo.changelog.node(rev)
            if node != base:
                raise util.Abort(_('cannot delete revision %d above '
                                   'applied patches') % rev)
            patches.append(self.applied[appliedbase].name)
            appliedbase += 1

        # remove the patch files, via the patch repo when one exists
        r = self.qrepo()
        if r:
            r.remove(patches, True)
        else:
            for p in patches:
                os.unlink(self.join(p))

        del self.applied[:appliedbase]
        self.applied_dirty = 1
        self._clean_series(patches)
572 572
    def delete(self, repo, patches, opts):
        """Remove unapplied `patches` (and the applied patches covering
        opts['rev'], if given) from the series; patch files are kept on
        disk when opts['keep'] is set."""
        if not patches and not opts.get('rev'):
            raise util.Abort(_('qdelete requires at least one revision or '
                               'patch name'))

        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            realpatches.append(patch)

        appliedbase = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = cmdutil.revrange(repo, opts['rev'])
            if len(revs) > 1 and revs[0] > revs[1]:
                revs.reverse()
            for rev in revs:
                if appliedbase >= len(self.applied):
                    raise util.Abort(_("revision %d is not managed") % rev)

                # revs must match the applied stack from the bottom up
                base = revlog.bin(self.applied[appliedbase].rev)
                node = repo.changelog.node(rev)
                if node != base:
                    raise util.Abort(_("cannot delete revision %d above "
                                       "applied patches") % rev)
                realpatches.append(self.applied[appliedbase].name)
                appliedbase += 1

        if not opts.get('keep'):
            r = self.qrepo()
            if r:
                r.remove(realpatches, True)
            else:
                for p in realpatches:
                    os.unlink(self.join(p))

        if appliedbase:
            del self.applied[:appliedbase]
            self.applied_dirty = 1
        self._clean_series(realpatches)
619 619
620 620 def check_toppatch(self, repo):
621 621 if len(self.applied) > 0:
622 622 top = revlog.bin(self.applied[-1].rev)
623 623 pp = repo.dirstate.parents()
624 624 if top not in pp:
625 625 raise util.Abort(_("working directory revision is not qtip"))
626 626 return top
627 627 return None
628 628 def check_localchanges(self, repo, force=False, refresh=True):
629 629 m, a, r, d = repo.status()[:4]
630 630 if m or a or r or d:
631 631 if not force:
632 632 if refresh:
633 633 raise util.Abort(_("local changes found, refresh first"))
634 634 else:
635 635 raise util.Abort(_("local changes found"))
636 636 return m, a, r, d
637 637
638 638 _reserved = ('series', 'status', 'guards')
639 639 def check_reserved_name(self, name):
640 640 if (name in self._reserved or name.startswith('.hg')
641 641 or name.startswith('.mq')):
642 642 raise util.Abort(_('"%s" cannot be used as the name of a patch')
643 643 % name)
644 644
645 645 def new(self, repo, patch, *pats, **opts):
646 646 msg = opts.get('msg')
647 647 force = opts.get('force')
648 648 user = opts.get('user')
649 649 date = opts.get('date')
650 650 if date:
651 651 date = util.parsedate(date)
652 652 self.check_reserved_name(patch)
653 653 if os.path.exists(self.join(patch)):
654 654 raise util.Abort(_('patch "%s" already exists') % patch)
655 655 if opts.get('include') or opts.get('exclude') or pats:
656 656 match = cmdutil.match(repo, pats, opts)
657 657 m, a, r, d = repo.status(match=match)[:4]
658 658 else:
659 659 m, a, r, d = self.check_localchanges(repo, force)
660 660 match = cmdutil.match(repo, m + a + r)
661 661 commitfiles = m + a + r
662 662 self.check_toppatch(repo)
663 663 wlock = repo.wlock()
664 664 try:
665 665 insert = self.full_series_end()
666 666 commitmsg = msg and msg or ("[mq]: %s" % patch)
667 667 n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
668 668 if n == None:
669 669 raise util.Abort(_("repo commit failed"))
670 670 self.full_series[insert:insert] = [patch]
671 671 self.applied.append(statusentry(revlog.hex(n), patch))
672 672 self.parse_series()
673 673 self.series_dirty = 1
674 674 self.applied_dirty = 1
675 675 p = self.opener(patch, "w")
676 676 if date:
677 677 p.write("# HG changeset patch\n")
678 678 if user:
679 679 p.write("# User " + user + "\n")
680 680 p.write("# Date %d %d\n" % date)
681 681 p.write("\n")
682 682 elif user:
683 683 p.write("From: " + user + "\n")
684 684 p.write("\n")
685 685 if msg:
686 686 msg = msg + "\n"
687 687 p.write(msg)
688 688 p.close()
689 689 wlock = None
690 690 r = self.qrepo()
691 691 if r: r.add([patch])
692 692 if commitfiles:
693 693 self.refresh(repo, short=True, git=opts.get('git'))
694 694 self.removeundo(repo)
695 695 finally:
696 696 del wlock
697 697
    def strip(self, repo, rev, update=True, backup="all", force=None):
        """Remove rev and its descendants from the repository.

        With update, first move the working directory to the patch
        parent of rev (refusing on local changes unless force).  backup
        is passed through to repair.strip to control what gets saved.
        """
        wlock = lock = None
        try:
            # take both locks: we rewrite history and the working dir
            wlock = repo.wlock()
            lock = repo.lock()

            if update:
                self.check_localchanges(repo, force=force, refresh=False)
                urev = self.qparents(repo, rev)
                hg.clean(repo, urev)
                repo.dirstate.write()

            self.removeundo(repo)
            repair.strip(self.ui, repo, rev, backup)
            # strip may have unbundled a set of backed up revisions after
            # the actual strip
            self.removeundo(repo)
        finally:
            del lock, wlock
717 717
718 718 def isapplied(self, patch):
719 719 """returns (index, rev, patch)"""
720 720 for i in xrange(len(self.applied)):
721 721 a = self.applied[i]
722 722 if a.name == patch:
723 723 return (i, a.rev, a.name)
724 724 return None
725 725
726 726 # if the exact patch name does not exist, we try a few
727 727 # variations. If strict is passed, we try only #1
728 728 #
729 729 # 1) a number to indicate an offset in the series file
730 730 # 2) a unique substring of the patch name was given
731 731 # 3) patchname[-+]num to indicate an offset in the series file
732 732 def lookup(self, patch, strict=False):
733 733 patch = patch and str(patch)
734 734
735 735 def partial_name(s):
736 736 if s in self.series:
737 737 return s
738 738 matches = [x for x in self.series if s in x]
739 739 if len(matches) > 1:
740 740 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
741 741 for m in matches:
742 742 self.ui.warn(' %s\n' % m)
743 743 return None
744 744 if matches:
745 745 return matches[0]
746 746 if len(self.series) > 0 and len(self.applied) > 0:
747 747 if s == 'qtip':
748 748 return self.series[self.series_end(True)-1]
749 749 if s == 'qbase':
750 750 return self.series[0]
751 751 return None
752 752 if patch == None:
753 753 return None
754 754
755 755 # we don't want to return a partial match until we make
756 756 # sure the file name passed in does not exist (checked below)
757 757 res = partial_name(patch)
758 758 if res and res == patch:
759 759 return res
760 760
761 761 if not os.path.isfile(self.join(patch)):
762 762 try:
763 763 sno = int(patch)
764 764 except(ValueError, OverflowError):
765 765 pass
766 766 else:
767 767 if sno < len(self.series):
768 768 return self.series[sno]
769 769 if not strict:
770 770 # return any partial match made above
771 771 if res:
772 772 return res
773 773 minus = patch.rfind('-')
774 774 if minus >= 0:
775 775 res = partial_name(patch[:minus])
776 776 if res:
777 777 i = self.series.index(res)
778 778 try:
779 779 off = int(patch[minus+1:] or 1)
780 780 except(ValueError, OverflowError):
781 781 pass
782 782 else:
783 783 if i - off >= 0:
784 784 return self.series[i - off]
785 785 plus = patch.rfind('+')
786 786 if plus >= 0:
787 787 res = partial_name(patch[:plus])
788 788 if res:
789 789 i = self.series.index(res)
790 790 try:
791 791 off = int(patch[plus+1:] or 1)
792 792 except(ValueError, OverflowError):
793 793 pass
794 794 else:
795 795 if i + off < len(self.series):
796 796 return self.series[i + off]
797 797 raise util.Abort(_("patch %s not in series") % patch)
798 798
    def push(self, repo, patch=None, force=False, list=False,
             mergeq=None):
        # Apply the next patch(es) in the series, up to and including
        # `patch` when given.  Returns the apply status (0 on success,
        # non-zero on patch errors) or 1 when the series is already
        # fully applied; returns None for the already-at-top no-ops.
        wlock = repo.wlock()
        if repo.dirstate.parents()[0] != repo.changelog.tip():
            self.ui.status(_("(working directory not at tip)\n"))

        try:
            patch = self.lookup(patch)
            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                info = self.isapplied(patch)
                if info:
                    if info[0] < len(self.applied) - 1:
                        raise util.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                    if info[0] < len(self.series) - 1:
                        self.ui.warn(
                            _('qpush: %s is already at the top\n') % patch)
                    else:
                        self.ui.warn(_('all patches are currently applied\n'))
                    return

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            if self.series_end() == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force:
                self.check_localchanges(repo)

            self.applied_dirty = 1;
            start = self.series_end()
            if start > 0:
                self.check_toppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1
            # the slice of patches to apply in this push
            s = self.series[start:end]
            all_files = {}
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files)
            except:
                # on any failure, roll the working dir back to its
                # pre-push state before re-raising
                self.ui.warn(_('cleaning up working directory...'))
                node = repo.dirstate.parents()[0]
                hg.revert(repo, node, None)
                unknown = repo.status()[4]
                # only remove unknown files that we know we touched or
                # created while patching
                for f in unknown:
                    if f in all_files:
                        util.unlink(repo.wjoin(f))
                self.ui.warn(_('done\n'))
                raise
            top = self.applied[-1].name
            if ret[0]:
                self.ui.write(
                    "Errors during apply, please fix and refresh %s\n" % top)
            else:
                self.ui.write("Now at: %s\n" % top)
            return ret[0]
        finally:
            del wlock
872 872
    def pop(self, repo, patch=None, force=False, update=True, all=False):
        # Unapply applied patches: down to (but keeping) `patch` when
        # given, just the top one by default, or everything with all.
        # Strips the popped changesets and updates the dirstate by hand.
        def getfile(f, rev, flags):
            # restore file f to its content at rev in the working dir
            t = repo.file(f).read(rev)
            repo.wwrite(f, t, flags)

        wlock = repo.wlock()
        try:
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                info = self.isapplied(patch)
                if not info:
                    raise util.Abort(_("patch %s is not applied") % patch)

            if len(self.applied) == 0:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            if not update:
                # refuse to leave the dirstate pointing at a changeset
                # we are about to strip
                parents = repo.dirstate.parents()
                rr = [ revlog.bin(x.rev) for x in self.applied ]
                for p in parents:
                    if p in rr:
                        self.ui.warn("qpop: forcing dirstate update\n")
                        update = True

            if not force and update:
                self.check_localchanges(repo)

            self.applied_dirty = 1;
            end = len(self.applied)
            if not patch:
                if all:
                    popi = 0
                else:
                    popi = len(self.applied) - 1
            else:
                popi = info[0] + 1
                if popi >= end:
                    self.ui.warn("qpop: %s is already at the top\n" % patch)
                    return
            # entry of the first patch to be popped
            info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]

            start = info[0]
            rev = revlog.bin(info[1])

            if update:
                top = self.check_toppatch(repo)

            # refuse to strip anything not descended from our own stack
            if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
                raise util.Abort("popping would remove a revision not "
                                 "managed by this patch queue")

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                changes = repo.changelog.read(qp)
                mmap = repo.manifest.read(changes[0])
                m, a, r, d, u = repo.status(qp, top)[:5]
                if d:
                    raise util.Abort("deletions found between repo revs")
                for f in m:
                    getfile(f, mmap[f], mmap.flags(f))
                for f in r:
                    getfile(f, mmap[f], mmap.flags(f))
                for f in m + r:
                    repo.dirstate.normal(f)
                for f in a:
                    # files added by the popped patches disappear entirely
                    try:
                        os.unlink(repo.wjoin(f))
                    except OSError, e:
                        if e.errno != errno.ENOENT:
                            raise
                    try: os.removedirs(os.path.dirname(repo.wjoin(f)))
                    except: pass
                    repo.dirstate.forget(f)
                repo.dirstate.setparents(qp, revlog.nullid)
            del self.applied[start:end]
            self.strip(repo, rev, update=False, backup='strip')
            if len(self.applied):
                self.ui.write("Now at: %s\n" % self.applied[-1].name)
            else:
                self.ui.write("Patch queue now empty\n")
        finally:
            del wlock
963 963
964 964 def diff(self, repo, pats, opts):
965 965 top = self.check_toppatch(repo)
966 966 if not top:
967 967 self.ui.write("No patches applied\n")
968 968 return
969 969 qp = self.qparents(repo, top)
970 970 self._diffopts = patch.diffopts(self.ui, opts)
971 971 self.printdiff(repo, qp, files=pats, opts=opts)
972 972
    def refresh(self, repo, pats=None, **opts):
        # qrefresh: fold the current working-directory changes into the
        # top applied patch, rewriting both the patch file and the
        # corresponding changeset.  Also updates patch-header fields
        # (message/user/date) on request.
        if len(self.applied) == 0:
            self.ui.write("No patches applied\n")
            return 1
        newdate = opts.get('date')
        if newdate:
            newdate = '%d %d' % util.parsedate(newdate)
        wlock = repo.wlock()
        try:
            self.check_toppatch(repo)
            (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
            top = revlog.bin(top)
            if repo.changelog.heads(top) != [top]:
                raise util.Abort("cannot refresh a revision with children")
            cparents = repo.changelog.parents(top)
            patchparent = self.qparents(repo, top)
            message, comments, user, date, patchfound = self.readheaders(patchfn)

            patchf = self.opener(patchfn, 'r+')

            # if the patch was a git patch, refresh it as a git patch
            for line in patchf:
                if line.startswith('diff --git'):
                    self.diffopts().git = True
                    break

            msg = opts.get('msg', '').rstrip()
            if msg and comments:
                # Remove existing message, keeping the rest of the comments
                # fields.
                # If comments contains 'subject: ', message will prepend
                # the field and a blank line.
                if message:
                    subj = 'subject: ' + message[0].lower()
                    for i in xrange(len(comments)):
                        if subj == comments[i].lower():
                            del comments[i]
                            message = message[2:]
                            break
                ci = 0
                for mi in xrange(len(message)):
                    while message[mi] != comments[ci]:
                        ci += 1
                    del comments[ci]

            def setheaderfield(comments, prefixes, new):
                # Update all references to a field in the patch header.
                # If none found, add it email style.
                res = False
                for prefix in prefixes:
                    for i in xrange(len(comments)):
                        if comments[i].startswith(prefix):
                            comments[i] = prefix + new
                            res = True
                            break
                return res

            newuser = opts.get('user')
            if newuser:
                if not setheaderfield(comments, ['From: ', '# User '], newuser):
                    try:
                        patchheaderat = comments.index('# HG changeset patch')
                        comments.insert(patchheaderat + 1,'# User ' + newuser)
                    except ValueError:
                        comments = ['From: ' + newuser, ''] + comments
                user = newuser

            if newdate:
                if setheaderfield(comments, ['# Date '], newdate):
                    date = newdate

            if msg:
                comments.append(msg)

            # rewrite the patch file from scratch: headers first, diff below
            patchf.seek(0)
            patchf.truncate()

            if comments:
                comments = "\n".join(comments) + '\n\n'
                patchf.write(comments)

            if opts.get('git'):
                self.diffopts().git = True
            matchfn = cmdutil.match(repo, pats, opts)
            tip = repo.changelog.tip()
            if top == tip:
                # if the top of our patch queue is also the tip, there is an
                # optimization here. We update the dirstate in place and strip
                # off the tip commit. Then just commit the current directory
                # tree. We can also send repo.commit the list of files
                # changed to speed up the diff
                #
                # in short mode, we only diff the files included in the
                # patch already
                #
                # this should really read:
                # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
                # but we do it backwards to take advantage of manifest/chlog
                # caching against the next repo.status call
                #
                mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
                changes = repo.changelog.read(tip)
                man = repo.manifest.read(changes[0])
                aaa = aa[:]
                if opts.get('short'):
                    match = cmdutil.matchfiles(repo, mm + aa + dd)
                else:
                    match = cmdutil.matchall(repo)
                m, a, r, d, u = repo.status(match=match)[:5]

                # we might end up with files that were added between
                # tip and the dirstate parent, but then changed in the
                # local dirstate. in this case, we want them to only
                # show up in the added section
                for x in m:
                    if x not in aa:
                        mm.append(x)
                # we might end up with files added by the local dirstate that
                # were deleted by the patch. In this case, they should only
                # show up in the changed section.
                for x in a:
                    if x in dd:
                        del dd[dd.index(x)]
                        mm.append(x)
                    else:
                        aa.append(x)
                # make sure any files deleted in the local dirstate
                # are not in the add or change column of the patch
                forget = []
                for x in d + r:
                    if x in aa:
                        del aa[aa.index(x)]
                        forget.append(x)
                        continue
                    elif x in mm:
                        del mm[mm.index(x)]
                    dd.append(x)

                m = util.unique(mm)
                r = util.unique(dd)
                a = util.unique(aa)
                c = [filter(matchfn, l) for l in (m, a, r, [], u)]
                match = cmdutil.matchfiles(repo, util.unique(c[0] + c[1] + c[2]))
                patch.diff(repo, patchparent, match=match,
                           fp=patchf, changes=c, opts=self.diffopts())
                patchf.close()

                repo.dirstate.setparents(*cparents)
                copies = {}
                for dst in a:
                    src = repo.dirstate.copied(dst)
                    if src is not None:
                        copies.setdefault(src, []).append(dst)
                    repo.dirstate.add(dst)
                # remember the copies between patchparent and tip
                # this may be slow, so don't do it if we're not tracking copies
                if self.diffopts().git:
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies[src[0]] = copies.get(dst, [])
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                for src, dsts in copies.iteritems():
                    for dst in dsts:
                        repo.dirstate.copy(src, dst)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in xrange(len(m)-1, -1, -1):
                    if not matchfn(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.forget(f)

                if not msg:
                    if not message:
                        message = "[mq]: %s\n" % patchfn
                    else:
                        message = "\n".join(message)
                else:
                    message = msg

                if not user:
                    user = changes[1]

                # replace the tip changeset: drop it, then commit the
                # refreshed state as the new patch changeset
                self.applied.pop()
                self.applied_dirty = 1
                self.strip(repo, top, update=False,
                           backup='strip')
                n = repo.commit(match.files(), message, user, date, match=match,
                                force=1)
                self.applied.append(statusentry(revlog.hex(n), patchfn))
                self.removeundo(repo)
            else:
                # slow path: tip is not the top patch; rewrite the patch
                # file, then pop and re-push to regenerate the changeset
                self.printdiff(repo, patchparent, fp=patchf)
                patchf.close()
                added = repo.status()[1]
                for a in added:
                    f = repo.wjoin(a)
                    try:
                        os.unlink(f)
                    except OSError, e:
                        if e.errno != errno.ENOENT:
                            raise
                    try: os.removedirs(os.path.dirname(f))
                    except: pass
                    # forget the file copies in the dirstate
                    # push should readd the files later on
                    repo.dirstate.forget(a)
                self.pop(repo, force=True)
                self.push(repo, force=True)
        finally:
            del wlock
1198 1198
1199 1199 def init(self, repo, create=False):
1200 1200 if not create and os.path.isdir(self.path):
1201 1201 raise util.Abort(_("patch queue directory already exists"))
1202 1202 try:
1203 1203 os.mkdir(self.path)
1204 1204 except OSError, inst:
1205 1205 if inst.errno != errno.EEXIST or not create:
1206 1206 raise
1207 1207 if create:
1208 1208 return self.qrepo(create=True)
1209 1209
1210 1210 def unapplied(self, repo, patch=None):
1211 1211 if patch and patch not in self.series:
1212 1212 raise util.Abort(_("patch %s is not in series file") % patch)
1213 1213 if not patch:
1214 1214 start = self.series_end()
1215 1215 else:
1216 1216 start = self.series.index(patch) + 1
1217 1217 unapplied = []
1218 1218 for i in xrange(start, len(self.series)):
1219 1219 pushable, reason = self.pushable(i)
1220 1220 if pushable:
1221 1221 unapplied.append((i, self.series[i]))
1222 1222 self.explain_pushable(i)
1223 1223 return unapplied
1224 1224
    def qseries(self, repo, missing=None, start=0, length=None, status=None,
                summary=False):
        # Print the patch series: status letter A(pplied)/U(napplied)/
        # G(uarded) in verbose mode, optional first summary line.  With
        # missing, instead list files in the patch directory that are
        # not tracked in the series.
        def displayname(patchname):
            # append ": <first header line>" when summary is requested
            if summary:
                msg = self.readheaders(patchname)[0]
                msg = msg and ': ' + msg[0] or ': '
            else:
                msg = ''
            return '%s%s' % (patchname, msg)

        # set-like lookup of applied patch names
        applied = dict.fromkeys([p.name for p in self.applied])
        if length is None:
            length = len(self.series) - start
        if not missing:
            for i in xrange(start, start+length):
                patch = self.series[i]
                if patch in applied:
                    stat = 'A'
                elif self.pushable(i)[0]:
                    stat = 'U'
                else:
                    stat = 'G'
                pfx = ''
                if self.ui.verbose:
                    pfx = '%d %s ' % (i, stat)
                elif status and status != stat:
                    continue
                self.ui.write('%s%s\n' % (pfx, displayname(patch)))
        else:
            # scan the patch directory for stray patch files
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.status_path, self.series_path,
                                   self.guards_path)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            msng_list.sort()
            for x in msng_list:
                pfx = self.ui.verbose and ('D ') or ''
                self.ui.write("%s%s\n" % (pfx, displayname(x)))
1268 1268
1269 1269 def issaveline(self, l):
1270 1270 if l.name == '.hg.patches.save.line':
1271 1271 return True
1272 1272
1273 1273 def qrepo(self, create=False):
1274 1274 if create or os.path.isdir(self.join(".hg")):
1275 1275 return hg.repository(self.ui, path=self.path, create=create)
1276 1276
1277 1277 def restore(self, repo, rev, delete=None, qupdate=None):
1278 1278 c = repo.changelog.read(rev)
1279 1279 desc = c[4].strip()
1280 1280 lines = desc.splitlines()
1281 1281 i = 0
1282 1282 datastart = None
1283 1283 series = []
1284 1284 applied = []
1285 1285 qpp = None
1286 1286 for i in xrange(0, len(lines)):
1287 1287 if lines[i] == 'Patch Data:':
1288 1288 datastart = i + 1
1289 1289 elif lines[i].startswith('Dirstate:'):
1290 1290 l = lines[i].rstrip()
1291 1291 l = l[10:].split(' ')
1292 1292 qpp = [ bin(x) for x in l ]
1293 1293 elif datastart != None:
1294 1294 l = lines[i].rstrip()
1295 1295 se = statusentry(l)
1296 1296 file_ = se.name
1297 1297 if se.rev:
1298 1298 applied.append(se)
1299 1299 else:
1300 1300 series.append(file_)
1301 1301 if datastart == None:
1302 1302 self.ui.warn("No saved patch data found\n")
1303 1303 return 1
1304 1304 self.ui.warn("restoring status: %s\n" % lines[0])
1305 1305 self.full_series = series
1306 1306 self.applied = applied
1307 1307 self.parse_series()
1308 1308 self.series_dirty = 1
1309 1309 self.applied_dirty = 1
1310 1310 heads = repo.changelog.heads()
1311 1311 if delete:
1312 1312 if rev not in heads:
1313 1313 self.ui.warn("save entry has children, leaving it alone\n")
1314 1314 else:
1315 1315 self.ui.warn("removing save entry %s\n" % short(rev))
1316 1316 pp = repo.dirstate.parents()
1317 1317 if rev in pp:
1318 1318 update = True
1319 1319 else:
1320 1320 update = False
1321 1321 self.strip(repo, rev, update=update, backup='strip')
1322 1322 if qpp:
1323 1323 self.ui.warn("saved queue repository parents: %s %s\n" %
1324 1324 (short(qpp[0]), short(qpp[1])))
1325 1325 if qupdate:
1326 1326 self.ui.status(_("queue directory updating\n"))
1327 1327 r = self.qrepo()
1328 1328 if not r:
1329 1329 self.ui.warn("Unable to load queue repository\n")
1330 1330 return 1
1331 1331 hg.clean(r, qpp[0])
1332 1332
    def save(self, repo, msg=None):
        # qsave: snapshot the queue state (series + applied stack +
        # dirstate parents) into a changeset, and mark the snapshot with
        # a '.hg.patches.save.line' entry so restore() can find it.
        if len(self.applied) == 0:
            self.ui.warn("save: no patches applied, exiting\n")
            return 1
        if self.issaveline(self.applied[-1]):
            self.ui.warn("status is already saved\n")
            return 1

        # unapplied series entries are serialized with a ':' prefix
        ar = [ ':' + x for x in self.full_series ]
        if not msg:
            msg = "hg patches saved state"
        else:
            msg = "hg patches: " + msg.rstrip('\r\n')
        r = self.qrepo()
        if r:
            pp = r.dirstate.parents()
            msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
        msg += "\n\nPatch Data:\n"
        text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
                   "\n".join(ar) + '\n' or "")
        n = repo.commit(None, text, user=None, force=1)
        if not n:
            self.ui.warn("repo commit failed\n")
            return 1
        self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
        self.applied_dirty = 1
        self.removeundo(repo)
1360 1360
1361 1361 def full_series_end(self):
1362 1362 if len(self.applied) > 0:
1363 1363 p = self.applied[-1].name
1364 1364 end = self.find_series(p)
1365 1365 if end == None:
1366 1366 return len(self.full_series)
1367 1367 return end + 1
1368 1368 return 0
1369 1369
1370 1370 def series_end(self, all_patches=False):
1371 1371 """If all_patches is False, return the index of the next pushable patch
1372 1372 in the series, or the series length. If all_patches is True, return the
1373 1373 index of the first patch past the last applied one.
1374 1374 """
1375 1375 end = 0
1376 1376 def next(start):
1377 1377 if all_patches:
1378 1378 return start
1379 1379 i = start
1380 1380 while i < len(self.series):
1381 1381 p, reason = self.pushable(i)
1382 1382 if p:
1383 1383 break
1384 1384 self.explain_pushable(i)
1385 1385 i += 1
1386 1386 return i
1387 1387 if len(self.applied) > 0:
1388 1388 p = self.applied[-1].name
1389 1389 try:
1390 1390 end = self.series.index(p)
1391 1391 except ValueError:
1392 1392 return 0
1393 1393 return next(end + 1)
1394 1394 return next(end)
1395 1395
1396 1396 def appliedname(self, index):
1397 1397 pname = self.applied[index].name
1398 1398 if not self.ui.verbose:
1399 1399 p = pname
1400 1400 else:
1401 1401 p = str(self.series.index(pname)) + " " + pname
1402 1402 return p
1403 1403
    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        # Import patches into the queue, either from existing revisions
        # (rev) -- which become applied patches -- or from patch files /
        # stdin, which are inserted after the last applied patch.
        def checkseries(patchname):
            if patchname in self.series:
                raise util.Abort(_('patch %s is already in the series file')
                                 % patchname)
        def checkfile(patchname):
            if not force and os.path.exists(self.join(patchname)):
                raise util.Abort(_('patch "%s" already exists')
                                 % patchname)

        if rev:
            if files:
                raise util.Abort(_('option "-r" not valid when importing '
                                   'files'))
            rev = cmdutil.revrange(repo, rev)
            # process revisions newest-first
            rev.sort(lambda x, y: cmp(y, x))
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise util.Abort(_('option "-n" not valid when importing multiple '
                               'patches'))
        i = 0
        added = []
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
            if len(heads) > 1:
                raise util.Abort(_('revision %d is the root of more than one '
                                   'branch') % rev[-1])
            if self.applied:
                base = revlog.hex(repo.changelog.node(rev[0]))
                if base in [n.rev for n in self.applied]:
                    raise util.Abort(_('revision %d is already managed')
                                     % rev[0])
                if heads != [revlog.bin(self.applied[-1].rev)]:
                    raise util.Abort(_('revision %d is not the parent of '
                                       'the queue') % rev[0])
                base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev[0])]:
                    raise util.Abort(_('revision %d has unmanaged children')
                                     % rev[0])
                lastparent = None

            if git:
                self.diffopts().git = True

            for r in rev:
                # enforce linearity: no merges, each rev parent of the last
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != revlog.nullrev:
                    raise util.Abort(_('cannot import merge revision %d') % r)
                if lastparent and lastparent != r:
                    raise util.Abort(_('revision %d is not the parent of %d')
                                     % (r, lastparent))
                lastparent = p1

                if not patchname:
                    patchname = normname('%d.diff' % r)
                self.check_reserved_name(patchname)
                checkseries(patchname)
                checkfile(patchname)
                # new patches go to the bottom of the series/applied stack
                self.full_series.insert(0, patchname)

                patchf = self.opener(patchname, "w")
                patch.export(repo, [n], fp=patchf, opts=self.diffopts())
                patchf.close()

                se = statusentry(revlog.hex(n), patchname)
                self.applied.insert(0, se)

                added.append(patchname)
                patchname = None
            self.parse_series()
            self.applied_dirty = 1

        for filename in files:
            if existing:
                # register a file already present in the patch directory
                if filename == '-':
                    raise util.Abort(_('-e is incompatible with import from -'))
                if not patchname:
                    patchname = normname(filename)
                self.check_reserved_name(patchname)
                if not os.path.isfile(self.join(patchname)):
                    raise util.Abort(_("patch %s does not exist") % patchname)
            else:
                # copy the patch content from a file or stdin
                try:
                    if filename == '-':
                        if not patchname:
                            raise util.Abort(_('need --name to import a patch from -'))
                        text = sys.stdin.read()
                    else:
                        text = file(filename, 'rb').read()
                except IOError:
                    raise util.Abort(_("unable to read %s") % patchname)
                if not patchname:
                    patchname = normname(os.path.basename(filename))
                self.check_reserved_name(patchname)
                checkfile(patchname)
                patchf = self.opener(patchname, "w")
                patchf.write(text)
            checkseries(patchname)
            # insert just after the last applied patch
            index = self.full_series_end() + i
            self.full_series[index:index] = [patchname]
            self.parse_series()
            self.ui.warn("adding %s to series file\n" % patchname)
            i += 1
            added.append(patchname)
            patchname = None
        self.series_dirty = 1
        qrepo = self.qrepo()
        if qrepo:
            qrepo.add(added)
1519 1519
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, unless they are arguments to
    the --rev parameter. At least one patch or revision is required.

    With --rev, mq will stop managing the named revisions (converting
    them to regular mercurial changesets). The qfinish command should be
    used as an alternative for qdel -r, as the latter option is deprecated.

    With --keep, the patch files are preserved in the patch directory."""
    # thin command wrapper: delegate to the queue object, then persist
    # the modified series/status files
    q = repo.mq
    q.delete(repo, patches, opts)
    q.save_dirty()
    return 0
1535 1535
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    mq = repo.mq
    if not patch:
        # no patch given: list everything up to the series end
        last = mq.series_end(True)
    else:
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        last = mq.series.index(patch) + 1
    return mq.qseries(repo, length=last, status='A',
                      summary=opts.get('summary'))
1546 1546
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    q = repo.mq
    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    else:
        start = q.series_end(True)
    # propagate qseries' result as the command's exit status, matching
    # the sibling applied() command (previously the value was dropped)
    return q.qseries(repo, start=start, status='U',
                     summary=opts.get('summary'))
1557 1557
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch is inserted into the series after the last applied patch.
    If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with --name.

    You can register an existing patch inside the patch directory
    with the --existing flag.

    With --force, an existing patch of the same name will be overwritten.

    An existing changeset may be placed under mq control with --rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With --git, patches imported with --rev will use the git diff
    format.
    """
    # forward every relevant option to the queue's importer, one per line
    # for readability, then persist the updated series/status files
    mq = repo.mq
    mq.qimport(repo, filename,
               patchname=opts['name'],
               existing=opts['existing'],
               force=opts['force'],
               rev=opts['rev'],
               git=opts['git'])
    mq.save_dirty()
    return 0
1584 1584
def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If -c is
    specified, qinit will create a separate nested repository
    for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one).
    You can use qcommit to commit changes to this queue repository."""
    mq = repo.mq
    qrepo = mq.init(repo, create=opts['create_repo'])
    mq.save_dirty()
    if qrepo:
        # seed a default .hgignore for the nested patch repository,
        # unless one is already present
        if not os.path.exists(qrepo.wjoin('.hgignore')):
            fp = qrepo.wopener('.hgignore', 'w')
            fp.write('^\\.hg\n'
                     '^\\.mq\n'
                     'syntax: glob\n'
                     'status\n'
                     'guards\n')
            fp.close()
        # make sure an (empty) series file exists so it can be tracked
        if not os.path.exists(qrepo.wjoin('series')):
            qrepo.wopener('series', 'w').close()
        qrepo.add(['.hgignore', 'series'])
        commands.add(ui, qrepo)
    return 0
1610 1610
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested mercurial repository, as
    would be created by qinit -c.
    '''
    def patchdir(repo):
        # default location of the nested patch repository for a repo url
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'
    cmdutil.setremoteconfig(ui, opts)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(ui, ui.expandpath(source))
    patchespath = opts['patches'] or patchdir(sr)
    try:
        pr = hg.repository(ui, patchespath)
    except RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see qinit -c)'))
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            # node of the first applied patch; everything from here up
            # belongs to mq and should not end up applied in the clone
            qbase = revlog.bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                # remote destination: restrict the clone to the heads that
                # are not descendants of qbase, plus qbase's parent, so the
                # patch changesets are never transferred
                heads = dict.fromkeys(sr.heads())
                for h in sr.heads(qbase):
                    del heads[h]
                destrev = heads.keys()
                destrev.append(sr.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        # remote source: the best we can do is ask it where qbase is
        try:
            qbase = sr.lookup('qbase')
        except RepoError:
            pass
    ui.note(_('cloning main repo\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repo\n'))
    spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
                        pull=opts['pull'], update=not opts['noupdate'],
                        stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            # local-to-local clones copy everything, so the patch
            # changesets must be stripped from the destination afterwards
            ui.note(_('stripping applied patches from destination repo\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repo\n'))
            hg.update(dr, dr.changelog.tip())
1673 1673
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    q = repo.mq
    r = q.qrepo()
    if not r:
        # wrap the message in _() for translation, consistent with every
        # other abort message in this file
        raise util.Abort(_('no queue repository'))
    commands.commit(r.ui, r, *pats, **opts)
1680 1680
def series(ui, repo, **opts):
    """print the entire series file"""
    mq = repo.mq
    mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0
1685 1685
def top(ui, repo, **opts):
    """print the name of the current patch"""
    mq = repo.mq
    # index just past the topmost applied patch (0 when nothing applied)
    if mq.applied:
        pos = mq.series_end(True)
    else:
        pos = 0
    if not pos:
        ui.write("No patches applied\n")
        return 1
    return mq.qseries(repo, start=pos - 1, length=1, status='A',
                      summary=opts.get('summary'))
1696 1696
def next(ui, repo, **opts):
    """print the name of the next patch"""
    mq = repo.mq
    pos = mq.series_end()
    # series_end() == len(series) means nothing is left to push
    if pos == len(mq.series):
        ui.write("All patches applied\n")
        return 1
    return mq.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
1705 1705
def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    mq = repo.mq
    napplied = len(mq.applied)
    if napplied == 1:
        ui.write("Only one patch applied\n")
        return 1
    if napplied == 0:
        ui.write("No patches applied\n")
        return 1
    # the patch below the top one is at index napplied - 2
    return mq.qseries(repo, start=napplied - 2, length=1, status='A',
                      summary=opts.get('summary'))
1718 1718
def setupheaderopts(ui, opts):
    # fill in --user/--date from --currentuser/--currentdate when the
    # explicit option was not supplied
    defaults = [('user', ui.username()),
                ('date', "%d %d" % util.makedate())]
    for opt, val in defaults:
        if opts['current' + opt] and not opts[opt]:
            opts[opt] = val
1725 1725
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them. You may also use -I, -X, and/or a list of
    files after the patch name to add only changes to matching files
    to the new patch, leaving the rest as uncommitted modifications.

    -e, -m or -l set the patch header as well as the commit message.
    If none is specified, the patch header is empty and the
    commit message is '[mq]: PATCH'"""
    mq = repo.mq
    msg = cmdutil.logmessage(opts)
    if opts['edit']:
        # let the user edit the message before the patch is created
        msg = ui.edit(msg, ui.username())
    opts['msg'] = msg
    setupheaderopts(ui, opts)
    mq.new(repo, patch, *args, **opts)
    mq.save_dirty()
    return 0
1748 1748
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will contain only
    the modifications that match those patterns; the remaining modifications
    will remain in the working directory.

    hg add/remove/copy/rename work as usual, though you might want to use
    git-style patches (--git or [diff] git=1) to track copies and renames.
    """
    mq = repo.mq
    msg = cmdutil.logmessage(opts)
    if opts['edit']:
        if not mq.applied:
            ui.write(_("No patches applied\n"))
            return 1
        if msg:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        # edit the existing header of the topmost patch
        top = mq.applied[-1].name
        # readheaders returns (message, comments, user, date, hasdiff)
        hdr = mq.readheaders(top)
        msg = ui.edit('\n'.join(hdr[0]), hdr[2] or ui.username())
    setupheaderopts(ui, opts)
    ret = mq.refresh(repo, pats, msg=msg, **opts)
    mq.save_dirty()
    return ret
1774 1774
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any changes which
    have been made in the working directory since the last refresh (thus
    showing what the current patch would become after a qrefresh).

    Use 'hg diff' if you only want to see the changes made since the last
    qrefresh, or 'hg export qtip' if you want to see changes made by the
    current patch without including changes made since the qrefresh.
    """
    mq = repo.mq
    mq.diff(repo, pats, opts)
    return 0
1788 1788
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -k/--keep, the folded patch files will not
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            # really skip duplicates (and the current patch itself), as the
            # message promises: previously the patch was appended anyway,
            # which would have applied its diff a second time. Also add
            # the missing trailing newline to the warning.
            ui.warn(_('Skipping already folded patch %s\n') % p)
            continue
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            # collect each folded patch's description for concatenation
            messages.append(q.readheaders(p)[0])
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        # build the combined message: current header, then each folded
        # patch's header separated by '* * *'
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()
1847 1847
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    mq = repo.mq
    target = mq.lookup(patch)
    force = opts['force']
    # pop if the target is already applied, otherwise push up to it
    if mq.isapplied(target):
        ret = mq.pop(repo, target, force=force)
    else:
        ret = mq.push(repo, target, force=force)
    mq.save_dirty()
    return ret
1858 1858
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    To set a negative guard "-foo" on topmost patch ("--" is needed so
    hg will not interpret "-foo" as an option):
    hg qguard -- -foo

    To set guards on another patch:
    hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        # print "patchname: guard1 guard2 ..." (or "unguarded")
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # an argument starting with '-' or '+' is a guard, not a patch name;
    # in that case default to the topmost applied patch
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        # first argument names the patch; the rest are guards
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        # guard arguments given (or --none): replace the patch's guards
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        # no guard arguments: just print the patch's current guards
        status(q.series.index(q.lookup(patch)))
1906 1906
def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    mq = repo.mq

    if not patch:
        # default to the topmost applied patch
        if not mq.applied:
            ui.write('No patches applied\n')
            return 1
        name = mq.lookup('qtip')
    else:
        name = mq.lookup(patch)
    lines = repo.mq.readheaders(name)[0]

    ui.write('\n'.join(lines) + '\n')
1921 1921
def lastsavename(path):
    """Return (filename, index) of the highest-numbered save file.

    Save files live next to *path* and are named "<base>.<N>" for a
    decimal N. Returns (None, None) when no save file exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # re.escape keeps any regex metacharacters in the base name literal,
    # so e.g. a base containing '+' cannot break (or widen) the match
    namere = re.compile("%s.([0-9]+)" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            # use 'is None', not '== None', to test for the sentinel
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
1938 1938
def savename(path):
    # next save file name: "<path>.<N+1>" where N is the highest
    # existing save index (0 when there is none yet)
    (prev, idx) = lastsavename(path)
    if prev is None:
        idx = 0
    return path + ".%d" % (idx + 1)
1945 1945
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    When --force is applied, all local changes in patched files will be lost.
    """
    mq = repo.mq
    mergeq = None

    if opts['all']:
        # --all: push up to the last patch in the series
        if not mq.series:
            ui.warn(_('no patches in series\n'))
            return 0
        patch = mq.series[-1]
    if opts['merge']:
        # merge against a saved queue (named with -n, else the newest save)
        if opts['name']:
            newpath = repo.join(opts['name'])
        else:
            newpath, i = lastsavename(mq.path)
            if not newpath:
                ui.warn("no saved queues found, please use -n\n")
                return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)
    return mq.push(repo, patch, force=opts['force'], list=opts['list'],
                   mergeq=mergeq)
1972 1972
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    By default, pops off the top of the patch stack. If given a patch name,
    keeps popping off patches until the named patch is at the top of the stack.
    """
    localupdate = True
    if opts['name']:
        # operate on a named saved queue rather than the active one;
        # in that case leave the working directory untouched
        mq = queue(ui, repo.join(""), repo.join(opts['name']))
        ui.warn('using patch queue: %s\n' % mq.path)
        localupdate = False
    else:
        mq = repo.mq
    ret = mq.pop(repo, patch, force=opts['force'], update=localupdate,
                 all=opts['all'])
    mq.save_dirty()
    return ret
1990 1990
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    # with a single argument it is the new name; rename the current patch
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # renaming into a directory keeps the original basename
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    # rewrite the series entry, preserving any guards attached to it
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    # if the patch is applied, update the status file entry as well
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        # mirror the rename inside the versioned patch repository, if any
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                # not committed there yet: just re-add under the new name
                r.dirstate.forget(patch)
                r.dirstate.add(name)
            else:
                # keep history: record as a copy plus a removal
                if r.dirstate[name] == 'r':
                    r.undelete([name])
                r.copy(patch, name)
                r.remove([patch], False)
        finally:
            # releases the wlock (lock objects release on deletion)
            del wlock

    q.save_dirty()
2050 2050
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    node = repo.lookup(rev)
    mq = repo.mq
    mq.restore(repo, node,
               delete=opts['delete'],
               qupdate=opts['update'])
    mq.save_dirty()
    return 0
2059 2059
def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        # copy the whole patch directory to a new location (-n NAME, or
        # an automatically numbered save name)
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        except OSError:
            # the status file may legitimately not exist; catching only
            # OSError (instead of the previous bare except) no longer
            # swallows KeyboardInterrupt/SystemExit
            pass
    return 0
2089 2089
def strip(ui, repo, rev, **opts):
    """strip a revision and all its descendants from the repository

    If one of the working dir's parent revisions is stripped, the working
    directory will be updated to the parent of the stripped revision.
    """
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'
    else:
        backup = 'all'

    node = repo.lookup(rev)
    parents = repo.dirstate.parents()
    cl = repo.changelog
    # update the working directory only when the stripped revision is an
    # ancestor of one of its parents (i.e. a parent is being stripped)
    if parents[0] == revlog.nullid:
        update = False
    elif parents[1] == revlog.nullid:
        update = node == cl.ancestor(parents[0], node)
    else:
        update = node in (cl.ancestor(parents[0], node),
                          cl.ancestor(parents[1], node))

    repo.mq.strip(repo, node, backup=backup, update=update,
                  force=opts['force'])
    return 0
2115 2115
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if it
    has no guards or any positive guards match the currently selected guard,
    but will not be pushed if any negative guards match the current guard.
    For example:

        qguard foo.patch -stable    (negative guard)
        qguard bar.patch +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it
    has a positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are skipped
    and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last applied
    patch that is not guarded. Use --reapply (which implies --pop) to push
    back to the current patch afterwards, but skip guarded patches.

    Use -s/--series to print a list of all guards in the series file (no
    other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        # changing the active guard set: remember what was pushable
        # beforehand so we can report how the counts moved
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        # --series: tally how many patches carry each guard
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # sort by guard name, ignoring the leading '+'/'-' sign
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # no arguments: print the currently active guards
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the current top so --reapply can push back up to it
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        # pop down to just below the first guarded applied patch
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # persist queue state even if the push fails midway
            q.save_dirty()
2217 2217
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied patches) by
    moving them out of mq control into regular repository history.

    Accepts a revision range or the --all option. If --all is specified, all
    applied mq revisions are removed from mq control. Otherwise, the given
    revisions must be at the base of the stack of applied patches.

    This can be especially useful if your changes have been applied to an
    upstream repository, or if you are about to push your changes to upstream.
    """
    if opts['applied']:
        # --all (internally 'applied'): finish the whole applied stack
        revrange = ('qbase:qtip',) + revrange
    elif not revrange:
        raise util.Abort(_('no revisions specified'))

    mq = repo.mq
    if not mq.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = cmdutil.revrange(repo, revrange)
    mq.finish(repo, revs)
    mq.save_dirty()
    return 0
2245 2245
def reposetup(ui, repo):
    # extension hook: wrap local repositories in a subclass that knows
    # about the patch queue, and attach the queue object as repo.mq
    class mqrepo(repo.__class__):
        def abort_if_wdir_patched(self, errmsg, force=False):
            # refuse to proceed when the working directory's first parent
            # is an applied mq patch (unless forced)
            if self.mq.applied and not force:
                parent = revlog.hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, *args, **opts):
            # 'force' may arrive positionally (6th argument) or as a kwarg
            if len(args) >= 6:
                force = args[5]
            else:
                force = opts.get('force')
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(*args, **opts)

        def push(self, remote, force=False, revs=None):
            # pushing everything would publish the applied patches; refuse
            # unless forced or an explicit revision list is given
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            # expose each applied patch name as a (local) tag
            mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]

            if mqtags[-1][0] not in self.changelog.nodemap:
                # stale status file: ignore the mq tags rather than crash
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(mqtags[-1][0]))
                return tagscache

            # plus the qtip/qbase/qparent convenience tags
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

        def _branchtags(self, partial, lrev):
            # like the base implementation, but never persist a branch
            # cache that includes mq patch changesets (they are transient)
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags(partial, lrev)

            cl = self.changelog
            qbasenode = revlog.bin(q.applied[0].rev)
            if qbasenode not in cl.nodemap:
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(qbasenode))
                return super(mqrepo, self)._branchtags(partial, lrev)

            qbase = cl.rev(qbasenode)
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, cl.count())

            return partial

    if repo.local():
        repo.__class__ = mqrepo
        repo.mq = queue(ui, repo.join(""))
2329 2329
# option shared by all the qseries-style listing commands
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

# options shared by the commands that can set patch header fields (qnew)
headeropts = [
    ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
    ('u', 'user', '', _('add "From: <given user>" to patch')),
    ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
    ('d', 'date', '', _('add "Date: <given date>" to patch'))]
2337 2337
2338 2338 cmdtable = {
2339 2339 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2340 2340 "qclone":
2341 2341 (clone,
2342 2342 [('', 'pull', None, _('use pull protocol to copy metadata')),
2343 2343 ('U', 'noupdate', None, _('do not update the new working directories')),
2344 2344 ('', 'uncompressed', None,
2345 2345 _('use uncompressed transfer (fast over LAN)')),
2346 2346 ('p', 'patches', '', _('location of source patch repo')),
2347 2347 ] + commands.remoteopts,
2348 2348 _('hg qclone [OPTION]... SOURCE [DEST]')),
2349 2349 "qcommit|qci":
2350 2350 (commit,
2351 2351 commands.table["^commit|ci"][1],
2352 2352 _('hg qcommit [OPTION]... [FILE]...')),
2353 2353 "^qdiff":
2354 2354 (diff,
2355 2355 commands.diffopts + commands.diffopts2 + commands.walkopts,
2356 2356 _('hg qdiff [OPTION]... [FILE]...')),
2357 2357 "qdelete|qremove|qrm":
2358 2358 (delete,
2359 2359 [('k', 'keep', None, _('keep patch file')),
2360 2360 ('r', 'rev', [], _('stop managing a revision'))],
2361 2361 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2362 2362 'qfold':
2363 2363 (fold,
2364 2364 [('e', 'edit', None, _('edit patch header')),
2365 2365 ('k', 'keep', None, _('keep folded patch files')),
2366 2366 ] + commands.commitopts,
2367 2367 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2368 2368 'qgoto':
2369 2369 (goto,
2370 2370 [('f', 'force', None, _('overwrite any local changes'))],
2371 2371 _('hg qgoto [OPTION]... PATCH')),
2372 2372 'qguard':
2373 2373 (guard,
2374 2374 [('l', 'list', None, _('list all patches and guards')),
2375 2375 ('n', 'none', None, _('drop all guards'))],
2376 2376 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2377 2377 'qheader': (header, [], _('hg qheader [PATCH]')),
2378 2378 "^qimport":
2379 2379 (qimport,
2380 2380 [('e', 'existing', None, 'import file in patch dir'),
2381 2381 ('n', 'name', '', 'patch file name'),
2382 2382 ('f', 'force', None, 'overwrite existing files'),
2383 2383 ('r', 'rev', [], 'place existing revisions under mq control'),
2384 2384 ('g', 'git', None, _('use git extended diff format'))],
2385 2385 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2386 2386 "^qinit":
2387 2387 (init,
2388 2388 [('c', 'create-repo', None, 'create queue repository')],
2389 2389 _('hg qinit [-c]')),
2390 2390 "qnew":
2391 2391 (new,
2392 2392 [('e', 'edit', None, _('edit commit message')),
2393 2393 ('f', 'force', None, _('import uncommitted changes into patch')),
2394 2394 ('g', 'git', None, _('use git extended diff format')),
2395 2395 ] + commands.walkopts + commands.commitopts + headeropts,
2396 2396 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2397 2397 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2398 2398 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2399 2399 "^qpop":
2400 2400 (pop,
2401 2401 [('a', 'all', None, _('pop all patches')),
2402 2402 ('n', 'name', '', _('queue name to pop')),
2403 2403 ('f', 'force', None, _('forget any local changes'))],
2404 2404 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2405 2405 "^qpush":
2406 2406 (push,
2407 2407 [('f', 'force', None, _('apply if the patch has rejects')),
2408 2408 ('l', 'list', None, _('list patch name in commit text')),
2409 2409 ('a', 'all', None, _('apply all patches')),
2410 2410 ('m', 'merge', None, _('merge from another queue')),
2411 2411 ('n', 'name', '', _('merge queue name'))],
2412 2412 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2413 2413 "^qrefresh":
2414 2414 (refresh,
2415 2415 [('e', 'edit', None, _('edit commit message')),
2416 2416 ('g', 'git', None, _('use git extended diff format')),
2417 2417 ('s', 'short', None, _('refresh only files already in the patch')),
2418 2418 ] + commands.walkopts + commands.commitopts + headeropts,
2419 2419 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2420 2420 'qrename|qmv':
2421 2421 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2422 2422 "qrestore":
2423 2423 (restore,
2424 2424 [('d', 'delete', None, _('delete save entry')),
2425 2425 ('u', 'update', None, _('update queue working dir'))],
2426 2426 _('hg qrestore [-d] [-u] REV')),
2427 2427 "qsave":
2428 2428 (save,
2429 2429 [('c', 'copy', None, _('copy patch directory')),
2430 2430 ('n', 'name', '', _('copy directory name')),
2431 2431 ('e', 'empty', None, _('clear queue status file')),
2432 2432 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2433 2433 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2434 2434 "qselect":
2435 2435 (select,
2436 2436 [('n', 'none', None, _('disable all guards')),
2437 2437 ('s', 'series', None, _('list all guards in series file')),
2438 2438 ('', 'pop', None, _('pop to before first guarded applied patch')),
2439 2439 ('', 'reapply', None, _('pop, then reapply patches'))],
2440 2440 _('hg qselect [OPTION]... [GUARD]...')),
2441 2441 "qseries":
2442 2442 (series,
2443 2443 [('m', 'missing', None, _('print patches not in series')),
2444 2444 ] + seriesopts,
2445 2445 _('hg qseries [-ms]')),
2446 2446 "^strip":
2447 2447 (strip,
2448 2448 [('f', 'force', None, _('force removal with local changes')),
2449 2449 ('b', 'backup', None, _('bundle unrelated changesets')),
2450 2450 ('n', 'nobackup', None, _('no backups'))],
2451 2451 _('hg strip [-f] [-b] [-n] REV')),
2452 2452 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2453 2453 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2454 2454 "qfinish":
2455 2455 (finish,
2456 2456 [('a', 'applied', None, _('finish all applied changesets'))],
2457 2457 _('hg qfinish [-a] [REV...]')),
2458 2458 }
@@ -1,144 +1,144 b''
1 1 # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
2 2 #
3 3 # This software may be used and distributed according to the terms
4 4 # of the GNU General Public License, incorporated herein by reference.
5 5 #
6 6 # To perform automatic newline conversion, use:
7 7 #
8 8 # [extensions]
9 9 # hgext.win32text =
10 10 # [encode]
11 11 # ** = cleverencode:
12 12 # # or ** = macencode:
13 13 # [decode]
14 14 # ** = cleverdecode:
15 15 # # or ** = macdecode:
16 16 #
17 17 # If not doing conversion, to make sure you do not commit CRLF/CR by accident:
18 18 #
19 19 # [hooks]
20 20 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
21 21 # # or pretxncommit.cr = python:hgext.win32text.forbidcr
22 22 #
23 23 # To do the same check on a server to prevent CRLF/CR from being pushed or
24 24 # pulled:
25 25 #
26 26 # [hooks]
27 27 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
28 28 # # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
29 29
30 30 from mercurial.i18n import gettext as _
31 31 from mercurial.node import bin, short
32 32 from mercurial import util
33 33 import re
34 34
35 35 # regexp for single LF without CR preceding.
36 36 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
37 37
38 38 newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
39 39 filterstr = {'\r\n': 'clever', '\r': 'mac'}
40 40
41 41 def checknewline(s, newline, ui=None, repo=None, filename=None):
42 42 # warn if already has 'newline' in repository.
43 43 # it might cause unexpected eol conversion.
44 44 # see issue 302:
45 45 # http://www.selenic.com/mercurial/bts/issue302
46 46 if newline in s and ui and filename and repo:
47 47 ui.warn(_('WARNING: %s already has %s line endings\n'
48 48 'and does not need EOL conversion by the win32text plugin.\n'
49 49 'Before your next commit, please reconsider your '
50 50 'encode/decode settings in \nMercurial.ini or %s.\n') %
51 51 (filename, newlinestr[newline], repo.join('hgrc')))
52 52
53 53 def dumbdecode(s, cmd, **kwargs):
54 54 checknewline(s, '\r\n', **kwargs)
55 55 # replace single LF to CRLF
56 56 return re_single_lf.sub('\\1\r\n', s)
57 57
58 58 def dumbencode(s, cmd):
59 59 return s.replace('\r\n', '\n')
60 60
61 61 def macdumbdecode(s, cmd, **kwargs):
62 62 checknewline(s, '\r', **kwargs)
63 63 return s.replace('\n', '\r')
64 64
65 65 def macdumbencode(s, cmd):
66 66 return s.replace('\r', '\n')
67 67
68 68 def cleverdecode(s, cmd, **kwargs):
69 69 if not util.binary(s):
70 70 return dumbdecode(s, cmd, **kwargs)
71 71 return s
72 72
73 73 def cleverencode(s, cmd):
74 74 if not util.binary(s):
75 75 return dumbencode(s, cmd)
76 76 return s
77 77
78 78 def macdecode(s, cmd, **kwargs):
79 79 if not util.binary(s):
80 80 return macdumbdecode(s, cmd, **kwargs)
81 81 return s
82 82
83 83 def macencode(s, cmd):
84 84 if not util.binary(s):
85 85 return macdumbencode(s, cmd)
86 86 return s
87 87
88 88 _filters = {
89 89 'dumbdecode:': dumbdecode,
90 90 'dumbencode:': dumbencode,
91 91 'cleverdecode:': cleverdecode,
92 92 'cleverencode:': cleverencode,
93 93 'macdumbdecode:': macdumbdecode,
94 94 'macdumbencode:': macdumbencode,
95 95 'macdecode:': macdecode,
96 96 'macencode:': macencode,
97 97 }
98 98
99 99 def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
100 100 halt = False
101 101 for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
102 c = repo.changectx(rev)
102 c = repo[rev]
103 103 for f in c.files():
104 104 if f not in c:
105 105 continue
106 106 data = c[f].data()
107 107 if not util.binary(data) and newline in data:
108 108 if not halt:
109 109 ui.warn(_('Attempt to commit or push text file(s) '
110 110 'using %s line endings\n') %
111 111 newlinestr[newline])
112 112 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
113 113 halt = True
114 114 if halt and hooktype == 'pretxnchangegroup':
115 115 crlf = newlinestr[newline].lower()
116 116 filter = filterstr[newline]
117 117 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
118 118 'add to Mercurial.ini or .hg/hgrc:\n'
119 119 '\n'
120 120 '[hooks]\n'
121 121 'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
122 122 '\n'
123 123 'and also consider adding:\n'
124 124 '\n'
125 125 '[extensions]\n'
126 126 'hgext.win32text =\n'
127 127 '[encode]\n'
128 128 '** = %sencode:\n'
129 129 '[decode]\n'
130 130 '** = %sdecode:\n') % (crlf, crlf, filter, filter))
131 131 return halt
132 132
133 133 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
134 134 return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
135 135
136 136 def forbidcr(ui, repo, hooktype, node, **kwargs):
137 137 return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
138 138
139 139 def reposetup(ui, repo):
140 140 if not repo.local():
141 141 return
142 142 for name, fn in _filters.iteritems():
143 143 repo.adddatafilter(name, fn)
144 144
@@ -1,225 +1,225 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 from node import hex
10 10 import cStringIO, os, stat, tarfile, time, util, zipfile
11 11 import zlib, gzip
12 12
13 13 def tidyprefix(dest, prefix, suffixes):
14 14 '''choose prefix to use for names in archive. make sure prefix is
15 15 safe for consumers.'''
16 16
17 17 if prefix:
18 18 prefix = util.normpath(prefix)
19 19 else:
20 20 if not isinstance(dest, str):
21 21 raise ValueError('dest must be string if no prefix')
22 22 prefix = os.path.basename(dest)
23 23 lower = prefix.lower()
24 24 for sfx in suffixes:
25 25 if lower.endswith(sfx):
26 26 prefix = prefix[:-len(sfx)]
27 27 break
28 28 lpfx = os.path.normpath(util.localpath(prefix))
29 29 prefix = util.pconvert(lpfx)
30 30 if not prefix.endswith('/'):
31 31 prefix += '/'
32 32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 33 raise util.Abort(_('archive prefix contains illegal components'))
34 34 return prefix
35 35
36 36 class tarit:
37 37 '''write archive to tar file or stream. can write uncompressed,
38 38 or compress with gzip or bzip2.'''
39 39
40 40 class GzipFileWithTime(gzip.GzipFile):
41 41
42 42 def __init__(self, *args, **kw):
43 43 timestamp = None
44 44 if 'timestamp' in kw:
45 45 timestamp = kw.pop('timestamp')
46 46 if timestamp == None:
47 47 self.timestamp = time.time()
48 48 else:
49 49 self.timestamp = timestamp
50 50 gzip.GzipFile.__init__(self, *args, **kw)
51 51
52 52 def _write_gzip_header(self):
53 53 self.fileobj.write('\037\213') # magic header
54 54 self.fileobj.write('\010') # compression method
55 55 # Python 2.6 deprecates self.filename
56 56 fname = getattr(self, 'name', None) or self.filename
57 57 flags = 0
58 58 if fname:
59 59 flags = gzip.FNAME
60 60 self.fileobj.write(chr(flags))
61 61 gzip.write32u(self.fileobj, long(self.timestamp))
62 62 self.fileobj.write('\002')
63 63 self.fileobj.write('\377')
64 64 if fname:
65 65 self.fileobj.write(fname + '\000')
66 66
67 67 def __init__(self, dest, prefix, mtime, kind=''):
68 68 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
69 69 '.tgz', '.tbz2'])
70 70 self.mtime = mtime
71 71
72 72 def taropen(name, mode, fileobj=None):
73 73 if kind == 'gz':
74 74 mode = mode[0]
75 75 if not fileobj:
76 76 fileobj = open(name, mode + 'b')
77 77 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
78 78 zlib.Z_BEST_COMPRESSION,
79 79 fileobj, timestamp=mtime)
80 80 return tarfile.TarFile.taropen(name, mode, gzfileobj)
81 81 else:
82 82 return tarfile.open(name, mode + kind, fileobj)
83 83
84 84 if isinstance(dest, str):
85 85 self.z = taropen(dest, mode='w:')
86 86 else:
87 87 # Python 2.5-2.5.1 have a regression that requires a name arg
88 88 self.z = taropen(name='', mode='w|', fileobj=dest)
89 89
90 90 def addfile(self, name, mode, islink, data):
91 91 i = tarfile.TarInfo(self.prefix + name)
92 92 i.mtime = self.mtime
93 93 i.size = len(data)
94 94 if islink:
95 95 i.type = tarfile.SYMTYPE
96 96 i.mode = 0777
97 97 i.linkname = data
98 98 data = None
99 99 else:
100 100 i.mode = mode
101 101 data = cStringIO.StringIO(data)
102 102 self.z.addfile(i, data)
103 103
104 104 def done(self):
105 105 self.z.close()
106 106
107 107 class tellable:
108 108 '''provide tell method for zipfile.ZipFile when writing to http
109 109 response file object.'''
110 110
111 111 def __init__(self, fp):
112 112 self.fp = fp
113 113 self.offset = 0
114 114
115 115 def __getattr__(self, key):
116 116 return getattr(self.fp, key)
117 117
118 118 def write(self, s):
119 119 self.fp.write(s)
120 120 self.offset += len(s)
121 121
122 122 def tell(self):
123 123 return self.offset
124 124
125 125 class zipit:
126 126 '''write archive to zip file or stream. can write uncompressed,
127 127 or compressed with deflate.'''
128 128
129 129 def __init__(self, dest, prefix, mtime, compress=True):
130 130 self.prefix = tidyprefix(dest, prefix, ('.zip',))
131 131 if not isinstance(dest, str):
132 132 try:
133 133 dest.tell()
134 134 except (AttributeError, IOError):
135 135 dest = tellable(dest)
136 136 self.z = zipfile.ZipFile(dest, 'w',
137 137 compress and zipfile.ZIP_DEFLATED or
138 138 zipfile.ZIP_STORED)
139 139 self.date_time = time.gmtime(mtime)[:6]
140 140
141 141 def addfile(self, name, mode, islink, data):
142 142 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
143 143 i.compress_type = self.z.compression
144 144 # unzip will not honor unix file modes unless file creator is
145 145 # set to unix (id 3).
146 146 i.create_system = 3
147 147 ftype = stat.S_IFREG
148 148 if islink:
149 149 mode = 0777
150 150 ftype = stat.S_IFLNK
151 151 i.external_attr = (mode | ftype) << 16L
152 152 self.z.writestr(i, data)
153 153
154 154 def done(self):
155 155 self.z.close()
156 156
157 157 class fileit:
158 158 '''write archive as files in directory.'''
159 159
160 160 def __init__(self, name, prefix, mtime):
161 161 if prefix:
162 162 raise util.Abort(_('cannot give prefix when archiving to files'))
163 163 self.basedir = name
164 164 self.opener = util.opener(self.basedir)
165 165
166 166 def addfile(self, name, mode, islink, data):
167 167 if islink:
168 168 self.opener.symlink(data, name)
169 169 return
170 170 f = self.opener(name, "w", atomictemp=True)
171 171 f.write(data)
172 172 f.rename()
173 173 destfile = os.path.join(self.basedir, name)
174 174 os.chmod(destfile, mode)
175 175
176 176 def done(self):
177 177 pass
178 178
179 179 archivers = {
180 180 'files': fileit,
181 181 'tar': tarit,
182 182 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
183 183 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
184 184 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
185 185 'zip': zipit,
186 186 }
187 187
188 188 def archive(repo, dest, node, kind, decode=True, matchfn=None,
189 189 prefix=None, mtime=None):
190 190 '''create archive of repo as it was at node.
191 191
192 192 dest can be name of directory, name of archive file, or file
193 193 object to write archive to.
194 194
195 195 kind is type of archive to create.
196 196
197 197 decode tells whether to put files through decode filters from
198 198 hgrc.
199 199
200 200 matchfn is function to filter names of files to write to archive.
201 201
202 202 prefix is name of path to put before every archive member.'''
203 203
204 204 def write(name, mode, islink, getdata):
205 205 if matchfn and not matchfn(name): return
206 206 data = getdata()
207 207 if decode:
208 208 data = repo.wwritedata(name, data)
209 209 archiver.addfile(name, mode, islink, data)
210 210
211 ctx = repo.changectx(node)
211 ctx = repo[node]
212 212 if kind not in archivers:
213 213 raise util.Abort(_("unknown archive type '%s'" % kind))
214 214 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
215 215 m = ctx.manifest()
216 216 items = m.items()
217 217 items.sort()
218 218 if repo.ui.configbool("ui", "archivemeta", True):
219 219 write('.hg_archival.txt', 0644, False,
220 220 lambda: 'repo: %s\nnode: %s\n' % (
221 221 hex(repo.changelog.node(0)), hex(node)))
222 222 for filename, filenode in items:
223 223 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
224 224 lambda: repo.file(filename).read(filenode))
225 225 archiver.done()
@@ -1,1194 +1,1194 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, bisect, stat
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 12 import match as _match
13 13
14 14 revrangesep = ':'
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18 class AmbiguousCommand(Exception):
19 19 """Exception raised if command shortcut matches more than one command."""
20 20
21 21 def findpossible(ui, cmd, table):
22 22 """
23 23 Return cmd -> (aliases, command table entry)
24 24 for each matching command.
25 25 Return debug commands (or their aliases) only if no normal command matches.
26 26 """
27 27 choice = {}
28 28 debugchoice = {}
29 29 for e in table.keys():
30 30 aliases = e.lstrip("^").split("|")
31 31 found = None
32 32 if cmd in aliases:
33 33 found = cmd
34 34 elif not ui.config("ui", "strict"):
35 35 for a in aliases:
36 36 if a.startswith(cmd):
37 37 found = a
38 38 break
39 39 if found is not None:
40 40 if aliases[0].startswith("debug") or found.startswith("debug"):
41 41 debugchoice[found] = (aliases, table[e])
42 42 else:
43 43 choice[found] = (aliases, table[e])
44 44
45 45 if not choice and debugchoice:
46 46 choice = debugchoice
47 47
48 48 return choice
49 49
50 50 def findcmd(ui, cmd, table):
51 51 """Return (aliases, command table entry) for command string."""
52 52 choice = findpossible(ui, cmd, table)
53 53
54 54 if cmd in choice:
55 55 return choice[cmd]
56 56
57 57 if len(choice) > 1:
58 58 clist = choice.keys()
59 59 clist.sort()
60 60 raise AmbiguousCommand(cmd, clist)
61 61
62 62 if choice:
63 63 return choice.values()[0]
64 64
65 65 raise UnknownCommand(cmd)
66 66
67 67 def bail_if_changed(repo):
68 68 if repo.dirstate.parents()[1] != nullid:
69 69 raise util.Abort(_('outstanding uncommitted merge'))
70 70 modified, added, removed, deleted = repo.status()[:4]
71 71 if modified or added or removed or deleted:
72 72 raise util.Abort(_("outstanding uncommitted changes"))
73 73
74 74 def logmessage(opts):
75 75 """ get the log message according to -m and -l option """
76 76 message = opts['message']
77 77 logfile = opts['logfile']
78 78
79 79 if message and logfile:
80 80 raise util.Abort(_('options --message and --logfile are mutually '
81 81 'exclusive'))
82 82 if not message and logfile:
83 83 try:
84 84 if logfile == '-':
85 85 message = sys.stdin.read()
86 86 else:
87 87 message = open(logfile).read()
88 88 except IOError, inst:
89 89 raise util.Abort(_("can't read commit message '%s': %s") %
90 90 (logfile, inst.strerror))
91 91 return message
92 92
93 93 def loglimit(opts):
94 94 """get the log limit according to option -l/--limit"""
95 95 limit = opts.get('limit')
96 96 if limit:
97 97 try:
98 98 limit = int(limit)
99 99 except ValueError:
100 100 raise util.Abort(_('limit must be a positive integer'))
101 101 if limit <= 0: raise util.Abort(_('limit must be positive'))
102 102 else:
103 103 limit = sys.maxint
104 104 return limit
105 105
106 106 def setremoteconfig(ui, opts):
107 107 "copy remote options to ui tree"
108 108 if opts.get('ssh'):
109 109 ui.setconfig("ui", "ssh", opts['ssh'])
110 110 if opts.get('remotecmd'):
111 111 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
112 112
113 113 def revpair(repo, revs):
114 114 '''return pair of nodes, given list of revisions. second item can
115 115 be None, meaning use working dir.'''
116 116
117 117 def revfix(repo, val, defval):
118 118 if not val and val != 0 and defval is not None:
119 119 val = defval
120 120 return repo.lookup(val)
121 121
122 122 if not revs:
123 123 return repo.dirstate.parents()[0], None
124 124 end = None
125 125 if len(revs) == 1:
126 126 if revrangesep in revs[0]:
127 127 start, end = revs[0].split(revrangesep, 1)
128 128 start = revfix(repo, start, 0)
129 129 end = revfix(repo, end, repo.changelog.count() - 1)
130 130 else:
131 131 start = revfix(repo, revs[0], None)
132 132 elif len(revs) == 2:
133 133 if revrangesep in revs[0] or revrangesep in revs[1]:
134 134 raise util.Abort(_('too many revisions specified'))
135 135 start = revfix(repo, revs[0], None)
136 136 end = revfix(repo, revs[1], None)
137 137 else:
138 138 raise util.Abort(_('too many revisions specified'))
139 139 return start, end
140 140
141 141 def revrange(repo, revs):
142 142 """Yield revision as strings from a list of revision specifications."""
143 143
144 144 def revfix(repo, val, defval):
145 145 if not val and val != 0 and defval is not None:
146 146 return defval
147 147 return repo.changelog.rev(repo.lookup(val))
148 148
149 149 seen, l = {}, []
150 150 for spec in revs:
151 151 if revrangesep in spec:
152 152 start, end = spec.split(revrangesep, 1)
153 153 start = revfix(repo, start, 0)
154 154 end = revfix(repo, end, repo.changelog.count() - 1)
155 155 step = start > end and -1 or 1
156 156 for rev in xrange(start, end+step, step):
157 157 if rev in seen:
158 158 continue
159 159 seen[rev] = 1
160 160 l.append(rev)
161 161 else:
162 162 rev = revfix(repo, spec, None)
163 163 if rev in seen:
164 164 continue
165 165 seen[rev] = 1
166 166 l.append(rev)
167 167
168 168 return l
169 169
170 170 def make_filename(repo, pat, node,
171 171 total=None, seqno=None, revwidth=None, pathname=None):
172 172 node_expander = {
173 173 'H': lambda: hex(node),
174 174 'R': lambda: str(repo.changelog.rev(node)),
175 175 'h': lambda: short(node),
176 176 }
177 177 expander = {
178 178 '%': lambda: '%',
179 179 'b': lambda: os.path.basename(repo.root),
180 180 }
181 181
182 182 try:
183 183 if node:
184 184 expander.update(node_expander)
185 185 if node:
186 186 expander['r'] = (lambda:
187 187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
188 188 if total is not None:
189 189 expander['N'] = lambda: str(total)
190 190 if seqno is not None:
191 191 expander['n'] = lambda: str(seqno)
192 192 if total is not None and seqno is not None:
193 193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
194 194 if pathname is not None:
195 195 expander['s'] = lambda: os.path.basename(pathname)
196 196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
197 197 expander['p'] = lambda: pathname
198 198
199 199 newname = []
200 200 patlen = len(pat)
201 201 i = 0
202 202 while i < patlen:
203 203 c = pat[i]
204 204 if c == '%':
205 205 i += 1
206 206 c = pat[i]
207 207 c = expander[c]()
208 208 newname.append(c)
209 209 i += 1
210 210 return ''.join(newname)
211 211 except KeyError, inst:
212 212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
213 213 inst.args[0])
214 214
215 215 def make_file(repo, pat, node=None,
216 216 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
217 217 if not pat or pat == '-':
218 218 return 'w' in mode and sys.stdout or sys.stdin
219 219 if hasattr(pat, 'write') and 'w' in mode:
220 220 return pat
221 221 if hasattr(pat, 'read') and 'r' in mode:
222 222 return pat
223 223 return open(make_filename(repo, pat, node, total, seqno, revwidth,
224 224 pathname),
225 225 mode)
226 226
227 227 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
228 228 if not globbed and default == 'relpath':
229 229 pats = util.expand_glob(pats or [])
230 230 m = _match.match(repo.root, repo.getcwd(), pats,
231 231 opts.get('include'), opts.get('exclude'), default)
232 232 def badfn(f, msg):
233 233 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
234 234 return False
235 235 m.bad = badfn
236 236 return m
237 237
238 238 def matchall(repo):
239 239 return _match.always(repo.root, repo.getcwd())
240 240
241 241 def matchfiles(repo, files):
242 242 return _match.exact(repo.root, repo.getcwd(), files)
243 243
244 244 def findrenames(repo, added=None, removed=None, threshold=0.5):
245 245 '''find renamed files -- yields (before, after, score) tuples'''
246 246 if added is None or removed is None:
247 247 added, removed = repo.status()[1:3]
248 ctx = repo.changectx('.')
248 ctx = repo['.']
249 249 for a in added:
250 250 aa = repo.wread(a)
251 251 bestname, bestscore = None, threshold
252 252 for r in removed:
253 253 rr = ctx.filectx(r).data()
254 254
255 255 # bdiff.blocks() returns blocks of matching lines
256 256 # count the number of bytes in each
257 257 equal = 0
258 258 alines = mdiff.splitnewlines(aa)
259 259 matches = bdiff.blocks(aa, rr)
260 260 for x1,x2,y1,y2 in matches:
261 261 for line in alines[x1:x2]:
262 262 equal += len(line)
263 263
264 264 lengths = len(aa) + len(rr)
265 265 if lengths:
266 266 myscore = equal*2.0 / lengths
267 267 if myscore >= bestscore:
268 268 bestname, bestscore = r, myscore
269 269 if bestname:
270 270 yield bestname, a, bestscore
271 271
272 272 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
273 273 if dry_run is None:
274 274 dry_run = opts.get('dry_run')
275 275 if similarity is None:
276 276 similarity = float(opts.get('similarity') or 0)
277 277 add, remove = [], []
278 278 mapping = {}
279 279 audit_path = util.path_auditor(repo.root)
280 280 m = match(repo, pats, opts)
281 281 for abs in repo.walk(m):
282 282 target = repo.wjoin(abs)
283 283 good = True
284 284 try:
285 285 audit_path(abs)
286 286 except:
287 287 good = False
288 288 rel = m.rel(abs)
289 289 exact = m.exact(abs)
290 290 if good and abs not in repo.dirstate:
291 291 add.append(abs)
292 292 mapping[abs] = rel, m.exact(abs)
293 293 if repo.ui.verbose or not exact:
294 294 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
295 295 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
296 296 or (os.path.isdir(target) and not os.path.islink(target))):
297 297 remove.append(abs)
298 298 mapping[abs] = rel, exact
299 299 if repo.ui.verbose or not exact:
300 300 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
301 301 if not dry_run:
302 302 repo.remove(remove)
303 303 repo.add(add)
304 304 if similarity > 0:
305 305 for old, new, score in findrenames(repo, add, remove, similarity):
306 306 oldrel, oldexact = mapping[old]
307 307 newrel, newexact = mapping[new]
308 308 if repo.ui.verbose or not oldexact or not newexact:
309 309 repo.ui.status(_('recording removal of %s as rename to %s '
310 310 '(%d%% similar)\n') %
311 311 (oldrel, newrel, score * 100))
312 312 if not dry_run:
313 313 repo.copy(old, new)
314 314
315 315 def copy(ui, repo, pats, opts, rename=False):
316 316 # called with the repo lock held
317 317 #
318 318 # hgsep => pathname that uses "/" to separate directories
319 319 # ossep => pathname that uses os.sep to separate directories
320 320 cwd = repo.getcwd()
321 321 targets = {}
322 322 after = opts.get("after")
323 323 dryrun = opts.get("dry_run")
324 324
325 325 def walkpat(pat):
326 326 srcs = []
327 327 m = match(repo, [pat], opts, globbed=True)
328 328 for abs in repo.walk(m):
329 329 state = repo.dirstate[abs]
330 330 rel = m.rel(abs)
331 331 exact = m.exact(abs)
332 332 if state in '?r':
333 333 if exact and state == '?':
334 334 ui.warn(_('%s: not copying - file is not managed\n') % rel)
335 335 if exact and state == 'r':
336 336 ui.warn(_('%s: not copying - file has been marked for'
337 337 ' remove\n') % rel)
338 338 continue
339 339 # abs: hgsep
340 340 # rel: ossep
341 341 srcs.append((abs, rel, exact))
342 342 return srcs
343 343
344 344 # abssrc: hgsep
345 345 # relsrc: ossep
346 346 # otarget: ossep
347 347 def copyfile(abssrc, relsrc, otarget, exact):
348 348 abstarget = util.canonpath(repo.root, cwd, otarget)
349 349 reltarget = repo.pathto(abstarget, cwd)
350 350 target = repo.wjoin(abstarget)
351 351 src = repo.wjoin(abssrc)
352 352 state = repo.dirstate[abstarget]
353 353
354 354 # check for collisions
355 355 prevsrc = targets.get(abstarget)
356 356 if prevsrc is not None:
357 357 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
358 358 (reltarget, repo.pathto(abssrc, cwd),
359 359 repo.pathto(prevsrc, cwd)))
360 360 return
361 361
362 362 # check for overwrites
363 363 exists = os.path.exists(target)
364 364 if (not after and exists or after and state in 'mn'):
365 365 if not opts['force']:
366 366 ui.warn(_('%s: not overwriting - file exists\n') %
367 367 reltarget)
368 368 return
369 369
370 370 if after:
371 371 if not exists:
372 372 return
373 373 elif not dryrun:
374 374 try:
375 375 if exists:
376 376 os.unlink(target)
377 377 targetdir = os.path.dirname(target) or '.'
378 378 if not os.path.isdir(targetdir):
379 379 os.makedirs(targetdir)
380 380 util.copyfile(src, target)
381 381 except IOError, inst:
382 382 if inst.errno == errno.ENOENT:
383 383 ui.warn(_('%s: deleted in working copy\n') % relsrc)
384 384 else:
385 385 ui.warn(_('%s: cannot copy - %s\n') %
386 386 (relsrc, inst.strerror))
387 387 return True # report a failure
388 388
389 389 if ui.verbose or not exact:
390 390 action = rename and "moving" or "copying"
391 391 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
392 392
393 393 targets[abstarget] = abssrc
394 394
395 395 # fix up dirstate
396 396 origsrc = repo.dirstate.copied(abssrc) or abssrc
397 397 if abstarget == origsrc: # copying back a copy?
398 398 if state not in 'mn' and not dryrun:
399 399 repo.dirstate.normallookup(abstarget)
400 400 else:
401 401 if repo.dirstate[origsrc] == 'a':
402 402 if not ui.quiet:
403 403 ui.warn(_("%s has not been committed yet, so no copy "
404 404 "data will be stored for %s.\n")
405 405 % (repo.pathto(origsrc, cwd), reltarget))
406 406 if abstarget not in repo.dirstate and not dryrun:
407 407 repo.add([abstarget])
408 408 elif not dryrun:
409 409 repo.copy(origsrc, abstarget)
410 410
411 411 if rename and not dryrun:
412 412 repo.remove([abssrc], not after)
413 413
414 414 # pat: ossep
415 415 # dest ossep
416 416 # srcs: list of (hgsep, hgsep, ossep, bool)
417 417 # return: function that takes hgsep and returns ossep
418 418 def targetpathfn(pat, dest, srcs):
419 419 if os.path.isdir(pat):
420 420 abspfx = util.canonpath(repo.root, cwd, pat)
421 421 abspfx = util.localpath(abspfx)
422 422 if destdirexists:
423 423 striplen = len(os.path.split(abspfx)[0])
424 424 else:
425 425 striplen = len(abspfx)
426 426 if striplen:
427 427 striplen += len(os.sep)
428 428 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
429 429 elif destdirexists:
430 430 res = lambda p: os.path.join(dest,
431 431 os.path.basename(util.localpath(p)))
432 432 else:
433 433 res = lambda p: dest
434 434 return res
435 435
436 436 # pat: ossep
437 437 # dest ossep
438 438 # srcs: list of (hgsep, hgsep, ossep, bool)
439 439 # return: function that takes hgsep and returns ossep
440 440 def targetpathafterfn(pat, dest, srcs):
441 441 if util.patkind(pat, None)[0]:
442 442 # a mercurial pattern
443 443 res = lambda p: os.path.join(dest,
444 444 os.path.basename(util.localpath(p)))
445 445 else:
446 446 abspfx = util.canonpath(repo.root, cwd, pat)
447 447 if len(abspfx) < len(srcs[0][0]):
448 448 # A directory. Either the target path contains the last
449 449 # component of the source path or it does not.
450 450 def evalpath(striplen):
451 451 score = 0
452 452 for s in srcs:
453 453 t = os.path.join(dest, util.localpath(s[0])[striplen:])
454 454 if os.path.exists(t):
455 455 score += 1
456 456 return score
457 457
458 458 abspfx = util.localpath(abspfx)
459 459 striplen = len(abspfx)
460 460 if striplen:
461 461 striplen += len(os.sep)
462 462 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
463 463 score = evalpath(striplen)
464 464 striplen1 = len(os.path.split(abspfx)[0])
465 465 if striplen1:
466 466 striplen1 += len(os.sep)
467 467 if evalpath(striplen1) > score:
468 468 striplen = striplen1
469 469 res = lambda p: os.path.join(dest,
470 470 util.localpath(p)[striplen:])
471 471 else:
472 472 # a file
473 473 if destdirexists:
474 474 res = lambda p: os.path.join(dest,
475 475 os.path.basename(util.localpath(p)))
476 476 else:
477 477 res = lambda p: dest
478 478 return res
479 479
480 480
481 481 pats = util.expand_glob(pats)
482 482 if not pats:
483 483 raise util.Abort(_('no source or destination specified'))
484 484 if len(pats) == 1:
485 485 raise util.Abort(_('no destination specified'))
486 486 dest = pats.pop()
487 487 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
488 488 if not destdirexists:
489 489 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
490 490 raise util.Abort(_('with multiple sources, destination must be an '
491 491 'existing directory'))
492 492 if util.endswithsep(dest):
493 493 raise util.Abort(_('destination %s is not a directory') % dest)
494 494
495 495 tfn = targetpathfn
496 496 if after:
497 497 tfn = targetpathafterfn
498 498 copylist = []
499 499 for pat in pats:
500 500 srcs = walkpat(pat)
501 501 if not srcs:
502 502 continue
503 503 copylist.append((tfn(pat, dest, srcs), srcs))
504 504 if not copylist:
505 505 raise util.Abort(_('no files to copy'))
506 506
507 507 errors = 0
508 508 for targetpath, srcs in copylist:
509 509 for abssrc, relsrc, exact in srcs:
510 510 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
511 511 errors += 1
512 512
513 513 if errors:
514 514 ui.warn(_('(consider using --after)\n'))
515 515
516 516 return errors
517 517
518 518 def service(opts, parentfn=None, initfn=None, runfn=None):
519 519 '''Run a command as a service.'''
520 520
521 521 if opts['daemon'] and not opts['daemon_pipefds']:
522 522 rfd, wfd = os.pipe()
523 523 args = sys.argv[:]
524 524 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
525 525 # Don't pass --cwd to the child process, because we've already
526 526 # changed directory.
527 527 for i in xrange(1,len(args)):
528 528 if args[i].startswith('--cwd='):
529 529 del args[i]
530 530 break
531 531 elif args[i].startswith('--cwd'):
532 532 del args[i:i+2]
533 533 break
534 534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
535 535 args[0], args)
536 536 os.close(wfd)
537 537 os.read(rfd, 1)
538 538 if parentfn:
539 539 return parentfn(pid)
540 540 else:
541 541 os._exit(0)
542 542
543 543 if initfn:
544 544 initfn()
545 545
546 546 if opts['pid_file']:
547 547 fp = open(opts['pid_file'], 'w')
548 548 fp.write(str(os.getpid()) + '\n')
549 549 fp.close()
550 550
551 551 if opts['daemon_pipefds']:
552 552 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
553 553 os.close(rfd)
554 554 try:
555 555 os.setsid()
556 556 except AttributeError:
557 557 pass
558 558 os.write(wfd, 'y')
559 559 os.close(wfd)
560 560 sys.stdout.flush()
561 561 sys.stderr.flush()
562 562 fd = os.open(util.nulldev, os.O_RDWR)
563 563 if fd != 0: os.dup2(fd, 0)
564 564 if fd != 1: os.dup2(fd, 1)
565 565 if fd != 2: os.dup2(fd, 2)
566 566 if fd not in (0, 1, 2): os.close(fd)
567 567
568 568 if runfn:
569 569 return runfn()
570 570
571 571 class changeset_printer(object):
572 572 '''show changeset information when templating not requested.'''
573 573
574 574 def __init__(self, ui, repo, patch, buffered):
575 575 self.ui = ui
576 576 self.repo = repo
577 577 self.buffered = buffered
578 578 self.patch = patch
579 579 self.header = {}
580 580 self.hunk = {}
581 581 self.lastheader = None
582 582
583 583 def flush(self, rev):
584 584 if rev in self.header:
585 585 h = self.header[rev]
586 586 if h != self.lastheader:
587 587 self.lastheader = h
588 588 self.ui.write(h)
589 589 del self.header[rev]
590 590 if rev in self.hunk:
591 591 self.ui.write(self.hunk[rev])
592 592 del self.hunk[rev]
593 593 return 1
594 594 return 0
595 595
596 596 def show(self, rev=0, changenode=None, copies=(), **props):
597 597 if self.buffered:
598 598 self.ui.pushbuffer()
599 599 self._show(rev, changenode, copies, props)
600 600 self.hunk[rev] = self.ui.popbuffer()
601 601 else:
602 602 self._show(rev, changenode, copies, props)
603 603
604 604 def _show(self, rev, changenode, copies, props):
605 605 '''show a single changeset or file revision'''
606 606 log = self.repo.changelog
607 607 if changenode is None:
608 608 changenode = log.node(rev)
609 609 elif not rev:
610 610 rev = log.rev(changenode)
611 611
612 612 if self.ui.quiet:
613 613 self.ui.write("%d:%s\n" % (rev, short(changenode)))
614 614 return
615 615
616 616 changes = log.read(changenode)
617 617 date = util.datestr(changes[2])
618 618 extra = changes[5]
619 619 branch = extra.get("branch")
620 620
621 621 hexfunc = self.ui.debugflag and hex or short
622 622
623 623 parents = [(p, hexfunc(log.node(p)))
624 624 for p in self._meaningful_parentrevs(log, rev)]
625 625
626 626 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
627 627
628 628 # don't show the default branch name
629 629 if branch != 'default':
630 630 branch = util.tolocal(branch)
631 631 self.ui.write(_("branch: %s\n") % branch)
632 632 for tag in self.repo.nodetags(changenode):
633 633 self.ui.write(_("tag: %s\n") % tag)
634 634 for parent in parents:
635 635 self.ui.write(_("parent: %d:%s\n") % parent)
636 636
637 637 if self.ui.debugflag:
638 638 self.ui.write(_("manifest: %d:%s\n") %
639 639 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
640 640 self.ui.write(_("user: %s\n") % changes[1])
641 641 self.ui.write(_("date: %s\n") % date)
642 642
643 643 if self.ui.debugflag:
644 644 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
645 645 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
646 646 files):
647 647 if value:
648 648 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
649 649 elif changes[3] and self.ui.verbose:
650 650 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
651 651 if copies and self.ui.verbose:
652 652 copies = ['%s (%s)' % c for c in copies]
653 653 self.ui.write(_("copies: %s\n") % ' '.join(copies))
654 654
655 655 if extra and self.ui.debugflag:
656 656 extraitems = extra.items()
657 657 extraitems.sort()
658 658 for key, value in extraitems:
659 659 self.ui.write(_("extra: %s=%s\n")
660 660 % (key, value.encode('string_escape')))
661 661
662 662 description = changes[4].strip()
663 663 if description:
664 664 if self.ui.verbose:
665 665 self.ui.write(_("description:\n"))
666 666 self.ui.write(description)
667 667 self.ui.write("\n\n")
668 668 else:
669 669 self.ui.write(_("summary: %s\n") %
670 670 description.splitlines()[0])
671 671 self.ui.write("\n")
672 672
673 673 self.showpatch(changenode)
674 674
675 675 def showpatch(self, node):
676 676 if self.patch:
677 677 prev = self.repo.changelog.parents(node)[0]
678 678 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
679 679 opts=patch.diffopts(self.ui))
680 680 self.ui.write("\n")
681 681
682 682 def _meaningful_parentrevs(self, log, rev):
683 683 """Return list of meaningful (or all if debug) parentrevs for rev.
684 684
685 685 For merges (two non-nullrev revisions) both parents are meaningful.
686 686 Otherwise the first parent revision is considered meaningful if it
687 687 is not the preceding revision.
688 688 """
689 689 parents = log.parentrevs(rev)
690 690 if not self.ui.debugflag and parents[1] == nullrev:
691 691 if parents[0] >= rev - 1:
692 692 parents = []
693 693 else:
694 694 parents = [parents[0]]
695 695 return parents
696 696
697 697
698 698 class changeset_templater(changeset_printer):
699 699 '''format changeset information.'''
700 700
701 701 def __init__(self, ui, repo, patch, mapfile, buffered):
702 702 changeset_printer.__init__(self, ui, repo, patch, buffered)
703 703 filters = templatefilters.filters.copy()
704 704 filters['formatnode'] = (ui.debugflag and (lambda x: x)
705 705 or (lambda x: x[:12]))
706 706 self.t = templater.templater(mapfile, filters,
707 707 cache={
708 708 'parent': '{rev}:{node|formatnode} ',
709 709 'manifest': '{rev}:{node|formatnode}',
710 710 'filecopy': '{name} ({source})'})
711 711
712 712 def use_template(self, t):
713 713 '''set template string to use'''
714 714 self.t.cache['changeset'] = t
715 715
716 716 def _show(self, rev, changenode, copies, props):
717 717 '''show a single changeset or file revision'''
718 718 log = self.repo.changelog
719 719 if changenode is None:
720 720 changenode = log.node(rev)
721 721 elif not rev:
722 722 rev = log.rev(changenode)
723 723
724 724 changes = log.read(changenode)
725 725
726 726 def showlist(name, values, plural=None, **args):
727 727 '''expand set of values.
728 728 name is name of key in template map.
729 729 values is list of strings or dicts.
730 730 plural is plural of name, if not simply name + 's'.
731 731
732 732 expansion works like this, given name 'foo'.
733 733
734 734 if values is empty, expand 'no_foos'.
735 735
736 736 if 'foo' not in template map, return values as a string,
737 737 joined by space.
738 738
739 739 expand 'start_foos'.
740 740
741 741 for each value, expand 'foo'. if 'last_foo' in template
742 742 map, expand it instead of 'foo' for last key.
743 743
744 744 expand 'end_foos'.
745 745 '''
746 746 if plural: names = plural
747 747 else: names = name + 's'
748 748 if not values:
749 749 noname = 'no_' + names
750 750 if noname in self.t:
751 751 yield self.t(noname, **args)
752 752 return
753 753 if name not in self.t:
754 754 if isinstance(values[0], str):
755 755 yield ' '.join(values)
756 756 else:
757 757 for v in values:
758 758 yield dict(v, **args)
759 759 return
760 760 startname = 'start_' + names
761 761 if startname in self.t:
762 762 yield self.t(startname, **args)
763 763 vargs = args.copy()
764 764 def one(v, tag=name):
765 765 try:
766 766 vargs.update(v)
767 767 except (AttributeError, ValueError):
768 768 try:
769 769 for a, b in v:
770 770 vargs[a] = b
771 771 except ValueError:
772 772 vargs[name] = v
773 773 return self.t(tag, **vargs)
774 774 lastname = 'last_' + name
775 775 if lastname in self.t:
776 776 last = values.pop()
777 777 else:
778 778 last = None
779 779 for v in values:
780 780 yield one(v)
781 781 if last is not None:
782 782 yield one(last, tag=lastname)
783 783 endname = 'end_' + names
784 784 if endname in self.t:
785 785 yield self.t(endname, **args)
786 786
787 787 def showbranches(**args):
788 788 branch = changes[5].get("branch")
789 789 if branch != 'default':
790 790 branch = util.tolocal(branch)
791 791 return showlist('branch', [branch], plural='branches', **args)
792 792
793 793 def showparents(**args):
794 794 parents = [[('rev', p), ('node', hex(log.node(p)))]
795 795 for p in self._meaningful_parentrevs(log, rev)]
796 796 return showlist('parent', parents, **args)
797 797
798 798 def showtags(**args):
799 799 return showlist('tag', self.repo.nodetags(changenode), **args)
800 800
801 801 def showextras(**args):
802 802 extras = changes[5].items()
803 803 extras.sort()
804 804 for key, value in extras:
805 805 args = args.copy()
806 806 args.update(dict(key=key, value=value))
807 807 yield self.t('extra', **args)
808 808
809 809 def showcopies(**args):
810 810 c = [{'name': x[0], 'source': x[1]} for x in copies]
811 811 return showlist('file_copy', c, plural='file_copies', **args)
812 812
813 813 files = []
814 814 def getfiles():
815 815 if not files:
816 816 files[:] = self.repo.status(
817 817 log.parents(changenode)[0], changenode)[:3]
818 818 return files
819 819 def showfiles(**args):
820 820 return showlist('file', changes[3], **args)
821 821 def showmods(**args):
822 822 return showlist('file_mod', getfiles()[0], **args)
823 823 def showadds(**args):
824 824 return showlist('file_add', getfiles()[1], **args)
825 825 def showdels(**args):
826 826 return showlist('file_del', getfiles()[2], **args)
827 827 def showmanifest(**args):
828 828 args = args.copy()
829 829 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
830 830 node=hex(changes[0])))
831 831 return self.t('manifest', **args)
832 832
833 833 defprops = {
834 834 'author': changes[1],
835 835 'branches': showbranches,
836 836 'date': changes[2],
837 837 'desc': changes[4].strip(),
838 838 'file_adds': showadds,
839 839 'file_dels': showdels,
840 840 'file_mods': showmods,
841 841 'files': showfiles,
842 842 'file_copies': showcopies,
843 843 'manifest': showmanifest,
844 844 'node': hex(changenode),
845 845 'parents': showparents,
846 846 'rev': rev,
847 847 'tags': showtags,
848 848 'extras': showextras,
849 849 }
850 850 props = props.copy()
851 851 props.update(defprops)
852 852
853 853 try:
854 854 if self.ui.debugflag and 'header_debug' in self.t:
855 855 key = 'header_debug'
856 856 elif self.ui.quiet and 'header_quiet' in self.t:
857 857 key = 'header_quiet'
858 858 elif self.ui.verbose and 'header_verbose' in self.t:
859 859 key = 'header_verbose'
860 860 elif 'header' in self.t:
861 861 key = 'header'
862 862 else:
863 863 key = ''
864 864 if key:
865 865 h = templater.stringify(self.t(key, **props))
866 866 if self.buffered:
867 867 self.header[rev] = h
868 868 else:
869 869 self.ui.write(h)
870 870 if self.ui.debugflag and 'changeset_debug' in self.t:
871 871 key = 'changeset_debug'
872 872 elif self.ui.quiet and 'changeset_quiet' in self.t:
873 873 key = 'changeset_quiet'
874 874 elif self.ui.verbose and 'changeset_verbose' in self.t:
875 875 key = 'changeset_verbose'
876 876 else:
877 877 key = 'changeset'
878 878 self.ui.write(templater.stringify(self.t(key, **props)))
879 879 self.showpatch(changenode)
880 880 except KeyError, inst:
881 881 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
882 882 inst.args[0]))
883 883 except SyntaxError, inst:
884 884 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
885 885
886 886 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
887 887 """show one changeset using template or regular display.
888 888
889 889 Display format will be the first non-empty hit of:
890 890 1. option 'template'
891 891 2. option 'style'
892 892 3. [ui] setting 'logtemplate'
893 893 4. [ui] setting 'style'
894 894 If all of these values are either the unset or the empty string,
895 895 regular display via changeset_printer() is done.
896 896 """
897 897 # options
898 898 patch = False
899 899 if opts.get('patch'):
900 900 patch = matchfn or matchall(repo)
901 901
902 902 tmpl = opts.get('template')
903 903 mapfile = None
904 904 if tmpl:
905 905 tmpl = templater.parsestring(tmpl, quoted=False)
906 906 else:
907 907 mapfile = opts.get('style')
908 908 # ui settings
909 909 if not mapfile:
910 910 tmpl = ui.config('ui', 'logtemplate')
911 911 if tmpl:
912 912 tmpl = templater.parsestring(tmpl)
913 913 else:
914 914 mapfile = ui.config('ui', 'style')
915 915
916 916 if tmpl or mapfile:
917 917 if mapfile:
918 918 if not os.path.split(mapfile)[0]:
919 919 mapname = (templater.templatepath('map-cmdline.' + mapfile)
920 920 or templater.templatepath(mapfile))
921 921 if mapname: mapfile = mapname
922 922 try:
923 923 t = changeset_templater(ui, repo, patch, mapfile, buffered)
924 924 except SyntaxError, inst:
925 925 raise util.Abort(inst.args[0])
926 926 if tmpl: t.use_template(tmpl)
927 927 return t
928 928 return changeset_printer(ui, repo, patch, buffered)
929 929
930 930 def finddate(ui, repo, date):
931 931 """Find the tipmost changeset that matches the given date spec"""
932 932 df = util.matchdate(date)
933 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
933 get = util.cachefunc(lambda r: repo[r].changeset())
934 934 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
935 935 results = {}
936 936 for st, rev, fns in changeiter:
937 937 if st == 'add':
938 938 d = get(rev)[2]
939 939 if df(d[0]):
940 940 results[rev] = d
941 941 elif st == 'iter':
942 942 if rev in results:
943 943 ui.status("Found revision %s from %s\n" %
944 944 (rev, util.datestr(results[rev])))
945 945 return str(rev)
946 946
947 947 raise util.Abort(_("revision matching date not found"))
948 948
949 949 def walkchangerevs(ui, repo, pats, change, opts):
950 950 '''Iterate over files and the revs they changed in.
951 951
952 952 Callers most commonly need to iterate backwards over the history
953 953 it is interested in. Doing so has awful (quadratic-looking)
954 954 performance, so we use iterators in a "windowed" way.
955 955
956 956 We walk a window of revisions in the desired order. Within the
957 957 window, we first walk forwards to gather data, then in the desired
958 958 order (usually backwards) to display it.
959 959
960 960 This function returns an (iterator, matchfn) tuple. The iterator
961 961 yields 3-tuples. They will be of one of the following forms:
962 962
963 963 "window", incrementing, lastrev: stepping through a window,
964 964 positive if walking forwards through revs, last rev in the
965 965 sequence iterated over - use to reset state for the current window
966 966
967 967 "add", rev, fns: out-of-order traversal of the given file names
968 968 fns, which changed during revision rev - use to gather data for
969 969 possible display
970 970
971 971 "iter", rev, None: in-order traversal of the revs earlier iterated
972 972 over with "add" - use to display data'''
973 973
974 974 def increasing_windows(start, end, windowsize=8, sizelimit=512):
975 975 if start < end:
976 976 while start < end:
977 977 yield start, min(windowsize, end-start)
978 978 start += windowsize
979 979 if windowsize < sizelimit:
980 980 windowsize *= 2
981 981 else:
982 982 while start > end:
983 983 yield start, min(windowsize, start-end-1)
984 984 start -= windowsize
985 985 if windowsize < sizelimit:
986 986 windowsize *= 2
987 987
988 988 m = match(repo, pats, opts)
989 989 follow = opts.get('follow') or opts.get('follow_first')
990 990
991 991 if repo.changelog.count() == 0:
992 992 return [], m
993 993
994 994 if follow:
995 defrange = '%s:0' % repo.changectx('.').rev()
995 defrange = '%s:0' % repo['.'].rev()
996 996 else:
997 997 defrange = '-1:0'
998 998 revs = revrange(repo, opts['rev'] or [defrange])
999 999 wanted = {}
1000 1000 slowpath = m.anypats() or opts.get('removed')
1001 1001 fncache = {}
1002 1002
1003 1003 if not slowpath and not m.files():
1004 1004 # No files, no patterns. Display all revs.
1005 1005 wanted = dict.fromkeys(revs)
1006 1006 copies = []
1007 1007 if not slowpath:
1008 1008 # Only files, no patterns. Check the history of each file.
1009 1009 def filerevgen(filelog, node):
1010 1010 cl_count = repo.changelog.count()
1011 1011 if node is None:
1012 1012 last = filelog.count() - 1
1013 1013 else:
1014 1014 last = filelog.rev(node)
1015 1015 for i, window in increasing_windows(last, nullrev):
1016 1016 revs = []
1017 1017 for j in xrange(i - window, i + 1):
1018 1018 n = filelog.node(j)
1019 1019 revs.append((filelog.linkrev(n),
1020 1020 follow and filelog.renamed(n)))
1021 1021 revs.reverse()
1022 1022 for rev in revs:
1023 1023 # only yield rev for which we have the changelog, it can
1024 1024 # happen while doing "hg log" during a pull or commit
1025 1025 if rev[0] < cl_count:
1026 1026 yield rev
1027 1027 def iterfiles():
1028 1028 for filename in m.files():
1029 1029 yield filename, None
1030 1030 for filename_node in copies:
1031 1031 yield filename_node
1032 1032 minrev, maxrev = min(revs), max(revs)
1033 1033 for file_, node in iterfiles():
1034 1034 filelog = repo.file(file_)
1035 1035 if filelog.count() == 0:
1036 1036 if node is None:
1037 1037 # A zero count may be a directory or deleted file, so
1038 1038 # try to find matching entries on the slow path.
1039 1039 slowpath = True
1040 1040 break
1041 1041 else:
1042 1042 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1043 1043 % (file_, short(node)))
1044 1044 continue
1045 1045 for rev, copied in filerevgen(filelog, node):
1046 1046 if rev <= maxrev:
1047 1047 if rev < minrev:
1048 1048 break
1049 1049 fncache.setdefault(rev, [])
1050 1050 fncache[rev].append(file_)
1051 1051 wanted[rev] = 1
1052 1052 if follow and copied:
1053 1053 copies.append(copied)
1054 1054 if slowpath:
1055 1055 if follow:
1056 1056 raise util.Abort(_('can only follow copies/renames for explicit '
1057 1057 'file names'))
1058 1058
1059 1059 # The slow path checks files modified in every changeset.
1060 1060 def changerevgen():
1061 1061 for i, window in increasing_windows(repo.changelog.count()-1,
1062 1062 nullrev):
1063 1063 for j in xrange(i - window, i + 1):
1064 1064 yield j, change(j)[3]
1065 1065
1066 1066 for rev, changefiles in changerevgen():
1067 1067 matches = filter(m, changefiles)
1068 1068 if matches:
1069 1069 fncache[rev] = matches
1070 1070 wanted[rev] = 1
1071 1071
1072 1072 class followfilter:
1073 1073 def __init__(self, onlyfirst=False):
1074 1074 self.startrev = nullrev
1075 1075 self.roots = []
1076 1076 self.onlyfirst = onlyfirst
1077 1077
1078 1078 def match(self, rev):
1079 1079 def realparents(rev):
1080 1080 if self.onlyfirst:
1081 1081 return repo.changelog.parentrevs(rev)[0:1]
1082 1082 else:
1083 1083 return filter(lambda x: x != nullrev,
1084 1084 repo.changelog.parentrevs(rev))
1085 1085
1086 1086 if self.startrev == nullrev:
1087 1087 self.startrev = rev
1088 1088 return True
1089 1089
1090 1090 if rev > self.startrev:
1091 1091 # forward: all descendants
1092 1092 if not self.roots:
1093 1093 self.roots.append(self.startrev)
1094 1094 for parent in realparents(rev):
1095 1095 if parent in self.roots:
1096 1096 self.roots.append(rev)
1097 1097 return True
1098 1098 else:
1099 1099 # backwards: all parents
1100 1100 if not self.roots:
1101 1101 self.roots.extend(realparents(self.startrev))
1102 1102 if rev in self.roots:
1103 1103 self.roots.remove(rev)
1104 1104 self.roots.extend(realparents(rev))
1105 1105 return True
1106 1106
1107 1107 return False
1108 1108
1109 1109 # it might be worthwhile to do this in the iterator if the rev range
1110 1110 # is descending and the prune args are all within that range
1111 1111 for rev in opts.get('prune', ()):
1112 1112 rev = repo.changelog.rev(repo.lookup(rev))
1113 1113 ff = followfilter()
1114 1114 stop = min(revs[0], revs[-1])
1115 1115 for x in xrange(rev, stop-1, -1):
1116 1116 if ff.match(x) and x in wanted:
1117 1117 del wanted[x]
1118 1118
1119 1119 def iterate():
1120 1120 if follow and not m.files():
1121 1121 ff = followfilter(onlyfirst=opts.get('follow_first'))
1122 1122 def want(rev):
1123 1123 if ff.match(rev) and rev in wanted:
1124 1124 return True
1125 1125 return False
1126 1126 else:
1127 1127 def want(rev):
1128 1128 return rev in wanted
1129 1129
1130 1130 for i, window in increasing_windows(0, len(revs)):
1131 1131 yield 'window', revs[0] < revs[-1], revs[-1]
1132 1132 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1133 1133 srevs = list(nrevs)
1134 1134 srevs.sort()
1135 1135 for rev in srevs:
1136 1136 fns = fncache.get(rev)
1137 1137 if not fns:
1138 1138 def fns_generator():
1139 1139 for f in change(rev)[3]:
1140 1140 if m(f):
1141 1141 yield f
1142 1142 fns = fns_generator()
1143 1143 yield 'add', rev, fns
1144 1144 for rev in nrevs:
1145 1145 yield 'iter', rev, None
1146 1146 return iterate(), m
1147 1147
1148 1148 def commit(ui, repo, commitfunc, pats, opts):
1149 1149 '''commit the specified files or all outstanding changes'''
1150 1150 date = opts.get('date')
1151 1151 if date:
1152 1152 opts['date'] = util.parsedate(date)
1153 1153 message = logmessage(opts)
1154 1154
1155 1155 # extract addremove carefully -- this function can be called from a command
1156 1156 # that doesn't support addremove
1157 1157 if opts.get('addremove'):
1158 1158 addremove(repo, pats, opts)
1159 1159
1160 1160 m = match(repo, pats, opts)
1161 1161 if pats:
1162 1162 status = repo.status(match=m)
1163 1163 modified, added, removed, deleted, unknown = status[:5]
1164 1164 files = modified + added + removed
1165 1165 slist = None
1166 1166 for f in m.files():
1167 1167 if f == '.':
1168 1168 continue
1169 1169 if f not in files:
1170 1170 rf = repo.wjoin(f)
1171 1171 rel = repo.pathto(f)
1172 1172 try:
1173 1173 mode = os.lstat(rf)[stat.ST_MODE]
1174 1174 except OSError:
1175 1175 raise util.Abort(_("file %s not found!") % rel)
1176 1176 if stat.S_ISDIR(mode):
1177 1177 name = f + '/'
1178 1178 if slist is None:
1179 1179 slist = list(files)
1180 1180 slist.sort()
1181 1181 i = bisect.bisect(slist, name)
1182 1182 if i >= len(slist) or not slist[i].startswith(name):
1183 1183 raise util.Abort(_("no match under directory %s!")
1184 1184 % rel)
1185 1185 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1186 1186 raise util.Abort(_("can't commit %s: "
1187 1187 "unsupported file type!") % rel)
1188 1188 elif f not in repo.dirstate:
1189 1189 raise util.Abort(_("file %s not tracked!") % rel)
1190 1190 m = matchfiles(repo, files)
1191 1191 try:
1192 1192 return commitfunc(ui, repo, message, m, opts)
1193 1193 except ValueError, inst:
1194 1194 raise util.Abort(str(inst))
@@ -1,3339 +1,3336 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from repo import RepoError, NoCapability
10 10 from i18n import _
11 11 import os, re, sys, urllib
12 12 import hg, util, revlog, bundlerepo, extensions, copies
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
16 16 import merge as merge_
17 17
18 18 # Commands start here, listed alphabetically
19 19
20 20 def add(ui, repo, *pats, **opts):
21 21 """add the specified files on the next commit
22 22
23 23 Schedule files to be version controlled and added to the repository.
24 24
25 25 The files will be added to the repository at the next commit. To
26 26 undo an add before that, see hg revert.
27 27
28 28 If no names are given, add all files in the repository.
29 29 """
30 30
31 31 rejected = None
32 32 exacts = {}
33 33 names = []
34 34 m = cmdutil.match(repo, pats, opts)
35 35 m.bad = lambda x,y: True
36 36 for abs in repo.walk(m):
37 37 if m.exact(abs):
38 38 if ui.verbose:
39 39 ui.status(_('adding %s\n') % m.rel(abs))
40 40 names.append(abs)
41 41 exacts[abs] = 1
42 42 elif abs not in repo.dirstate:
43 43 ui.status(_('adding %s\n') % m.rel(abs))
44 44 names.append(abs)
45 45 if not opts.get('dry_run'):
46 46 rejected = repo.add(names)
47 47 rejected = [p for p in rejected if p in exacts]
48 48 return rejected and 1 or 0
49 49
50 50 def addremove(ui, repo, *pats, **opts):
51 51 """add all new files, delete all missing files
52 52
53 53 Add all new files and remove all missing files from the repository.
54 54
55 55 New files are ignored if they match any of the patterns in .hgignore. As
56 56 with add, these changes take effect at the next commit.
57 57
58 58 Use the -s option to detect renamed files. With a parameter > 0,
59 59 this compares every removed file with every added file and records
60 60 those similar enough as renames. This option takes a percentage
61 61 between 0 (disabled) and 100 (files must be identical) as its
62 62 parameter. Detecting renamed files this way can be expensive.
63 63 """
64 64 try:
65 65 sim = float(opts.get('similarity') or 0)
66 66 except ValueError:
67 67 raise util.Abort(_('similarity must be a number'))
68 68 if sim < 0 or sim > 100:
69 69 raise util.Abort(_('similarity must be between 0 and 100'))
70 70 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
71 71
72 72 def annotate(ui, repo, *pats, **opts):
73 73 """show changeset information per file line
74 74
75 75 List changes in files, showing the revision id responsible for each line
76 76
77 77 This command is useful to discover who did a change or when a change took
78 78 place.
79 79
80 80 Without the -a option, annotate will avoid processing files it
81 81 detects as binary. With -a, annotate will generate an annotation
82 82 anyway, probably with undesirable results.
83 83 """
84 84 datefunc = ui.quiet and util.shortdate or util.datestr
85 85 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
86 86
87 87 if not pats:
88 88 raise util.Abort(_('at least one file name or pattern required'))
89 89
90 90 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
91 91 ('number', lambda x: str(x[0].rev())),
92 92 ('changeset', lambda x: short(x[0].node())),
93 93 ('date', getdate),
94 94 ('follow', lambda x: x[0].path()),
95 95 ]
96 96
97 97 if (not opts['user'] and not opts['changeset'] and not opts['date']
98 98 and not opts['follow']):
99 99 opts['number'] = 1
100 100
101 101 linenumber = opts.get('line_number') is not None
102 102 if (linenumber and (not opts['changeset']) and (not opts['number'])):
103 103 raise util.Abort(_('at least one of -n/-c is required for -l'))
104 104
105 105 funcmap = [func for op, func in opmap if opts.get(op)]
106 106 if linenumber:
107 107 lastfunc = funcmap[-1]
108 108 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
109 109
110 ctx = repo.changectx(opts['rev'])
110 ctx = repo[opts['rev']]
111 111
112 112 m = cmdutil.match(repo, pats, opts)
113 113 for abs in repo.walk(m, ctx.node()):
114 114 fctx = ctx.filectx(abs)
115 115 if not opts['text'] and util.binary(fctx.data()):
116 116 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
117 117 continue
118 118
119 119 lines = fctx.annotate(follow=opts.get('follow'),
120 120 linenumber=linenumber)
121 121 pieces = []
122 122
123 123 for f in funcmap:
124 124 l = [f(n) for n, dummy in lines]
125 125 if l:
126 126 m = max(map(len, l))
127 127 pieces.append(["%*s" % (m, x) for x in l])
128 128
129 129 if pieces:
130 130 for p, l in zip(zip(*pieces), lines):
131 131 ui.write("%s: %s" % (" ".join(p), l[1]))
132 132
133 133 def archive(ui, repo, dest, **opts):
134 134 '''create unversioned archive of a repository revision
135 135
136 136 By default, the revision used is the parent of the working
137 137 directory; use "-r" to specify a different revision.
138 138
139 139 To specify the type of archive to create, use "-t". Valid
140 140 types are:
141 141
142 142 "files" (default): a directory full of files
143 143 "tar": tar archive, uncompressed
144 144 "tbz2": tar archive, compressed using bzip2
145 145 "tgz": tar archive, compressed using gzip
146 146 "uzip": zip archive, uncompressed
147 147 "zip": zip archive, compressed using deflate
148 148
149 149 The exact name of the destination archive or directory is given
150 150 using a format string; see "hg help export" for details.
151 151
152 152 Each member added to an archive file has a directory prefix
153 153 prepended. Use "-p" to specify a format string for the prefix.
154 154 The default is the basename of the archive, with suffixes removed.
155 155 '''
156 156
157 ctx = repo.changectx(opts['rev'])
157 ctx = repo[opts['rev']]
158 158 if not ctx:
159 159 raise util.Abort(_('repository has no revisions'))
160 160 node = ctx.node()
161 161 dest = cmdutil.make_filename(repo, dest, node)
162 162 if os.path.realpath(dest) == repo.root:
163 163 raise util.Abort(_('repository root cannot be destination'))
164 164 matchfn = cmdutil.match(repo, [], opts)
165 165 kind = opts.get('type') or 'files'
166 166 prefix = opts['prefix']
167 167 if dest == '-':
168 168 if kind == 'files':
169 169 raise util.Abort(_('cannot archive plain files to stdout'))
170 170 dest = sys.stdout
171 171 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
172 172 prefix = cmdutil.make_filename(repo, prefix, node)
173 173 archival.archive(repo, dest, node, kind, not opts['no_decode'],
174 174 matchfn, prefix)
175 175
176 176 def backout(ui, repo, node=None, rev=None, **opts):
177 177 '''reverse effect of earlier changeset
178 178
179 179 Commit the backed out changes as a new changeset. The new
180 180 changeset is a child of the backed out changeset.
181 181
182 182 If you back out a changeset other than the tip, a new head is
183 183 created. This head will be the new tip and you should merge this
184 184 backout changeset with another head (current one by default).
185 185
186 186 The --merge option remembers the parent of the working directory
187 187 before starting the backout, then merges the new head with that
188 188 changeset afterwards. This saves you from doing the merge by
189 189 hand. The result of this merge is not committed, as for a normal
190 190 merge.
191 191
192 192 See \'hg help dates\' for a list of formats valid for -d/--date.
193 193 '''
194 194 if rev and node:
195 195 raise util.Abort(_("please specify just one revision"))
196 196
197 197 if not rev:
198 198 rev = node
199 199
200 200 if not rev:
201 201 raise util.Abort(_("please specify a revision to backout"))
202 202
203 203 date = opts.get('date')
204 204 if date:
205 205 opts['date'] = util.parsedate(date)
206 206
207 207 cmdutil.bail_if_changed(repo)
208 208 node = repo.lookup(rev)
209 209
210 210 op1, op2 = repo.dirstate.parents()
211 211 a = repo.changelog.ancestor(op1, node)
212 212 if a != node:
213 213 raise util.Abort(_('cannot back out change on a different branch'))
214 214
215 215 p1, p2 = repo.changelog.parents(node)
216 216 if p1 == nullid:
217 217 raise util.Abort(_('cannot back out a change with no parents'))
218 218 if p2 != nullid:
219 219 if not opts['parent']:
220 220 raise util.Abort(_('cannot back out a merge changeset without '
221 221 '--parent'))
222 222 p = repo.lookup(opts['parent'])
223 223 if p not in (p1, p2):
224 224 raise util.Abort(_('%s is not a parent of %s') %
225 225 (short(p), short(node)))
226 226 parent = p
227 227 else:
228 228 if opts['parent']:
229 229 raise util.Abort(_('cannot use --parent on non-merge changeset'))
230 230 parent = p1
231 231
232 232 # the backout should appear on the same branch
233 233 branch = repo.dirstate.branch()
234 234 hg.clean(repo, node, show_stats=False)
235 235 repo.dirstate.setbranch(branch)
236 236 revert_opts = opts.copy()
237 237 revert_opts['date'] = None
238 238 revert_opts['all'] = True
239 239 revert_opts['rev'] = hex(parent)
240 240 revert_opts['no_backup'] = None
241 241 revert(ui, repo, **revert_opts)
242 242 commit_opts = opts.copy()
243 243 commit_opts['addremove'] = False
244 244 if not commit_opts['message'] and not commit_opts['logfile']:
245 245 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
246 246 commit_opts['force_editor'] = True
247 247 commit(ui, repo, **commit_opts)
248 248 def nice(node):
249 249 return '%d:%s' % (repo.changelog.rev(node), short(node))
250 250 ui.status(_('changeset %s backs out changeset %s\n') %
251 251 (nice(repo.changelog.tip()), nice(node)))
252 252 if op1 != node:
253 253 hg.clean(repo, op1, show_stats=False)
254 254 if opts['merge']:
255 255 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
256 256 hg.merge(repo, hex(repo.changelog.tip()))
257 257 else:
258 258 ui.status(_('the backout changeset is a new head - '
259 259 'do not forget to merge\n'))
260 260 ui.status(_('(use "backout --merge" '
261 261 'if you want to auto-merge)\n'))
262 262
263 263 def bisect(ui, repo, rev=None, extra=None,
264 264 reset=None, good=None, bad=None, skip=None, noupdate=None):
265 265 """subdivision search of changesets
266 266
267 267 This command helps to find changesets which introduce problems.
268 268 To use, mark the earliest changeset you know exhibits the problem
269 269 as bad, then mark the latest changeset which is free from the
270 270 problem as good. Bisect will update your working directory to a
271 271 revision for testing. Once you have performed tests, mark the
272 272 working directory as bad or good and bisect will either update to
273 273 another candidate changeset or announce that it has found the bad
274 274 revision.
275 275 """
276 276 # backward compatibility
277 277 if rev in "good bad reset init".split():
278 278 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
279 279 cmd, rev, extra = rev, extra, None
280 280 if cmd == "good":
281 281 good = True
282 282 elif cmd == "bad":
283 283 bad = True
284 284 else:
285 285 reset = True
286 286 elif extra or good + bad + skip + reset > 1:
287 287 raise util.Abort("Incompatible arguments")
288 288
289 289 if reset:
290 290 p = repo.join("bisect.state")
291 291 if os.path.exists(p):
292 292 os.unlink(p)
293 293 return
294 294
295 295 # load state
296 296 state = {'good': [], 'bad': [], 'skip': []}
297 297 if os.path.exists(repo.join("bisect.state")):
298 298 for l in repo.opener("bisect.state"):
299 299 kind, node = l[:-1].split()
300 300 node = repo.lookup(node)
301 301 if kind not in state:
302 302 raise util.Abort(_("unknown bisect kind %s") % kind)
303 303 state[kind].append(node)
304 304
305 305 # update state
306 306 node = repo.lookup(rev or '.')
307 307 if good:
308 308 state['good'].append(node)
309 309 elif bad:
310 310 state['bad'].append(node)
311 311 elif skip:
312 312 state['skip'].append(node)
313 313
314 314 # save state
315 315 f = repo.opener("bisect.state", "w", atomictemp=True)
316 316 wlock = repo.wlock()
317 317 try:
318 318 for kind in state:
319 319 for node in state[kind]:
320 320 f.write("%s %s\n" % (kind, hex(node)))
321 321 f.rename()
322 322 finally:
323 323 del wlock
324 324
325 325 if not state['good'] or not state['bad']:
326 326 return
327 327
328 328 # actually bisect
329 329 node, changesets, good = hbisect.bisect(repo.changelog, state)
330 330 if changesets == 0:
331 331 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
332 332 displayer = cmdutil.show_changeset(ui, repo, {})
333 333 displayer.show(changenode=node)
334 334 elif node is not None:
335 335 # compute the approximate number of remaining tests
336 336 tests, size = 0, 2
337 337 while size <= changesets:
338 338 tests, size = tests + 1, size * 2
339 339 rev = repo.changelog.rev(node)
340 340 ui.write(_("Testing changeset %s:%s "
341 341 "(%s changesets remaining, ~%s tests)\n")
342 342 % (rev, short(node), changesets, tests))
343 343 if not noupdate:
344 344 cmdutil.bail_if_changed(repo)
345 345 return hg.clean(repo, node)
346 346
347 347 def branch(ui, repo, label=None, **opts):
348 348 """set or show the current branch name
349 349
350 350 With no argument, show the current branch name. With one argument,
351 351 set the working directory branch name (the branch does not exist in
352 352 the repository until the next commit).
353 353
354 354 Unless --force is specified, branch will not let you set a
355 355 branch name that shadows an existing branch.
356 356
357 357 Use the command 'hg update' to switch to an existing branch.
358 358 """
359 359
360 360 if label:
361 361 if not opts.get('force') and label in repo.branchtags():
362 if label not in [p.branch() for p in repo.changectx(None).parents()]:
362 if label not in [p.branch() for p in repo.parents()]:
363 363 raise util.Abort(_('a branch of the same name already exists'
364 364 ' (use --force to override)'))
365 365 repo.dirstate.setbranch(util.fromlocal(label))
366 366 ui.status(_('marked working directory as branch %s\n') % label)
367 367 else:
368 368 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
369 369
370 370 def branches(ui, repo, active=False):
371 371 """list repository named branches
372 372
373 373 List the repository's named branches, indicating which ones are
374 374 inactive. If active is specified, only show active branches.
375 375
376 376 A branch is considered active if it contains repository heads.
377 377
378 378 Use the command 'hg update' to switch to an existing branch.
379 379 """
380 380 hexfunc = ui.debugflag and hex or short
381 activebranches = [util.tolocal(repo.changectx(n).branch())
381 activebranches = [util.tolocal(repo[n].branch())
382 382 for n in repo.heads()]
383 383 branches = [(tag in activebranches, repo.changelog.rev(node), tag)
384 384 for tag, node in repo.branchtags().items()]
385 385 branches.sort()
386 386 branches.reverse()
387 387
388 388 for isactive, node, tag in branches:
389 389 if (not active) or isactive:
390 390 if ui.quiet:
391 391 ui.write("%s\n" % tag)
392 392 else:
393 393 rev = str(node).rjust(32 - util.locallen(tag))
394 394 isinactive = ((not isactive) and " (inactive)") or ''
395 395 data = tag, rev, hexfunc(repo.lookup(node)), isinactive
396 396 ui.write("%s%s:%s%s\n" % data)
397 397
398 398 def bundle(ui, repo, fname, dest=None, **opts):
399 399 """create a changegroup file
400 400
401 401 Generate a compressed changegroup file collecting changesets not
402 402 found in the other repository.
403 403
404 404 If no destination repository is specified the destination is
405 405 assumed to have all the nodes specified by one or more --base
406 406 parameters. To create a bundle containing all changesets, use
407 407 --all (or --base null). To change the compression method applied,
408 408 use the -t option (by default, bundles are compressed using bz2).
409 409
410 410 The bundle file can then be transferred using conventional means and
411 411 applied to another repository with the unbundle or pull command.
412 412 This is useful when direct push and pull are not available or when
413 413 exporting an entire repository is undesirable.
414 414
415 415 Applying bundles preserves all changeset contents including
416 416 permissions, copy/rename information, and revision history.
417 417 """
418 418 revs = opts.get('rev') or None
419 419 if revs:
420 420 revs = [repo.lookup(rev) for rev in revs]
421 421 if opts.get('all'):
422 422 base = ['null']
423 423 else:
424 424 base = opts.get('base')
425 425 if base:
426 426 if dest:
427 427 raise util.Abort(_("--base is incompatible with specifiying "
428 428 "a destination"))
429 429 base = [repo.lookup(rev) for rev in base]
430 430 # create the right base
431 431 # XXX: nodesbetween / changegroup* should be "fixed" instead
432 432 o = []
433 433 has = {nullid: None}
434 434 for n in base:
435 435 has.update(repo.changelog.reachable(n))
436 436 if revs:
437 437 visit = list(revs)
438 438 else:
439 439 visit = repo.changelog.heads()
440 440 seen = {}
441 441 while visit:
442 442 n = visit.pop(0)
443 443 parents = [p for p in repo.changelog.parents(n) if p not in has]
444 444 if len(parents) == 0:
445 445 o.insert(0, n)
446 446 else:
447 447 for p in parents:
448 448 if p not in seen:
449 449 seen[p] = 1
450 450 visit.append(p)
451 451 else:
452 452 cmdutil.setremoteconfig(ui, opts)
453 453 dest, revs, checkout = hg.parseurl(
454 454 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
455 455 other = hg.repository(ui, dest)
456 456 o = repo.findoutgoing(other, force=opts['force'])
457 457
458 458 if revs:
459 459 cg = repo.changegroupsubset(o, revs, 'bundle')
460 460 else:
461 461 cg = repo.changegroup(o, 'bundle')
462 462
463 463 bundletype = opts.get('type', 'bzip2').lower()
464 464 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
465 465 bundletype = btypes.get(bundletype)
466 466 if bundletype not in changegroup.bundletypes:
467 467 raise util.Abort(_('unknown bundle type specified with --type'))
468 468
469 469 changegroup.writebundle(cg, fname, bundletype)
470 470
471 471 def cat(ui, repo, file1, *pats, **opts):
472 472 """output the current or given revision of files
473 473
474 474 Print the specified files as they were at the given revision.
475 475 If no revision is given, the parent of the working directory is used,
476 476 or tip if no revision is checked out.
477 477
478 478 Output may be to a file, in which case the name of the file is
479 479 given using a format string. The formatting rules are the same as
480 480 for the export command, with the following additions:
481 481
482 482 %s basename of file being printed
483 483 %d dirname of file being printed, or '.' if in repo root
484 484 %p root-relative path name of file being printed
485 485 """
486 ctx = repo.changectx(opts['rev'])
486 ctx = repo[opts['rev']]
487 487 err = 1
488 488 m = cmdutil.match(repo, (file1,) + pats, opts)
489 489 for abs in repo.walk(m, ctx.node()):
490 490 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
491 491 data = ctx.filectx(abs).data()
492 492 if opts.get('decode'):
493 493 data = repo.wwritedata(abs, data)
494 494 fp.write(data)
495 495 err = 0
496 496 return err
497 497
498 498 def clone(ui, source, dest=None, **opts):
499 499 """make a copy of an existing repository
500 500
501 501 Create a copy of an existing repository in a new directory.
502 502
503 503 If no destination directory name is specified, it defaults to the
504 504 basename of the source.
505 505
506 506 The location of the source is added to the new repository's
507 507 .hg/hgrc file, as the default to be used for future pulls.
508 508
509 509 For efficiency, hardlinks are used for cloning whenever the source
510 510 and destination are on the same filesystem (note this applies only
511 511 to the repository data, not to the checked out files). Some
512 512 filesystems, such as AFS, implement hardlinking incorrectly, but
513 513 do not report errors. In these cases, use the --pull option to
514 514 avoid hardlinking.
515 515
516 516 In some cases, you can clone repositories and checked out files
517 517 using full hardlinks with
518 518
519 519 $ cp -al REPO REPOCLONE
520 520
521 521 This is the fastest way to clone, but it is not always safe. The
522 522 operation is not atomic (making sure REPO is not modified during
523 523 the operation is up to you) and you have to make sure your editor
524 524 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
525 525 this is not compatible with certain extensions that place their
526 526 metadata under the .hg directory, such as mq.
527 527
528 528 If you use the -r option to clone up to a specific revision, no
529 529 subsequent revisions will be present in the cloned repository.
530 530 This option implies --pull, even on local repositories.
531 531
532 532 If the -U option is used, the new clone will contain only a repository
533 533 (.hg) and no working copy (the working copy parent is the null revision).
534 534
535 535 See pull for valid source format details.
536 536
537 537 It is possible to specify an ssh:// URL as the destination, but no
538 538 .hg/hgrc and working directory will be created on the remote side.
539 539 Look at the help text for the pull command for important details
540 540 about ssh:// URLs.
541 541 """
542 542 cmdutil.setremoteconfig(ui, opts)
543 543 hg.clone(ui, source, dest,
544 544 pull=opts['pull'],
545 545 stream=opts['uncompressed'],
546 546 rev=opts['rev'],
547 547 update=not opts['noupdate'])
548 548
549 549 def commit(ui, repo, *pats, **opts):
550 550 """commit the specified files or all outstanding changes
551 551
552 552 Commit changes to the given files into the repository.
553 553
554 554 If a list of files is omitted, all changes reported by "hg status"
555 555 will be committed.
556 556
557 557 If you are committing the result of a merge, do not provide any
558 558 file names or -I/-X filters.
559 559
560 560 If no commit message is specified, the configured editor is started to
561 561 enter a message.
562 562
563 563 See 'hg help dates' for a list of formats valid for -d/--date.
564 564 """
565 565 def commitfunc(ui, repo, message, match, opts):
566 566 return repo.commit(match.files(), message, opts['user'], opts['date'],
567 567 match, force_editor=opts.get('force_editor'))
568 568
569 569 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
570 570 if not node:
571 571 return
572 572 cl = repo.changelog
573 573 rev = cl.rev(node)
574 574 parents = cl.parentrevs(rev)
575 575 if rev - 1 in parents:
576 576 # one of the parents was the old tip
577 577 return
578 578 if (parents == (nullrev, nullrev) or
579 579 len(cl.heads(cl.node(parents[0]))) > 1 and
580 580 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
581 581 ui.status(_('created new head\n'))
582 582
583 583 def copy(ui, repo, *pats, **opts):
584 584 """mark files as copied for the next commit
585 585
586 586 Mark dest as having copies of source files. If dest is a
587 587 directory, copies are put in that directory. If dest is a file,
588 588 there can only be one source.
589 589
590 590 By default, this command copies the contents of files as they
591 591 stand in the working directory. If invoked with --after, the
592 592 operation is recorded, but no copying is performed.
593 593
594 594 This command takes effect in the next commit. To undo a copy
595 595 before that, see hg revert.
596 596 """
597 597 wlock = repo.wlock(False)
598 598 try:
599 599 return cmdutil.copy(ui, repo, pats, opts)
600 600 finally:
601 601 del wlock
602 602
603 603 def debugancestor(ui, repo, *args):
604 604 """find the ancestor revision of two revisions in a given index"""
605 605 if len(args) == 3:
606 606 index, rev1, rev2 = args
607 607 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
608 608 lookup = r.lookup
609 609 elif len(args) == 2:
610 610 if not repo:
611 611 raise util.Abort(_("There is no Mercurial repository here "
612 612 "(.hg not found)"))
613 613 rev1, rev2 = args
614 614 r = repo.changelog
615 615 lookup = repo.lookup
616 616 else:
617 617 raise util.Abort(_('either two or three arguments required'))
618 618 a = r.ancestor(lookup(rev1), lookup(rev2))
619 619 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
620 620
621 621 def debugcomplete(ui, cmd='', **opts):
622 622 """returns the completion list associated with the given command"""
623 623
624 624 if opts['options']:
625 625 options = []
626 626 otables = [globalopts]
627 627 if cmd:
628 628 aliases, entry = cmdutil.findcmd(ui, cmd, table)
629 629 otables.append(entry[1])
630 630 for t in otables:
631 631 for o in t:
632 632 if o[0]:
633 633 options.append('-%s' % o[0])
634 634 options.append('--%s' % o[1])
635 635 ui.write("%s\n" % "\n".join(options))
636 636 return
637 637
638 638 clist = cmdutil.findpossible(ui, cmd, table).keys()
639 639 clist.sort()
640 640 ui.write("%s\n" % "\n".join(clist))
641 641
642 642 def debugfsinfo(ui, path = "."):
643 643 file('.debugfsinfo', 'w').write('')
644 644 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
645 645 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
646 646 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
647 647 and 'yes' or 'no'))
648 648 os.unlink('.debugfsinfo')
649 649
650 def debugrebuildstate(ui, repo, rev=""):
650 def debugrebuildstate(ui, repo, rev="tip"):
651 651 """rebuild the dirstate as it would look like for the given revision"""
652 if rev == "":
653 rev = repo.changelog.tip()
654 ctx = repo.changectx(rev)
655 files = ctx.manifest()
652 ctx = repo[rev]
656 653 wlock = repo.wlock()
657 654 try:
658 repo.dirstate.rebuild(rev, files)
655 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
659 656 finally:
660 657 del wlock
661 658
662 659 def debugcheckstate(ui, repo):
663 660 """validate the correctness of the current dirstate"""
664 661 parent1, parent2 = repo.dirstate.parents()
665 m1 = repo.changectx(parent1).manifest()
666 m2 = repo.changectx(parent2).manifest()
662 m1 = repo[parent1].manifest()
663 m2 = repo[parent2].manifest()
667 664 errors = 0
668 665 for f in repo.dirstate:
669 666 state = repo.dirstate[f]
670 667 if state in "nr" and f not in m1:
671 668 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
672 669 errors += 1
673 670 if state in "a" and f in m1:
674 671 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
675 672 errors += 1
676 673 if state in "m" and f not in m1 and f not in m2:
677 674 ui.warn(_("%s in state %s, but not in either manifest\n") %
678 675 (f, state))
679 676 errors += 1
680 677 for f in m1:
681 678 state = repo.dirstate[f]
682 679 if state not in "nrm":
683 680 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
684 681 errors += 1
685 682 if errors:
686 683 error = _(".hg/dirstate inconsistent with current parent's manifest")
687 684 raise util.Abort(error)
688 685
689 686 def showconfig(ui, repo, *values, **opts):
690 687 """show combined config settings from all hgrc files
691 688
692 689 With no args, print names and values of all config items.
693 690
694 691 With one arg of the form section.name, print just the value of
695 692 that config item.
696 693
697 694 With multiple args, print names and values of all config items
698 695 with matching section names."""
699 696
700 697 untrusted = bool(opts.get('untrusted'))
701 698 if values:
702 699 if len([v for v in values if '.' in v]) > 1:
703 700 raise util.Abort(_('only one config item permitted'))
704 701 for section, name, value in ui.walkconfig(untrusted=untrusted):
705 702 sectname = section + '.' + name
706 703 if values:
707 704 for v in values:
708 705 if v == section:
709 706 ui.write('%s=%s\n' % (sectname, value))
710 707 elif v == sectname:
711 708 ui.write(value, '\n')
712 709 else:
713 710 ui.write('%s=%s\n' % (sectname, value))
714 711
715 712 def debugsetparents(ui, repo, rev1, rev2=None):
716 713 """manually set the parents of the current working directory
717 714
718 715 This is useful for writing repository conversion tools, but should
719 716 be used with care.
720 717 """
721 718
722 719 if not rev2:
723 720 rev2 = hex(nullid)
724 721
725 722 wlock = repo.wlock()
726 723 try:
727 724 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
728 725 finally:
729 726 del wlock
730 727
731 728 def debugstate(ui, repo, nodates=None):
732 729 """show the contents of the current dirstate"""
733 730 k = repo.dirstate._map.items()
734 731 k.sort()
735 732 timestr = ""
736 733 showdate = not nodates
737 734 for file_, ent in k:
738 735 if showdate:
739 736 if ent[3] == -1:
740 737 # Pad or slice to locale representation
741 738 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
742 739 timestr = 'unset'
743 740 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
744 741 else:
745 742 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
746 743 if ent[1] & 020000:
747 744 mode = 'lnk'
748 745 else:
749 746 mode = '%3o' % (ent[1] & 0777)
750 747 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
751 748 for f in repo.dirstate.copies():
752 749 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
753 750
754 751 def debugdata(ui, file_, rev):
755 752 """dump the contents of a data file revision"""
756 753 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
757 754 try:
758 755 ui.write(r.revision(r.lookup(rev)))
759 756 except KeyError:
760 757 raise util.Abort(_('invalid revision identifier %s') % rev)
761 758
762 759 def debugdate(ui, date, range=None, **opts):
763 760 """parse and display a date"""
764 761 if opts["extended"]:
765 762 d = util.parsedate(date, util.extendeddateformats)
766 763 else:
767 764 d = util.parsedate(date)
768 765 ui.write("internal: %s %s\n" % d)
769 766 ui.write("standard: %s\n" % util.datestr(d))
770 767 if range:
771 768 m = util.matchdate(range)
772 769 ui.write("match: %s\n" % m(d[0]))
773 770
774 771 def debugindex(ui, file_):
775 772 """dump the contents of an index file"""
776 773 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
777 774 ui.write(" rev offset length base linkrev" +
778 775 " nodeid p1 p2\n")
779 776 for i in xrange(r.count()):
780 777 node = r.node(i)
781 778 try:
782 779 pp = r.parents(node)
783 780 except:
784 781 pp = [nullid, nullid]
785 782 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
786 783 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
787 784 short(node), short(pp[0]), short(pp[1])))
788 785
789 786 def debugindexdot(ui, file_):
790 787 """dump an index DAG as a .dot file"""
791 788 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
792 789 ui.write("digraph G {\n")
793 790 for i in xrange(r.count()):
794 791 node = r.node(i)
795 792 pp = r.parents(node)
796 793 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
797 794 if pp[1] != nullid:
798 795 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
799 796 ui.write("}\n")
800 797
801 798 def debuginstall(ui):
802 799 '''test Mercurial installation'''
803 800
804 801 def writetemp(contents):
805 802 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
806 803 f = os.fdopen(fd, "wb")
807 804 f.write(contents)
808 805 f.close()
809 806 return name
810 807
811 808 problems = 0
812 809
813 810 # encoding
814 811 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
815 812 try:
816 813 util.fromlocal("test")
817 814 except util.Abort, inst:
818 815 ui.write(" %s\n" % inst)
819 816 ui.write(_(" (check that your locale is properly set)\n"))
820 817 problems += 1
821 818
822 819 # compiled modules
823 820 ui.status(_("Checking extensions...\n"))
824 821 try:
825 822 import bdiff, mpatch, base85
826 823 except Exception, inst:
827 824 ui.write(" %s\n" % inst)
828 825 ui.write(_(" One or more extensions could not be found"))
829 826 ui.write(_(" (check that you compiled the extensions)\n"))
830 827 problems += 1
831 828
832 829 # templates
833 830 ui.status(_("Checking templates...\n"))
834 831 try:
835 832 import templater
836 833 t = templater.templater(templater.templatepath("map-cmdline.default"))
837 834 except Exception, inst:
838 835 ui.write(" %s\n" % inst)
839 836 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
840 837 problems += 1
841 838
842 839 # patch
843 840 ui.status(_("Checking patch...\n"))
844 841 patchproblems = 0
845 842 a = "1\n2\n3\n4\n"
846 843 b = "1\n2\n3\ninsert\n4\n"
847 844 fa = writetemp(a)
848 845 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
849 846 os.path.basename(fa))
850 847 fd = writetemp(d)
851 848
852 849 files = {}
853 850 try:
854 851 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
855 852 except util.Abort, e:
856 853 ui.write(_(" patch call failed:\n"))
857 854 ui.write(" " + str(e) + "\n")
858 855 patchproblems += 1
859 856 else:
860 857 if list(files) != [os.path.basename(fa)]:
861 858 ui.write(_(" unexpected patch output!\n"))
862 859 patchproblems += 1
863 860 a = file(fa).read()
864 861 if a != b:
865 862 ui.write(_(" patch test failed!\n"))
866 863 patchproblems += 1
867 864
868 865 if patchproblems:
869 866 if ui.config('ui', 'patch'):
870 867 ui.write(_(" (Current patch tool may be incompatible with patch,"
871 868 " or misconfigured. Please check your .hgrc file)\n"))
872 869 else:
873 870 ui.write(_(" Internal patcher failure, please report this error"
874 871 " to http://www.selenic.com/mercurial/bts\n"))
875 872 problems += patchproblems
876 873
877 874 os.unlink(fa)
878 875 os.unlink(fd)
879 876
880 877 # editor
881 878 ui.status(_("Checking commit editor...\n"))
882 879 editor = ui.geteditor()
883 880 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
884 881 if not cmdpath:
885 882 if editor == 'vi':
886 883 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
887 884 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
888 885 else:
889 886 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
890 887 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
891 888 problems += 1
892 889
893 890 # check username
894 891 ui.status(_("Checking username...\n"))
895 892 user = os.environ.get("HGUSER")
896 893 if user is None:
897 894 user = ui.config("ui", "username")
898 895 if user is None:
899 896 user = os.environ.get("EMAIL")
900 897 if not user:
901 898 ui.warn(" ")
902 899 ui.username()
903 900 ui.write(_(" (specify a username in your .hgrc file)\n"))
904 901
905 902 if not problems:
906 903 ui.status(_("No problems detected\n"))
907 904 else:
908 905 ui.write(_("%s problems detected,"
909 906 " please check your install!\n") % problems)
910 907
911 908 return problems
912 909
913 910 def debugrename(ui, repo, file1, *pats, **opts):
914 911 """dump rename information"""
915 912
916 ctx = repo.changectx(opts.get('rev', 'tip'))
913 ctx = repo[opts.get('rev', 'tip')]
917 914 m = cmdutil.match(repo, (file1,) + pats, opts)
918 915 for abs in repo.walk(m, ctx.node()):
919 916 fctx = ctx.filectx(abs)
920 917 o = fctx.filelog().renamed(fctx.filenode())
921 918 rel = m.rel(abs)
922 919 if o:
923 920 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
924 921 else:
925 922 ui.write(_("%s not renamed\n") % rel)
926 923
927 924 def debugwalk(ui, repo, *pats, **opts):
928 925 """show how files match on given patterns"""
929 926 m = cmdutil.match(repo, pats, opts)
930 927 items = list(repo.walk(m))
931 928 if not items:
932 929 return
933 930 fmt = 'f %%-%ds %%-%ds %%s' % (
934 931 max([len(abs) for abs in items]),
935 932 max([len(m.rel(abs)) for abs in items]))
936 933 for abs in items:
937 934 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
938 935 ui.write("%s\n" % line.rstrip())
939 936
940 937 def diff(ui, repo, *pats, **opts):
941 938 """diff repository (or selected files)
942 939
943 940 Show differences between revisions for the specified files.
944 941
945 942 Differences between files are shown using the unified diff format.
946 943
947 944 NOTE: diff may generate unexpected results for merges, as it will
948 945 default to comparing against the working directory's first parent
949 946 changeset if no revisions are specified.
950 947
951 948 When two revision arguments are given, then changes are shown
952 949 between those revisions. If only one revision is specified then
953 950 that revision is compared to the working directory, and, when no
954 951 revisions are specified, the working directory files are compared
955 952 to its parent.
956 953
957 954 Without the -a option, diff will avoid generating diffs of files
958 955 it detects as binary. With -a, diff will generate a diff anyway,
959 956 probably with undesirable results.
960 957 """
961 958 node1, node2 = cmdutil.revpair(repo, opts['rev'])
962 959
963 960 m = cmdutil.match(repo, pats, opts)
964 961 patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
965 962
966 963 def export(ui, repo, *changesets, **opts):
967 964 """dump the header and diffs for one or more changesets
968 965
969 966 Print the changeset header and diffs for one or more revisions.
970 967
971 968 The information shown in the changeset header is: author,
972 969 changeset hash, parent(s) and commit comment.
973 970
974 971 NOTE: export may generate unexpected diff output for merge changesets,
975 972 as it will compare the merge changeset against its first parent only.
976 973
977 974 Output may be to a file, in which case the name of the file is
978 975 given using a format string. The formatting rules are as follows:
979 976
980 977 %% literal "%" character
981 978 %H changeset hash (40 bytes of hexadecimal)
982 979 %N number of patches being generated
983 980 %R changeset revision number
984 981 %b basename of the exporting repository
985 982 %h short-form changeset hash (12 bytes of hexadecimal)
986 983 %n zero-padded sequence number, starting at 1
987 984 %r zero-padded changeset revision number
988 985
989 986 Without the -a option, export will avoid generating diffs of files
990 987 it detects as binary. With -a, export will generate a diff anyway,
991 988 probably with undesirable results.
992 989
993 990 With the --switch-parent option, the diff will be against the second
994 991 parent. It can be useful to review a merge.
995 992 """
996 993 if not changesets:
997 994 raise util.Abort(_("export requires at least one changeset"))
998 995 revs = cmdutil.revrange(repo, changesets)
999 996 if len(revs) > 1:
1000 997 ui.note(_('exporting patches:\n'))
1001 998 else:
1002 999 ui.note(_('exporting patch:\n'))
1003 1000 patch.export(repo, revs, template=opts['output'],
1004 1001 switch_parent=opts['switch_parent'],
1005 1002 opts=patch.diffopts(ui, opts))
1006 1003
1007 1004 def grep(ui, repo, pattern, *pats, **opts):
1008 1005 """search for a pattern in specified files and revisions
1009 1006
1010 1007 Search revisions of files for a regular expression.
1011 1008
1012 1009 This command behaves differently than Unix grep. It only accepts
1013 1010 Python/Perl regexps. It searches repository history, not the
1014 1011 working directory. It always prints the revision number in which
1015 1012 a match appears.
1016 1013
1017 1014 By default, grep only prints output for the first revision of a
1018 1015 file in which it finds a match. To get it to print every revision
1019 1016 that contains a change in match status ("-" for a match that
1020 1017 becomes a non-match, or "+" for a non-match that becomes a match),
1021 1018 use the --all flag.
1022 1019 """
1023 1020 reflags = 0
1024 1021 if opts['ignore_case']:
1025 1022 reflags |= re.I
1026 1023 try:
1027 1024 regexp = re.compile(pattern, reflags)
1028 1025 except Exception, inst:
1029 1026 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1030 1027 return None
1031 1028 sep, eol = ':', '\n'
1032 1029 if opts['print0']:
1033 1030 sep = eol = '\0'
1034 1031
1035 1032 fcache = {}
1036 1033 def getfile(fn):
1037 1034 if fn not in fcache:
1038 1035 fcache[fn] = repo.file(fn)
1039 1036 return fcache[fn]
1040 1037
1041 1038 def matchlines(body):
1042 1039 begin = 0
1043 1040 linenum = 0
1044 1041 while True:
1045 1042 match = regexp.search(body, begin)
1046 1043 if not match:
1047 1044 break
1048 1045 mstart, mend = match.span()
1049 1046 linenum += body.count('\n', begin, mstart) + 1
1050 1047 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1051 1048 lend = body.find('\n', mend)
1052 1049 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1053 1050 begin = lend + 1
1054 1051
1055 1052 class linestate(object):
1056 1053 def __init__(self, line, linenum, colstart, colend):
1057 1054 self.line = line
1058 1055 self.linenum = linenum
1059 1056 self.colstart = colstart
1060 1057 self.colend = colend
1061 1058
1062 1059 def __hash__(self):
1063 1060 return hash((self.linenum, self.line))
1064 1061
1065 1062 def __eq__(self, other):
1066 1063 return self.line == other.line
1067 1064
1068 1065 matches = {}
1069 1066 copies = {}
1070 1067 def grepbody(fn, rev, body):
1071 1068 matches[rev].setdefault(fn, [])
1072 1069 m = matches[rev][fn]
1073 1070 for lnum, cstart, cend, line in matchlines(body):
1074 1071 s = linestate(line, lnum, cstart, cend)
1075 1072 m.append(s)
1076 1073
1077 1074 def difflinestates(a, b):
1078 1075 sm = difflib.SequenceMatcher(None, a, b)
1079 1076 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1080 1077 if tag == 'insert':
1081 1078 for i in xrange(blo, bhi):
1082 1079 yield ('+', b[i])
1083 1080 elif tag == 'delete':
1084 1081 for i in xrange(alo, ahi):
1085 1082 yield ('-', a[i])
1086 1083 elif tag == 'replace':
1087 1084 for i in xrange(alo, ahi):
1088 1085 yield ('-', a[i])
1089 1086 for i in xrange(blo, bhi):
1090 1087 yield ('+', b[i])
1091 1088
1092 1089 prev = {}
1093 1090 def display(fn, rev, states, prevstates):
1094 1091 datefunc = ui.quiet and util.shortdate or util.datestr
1095 1092 found = False
1096 1093 filerevmatches = {}
1097 1094 r = prev.get(fn, -1)
1098 1095 if opts['all']:
1099 1096 iter = difflinestates(states, prevstates)
1100 1097 else:
1101 1098 iter = [('', l) for l in prevstates]
1102 1099 for change, l in iter:
1103 1100 cols = [fn, str(r)]
1104 1101 if opts['line_number']:
1105 1102 cols.append(str(l.linenum))
1106 1103 if opts['all']:
1107 1104 cols.append(change)
1108 1105 if opts['user']:
1109 1106 cols.append(ui.shortuser(get(r)[1]))
1110 1107 if opts.get('date'):
1111 1108 cols.append(datefunc(get(r)[2]))
1112 1109 if opts['files_with_matches']:
1113 1110 c = (fn, r)
1114 1111 if c in filerevmatches:
1115 1112 continue
1116 1113 filerevmatches[c] = 1
1117 1114 else:
1118 1115 cols.append(l.line)
1119 1116 ui.write(sep.join(cols), eol)
1120 1117 found = True
1121 1118 return found
1122 1119
1123 1120 fstate = {}
1124 1121 skip = {}
1125 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1122 get = util.cachefunc(lambda r: repo[r].changeset())
1126 1123 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1127 1124 found = False
1128 1125 follow = opts.get('follow')
1129 1126 for st, rev, fns in changeiter:
1130 1127 if st == 'window':
1131 1128 matches.clear()
1132 1129 elif st == 'add':
1133 ctx = repo.changectx(rev)
1130 ctx = repo[rev]
1134 1131 matches[rev] = {}
1135 1132 for fn in fns:
1136 1133 if fn in skip:
1137 1134 continue
1138 1135 try:
1139 1136 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1140 1137 fstate.setdefault(fn, [])
1141 1138 if follow:
1142 1139 copied = getfile(fn).renamed(ctx.filenode(fn))
1143 1140 if copied:
1144 1141 copies.setdefault(rev, {})[fn] = copied[0]
1145 1142 except revlog.LookupError:
1146 1143 pass
1147 1144 elif st == 'iter':
1148 1145 states = matches[rev].items()
1149 1146 states.sort()
1150 1147 for fn, m in states:
1151 1148 copy = copies.get(rev, {}).get(fn)
1152 1149 if fn in skip:
1153 1150 if copy:
1154 1151 skip[copy] = True
1155 1152 continue
1156 1153 if fn in prev or fstate[fn]:
1157 1154 r = display(fn, rev, m, fstate[fn])
1158 1155 found = found or r
1159 1156 if r and not opts['all']:
1160 1157 skip[fn] = True
1161 1158 if copy:
1162 1159 skip[copy] = True
1163 1160 fstate[fn] = m
1164 1161 if copy:
1165 1162 fstate[copy] = m
1166 1163 prev[fn] = rev
1167 1164
1168 1165 fstate = fstate.items()
1169 1166 fstate.sort()
1170 1167 for fn, state in fstate:
1171 1168 if fn in skip:
1172 1169 continue
1173 1170 if fn not in copies.get(prev[fn], {}):
1174 1171 found = display(fn, rev, {}, state) or found
1175 1172 return (not found and 1) or 0
1176 1173
1177 1174 def heads(ui, repo, *branchrevs, **opts):
1178 1175 """show current repository heads or show branch heads
1179 1176
1180 1177 With no arguments, show all repository head changesets.
1181 1178
1182 1179 If branch or revisions names are given this will show the heads of
1183 1180 the specified branches or the branches those revisions are tagged
1184 1181 with.
1185 1182
1186 1183 Repository "heads" are changesets that don't have child
1187 1184 changesets. They are where development generally takes place and
1188 1185 are the usual targets for update and merge operations.
1189 1186
1190 1187 Branch heads are changesets that have a given branch tag, but have
1191 1188 no child changesets with that tag. They are usually where
1192 1189 development on the given branch takes place.
1193 1190 """
1194 1191 if opts['rev']:
1195 1192 start = repo.lookup(opts['rev'])
1196 1193 else:
1197 1194 start = None
1198 1195 if not branchrevs:
1199 1196 # Assume we're looking repo-wide heads if no revs were specified.
1200 1197 heads = repo.heads(start)
1201 1198 else:
1202 1199 heads = []
1203 1200 visitedset = util.set()
1204 1201 for branchrev in branchrevs:
1205 branch = repo.changectx(branchrev).branch()
1202 branch = repo[branchrev].branch()
1206 1203 if branch in visitedset:
1207 1204 continue
1208 1205 visitedset.add(branch)
1209 1206 bheads = repo.branchheads(branch, start)
1210 1207 if not bheads:
1211 1208 if branch != branchrev:
1212 1209 ui.warn(_("no changes on branch %s containing %s are "
1213 1210 "reachable from %s\n")
1214 1211 % (branch, branchrev, opts['rev']))
1215 1212 else:
1216 1213 ui.warn(_("no changes on branch %s are reachable from %s\n")
1217 1214 % (branch, opts['rev']))
1218 1215 heads.extend(bheads)
1219 1216 if not heads:
1220 1217 return 1
1221 1218 displayer = cmdutil.show_changeset(ui, repo, opts)
1222 1219 for n in heads:
1223 1220 displayer.show(changenode=n)
1224 1221
1225 1222 def help_(ui, name=None, with_version=False):
1226 1223 """show help for a command, extension, or list of commands
1227 1224
1228 1225 With no arguments, print a list of commands and short help.
1229 1226
1230 1227 Given a command name, print help for that command.
1231 1228
1232 1229 Given an extension name, print help for that extension, and the
1233 1230 commands it provides."""
1234 1231 option_lists = []
1235 1232
1236 1233 def addglobalopts(aliases):
1237 1234 if ui.verbose:
1238 1235 option_lists.append((_("global options:"), globalopts))
1239 1236 if name == 'shortlist':
1240 1237 option_lists.append((_('use "hg help" for the full list '
1241 1238 'of commands'), ()))
1242 1239 else:
1243 1240 if name == 'shortlist':
1244 1241 msg = _('use "hg help" for the full list of commands '
1245 1242 'or "hg -v" for details')
1246 1243 elif aliases:
1247 1244 msg = _('use "hg -v help%s" to show aliases and '
1248 1245 'global options') % (name and " " + name or "")
1249 1246 else:
1250 1247 msg = _('use "hg -v help %s" to show global options') % name
1251 1248 option_lists.append((msg, ()))
1252 1249
1253 1250 def helpcmd(name):
1254 1251 if with_version:
1255 1252 version_(ui)
1256 1253 ui.write('\n')
1257 1254
1258 1255 try:
1259 1256 aliases, i = cmdutil.findcmd(ui, name, table)
1260 1257 except cmdutil.AmbiguousCommand, inst:
1261 1258 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1262 1259 helplist(_('list of commands:\n\n'), select)
1263 1260 return
1264 1261
1265 1262 # synopsis
1266 1263 ui.write("%s\n" % i[2])
1267 1264
1268 1265 # aliases
1269 1266 if not ui.quiet and len(aliases) > 1:
1270 1267 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1271 1268
1272 1269 # description
1273 1270 doc = i[0].__doc__
1274 1271 if not doc:
1275 1272 doc = _("(No help text available)")
1276 1273 if ui.quiet:
1277 1274 doc = doc.splitlines(0)[0]
1278 1275 ui.write("\n%s\n" % doc.rstrip())
1279 1276
1280 1277 if not ui.quiet:
1281 1278 # options
1282 1279 if i[1]:
1283 1280 option_lists.append((_("options:\n"), i[1]))
1284 1281
1285 1282 addglobalopts(False)
1286 1283
1287 1284 def helplist(header, select=None):
1288 1285 h = {}
1289 1286 cmds = {}
1290 1287 for c, e in table.items():
1291 1288 f = c.split("|", 1)[0]
1292 1289 if select and not select(f):
1293 1290 continue
1294 1291 if name == "shortlist" and not f.startswith("^"):
1295 1292 continue
1296 1293 f = f.lstrip("^")
1297 1294 if not ui.debugflag and f.startswith("debug"):
1298 1295 continue
1299 1296 doc = e[0].__doc__
1300 1297 if not doc:
1301 1298 doc = _("(No help text available)")
1302 1299 h[f] = doc.splitlines(0)[0].rstrip()
1303 1300 cmds[f] = c.lstrip("^")
1304 1301
1305 1302 if not h:
1306 1303 ui.status(_('no commands defined\n'))
1307 1304 return
1308 1305
1309 1306 ui.status(header)
1310 1307 fns = h.keys()
1311 1308 fns.sort()
1312 1309 m = max(map(len, fns))
1313 1310 for f in fns:
1314 1311 if ui.verbose:
1315 1312 commands = cmds[f].replace("|",", ")
1316 1313 ui.write(" %s:\n %s\n"%(commands, h[f]))
1317 1314 else:
1318 1315 ui.write(' %-*s %s\n' % (m, f, h[f]))
1319 1316
1320 1317 if not ui.quiet:
1321 1318 addglobalopts(True)
1322 1319
1323 1320 def helptopic(name):
1324 1321 v = None
1325 1322 for i, d in help.helptable:
1326 1323 l = i.split('|')
1327 1324 if name in l:
1328 1325 v = i
1329 1326 header = l[-1]
1330 1327 doc = d
1331 1328 if not v:
1332 1329 raise cmdutil.UnknownCommand(name)
1333 1330
1334 1331 # description
1335 1332 if not doc:
1336 1333 doc = _("(No help text available)")
1337 1334 if callable(doc):
1338 1335 doc = doc()
1339 1336
1340 1337 ui.write("%s\n" % header)
1341 1338 ui.write("%s\n" % doc.rstrip())
1342 1339
1343 1340 def helpext(name):
1344 1341 try:
1345 1342 mod = extensions.find(name)
1346 1343 except KeyError:
1347 1344 raise cmdutil.UnknownCommand(name)
1348 1345
1349 1346 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1350 1347 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1351 1348 for d in doc[1:]:
1352 1349 ui.write(d, '\n')
1353 1350
1354 1351 ui.status('\n')
1355 1352
1356 1353 try:
1357 1354 ct = mod.cmdtable
1358 1355 except AttributeError:
1359 1356 ct = {}
1360 1357
1361 1358 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1362 1359 helplist(_('list of commands:\n\n'), modcmds.has_key)
1363 1360
1364 1361 if name and name != 'shortlist':
1365 1362 i = None
1366 1363 for f in (helpcmd, helptopic, helpext):
1367 1364 try:
1368 1365 f(name)
1369 1366 i = None
1370 1367 break
1371 1368 except cmdutil.UnknownCommand, inst:
1372 1369 i = inst
1373 1370 if i:
1374 1371 raise i
1375 1372
1376 1373 else:
1377 1374 # program name
1378 1375 if ui.verbose or with_version:
1379 1376 version_(ui)
1380 1377 else:
1381 1378 ui.status(_("Mercurial Distributed SCM\n"))
1382 1379 ui.status('\n')
1383 1380
1384 1381 # list of commands
1385 1382 if name == "shortlist":
1386 1383 header = _('basic commands:\n\n')
1387 1384 else:
1388 1385 header = _('list of commands:\n\n')
1389 1386
1390 1387 helplist(header)
1391 1388
1392 1389 # list all option lists
1393 1390 opt_output = []
1394 1391 for title, options in option_lists:
1395 1392 opt_output.append(("\n%s" % title, None))
1396 1393 for shortopt, longopt, default, desc in options:
1397 1394 if "DEPRECATED" in desc and not ui.verbose: continue
1398 1395 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1399 1396 longopt and " --%s" % longopt),
1400 1397 "%s%s" % (desc,
1401 1398 default
1402 1399 and _(" (default: %s)") % default
1403 1400 or "")))
1404 1401
1405 1402 if ui.verbose:
1406 1403 ui.write(_("\nspecial help topics:\n"))
1407 1404 topics = []
1408 1405 for i, d in help.helptable:
1409 1406 l = i.split('|')
1410 1407 topics.append((", ".join(l[:-1]), l[-1]))
1411 1408 topics_len = max([len(s[0]) for s in topics])
1412 1409 for t, desc in topics:
1413 1410 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1414 1411
1415 1412 if opt_output:
1416 1413 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1417 1414 for first, second in opt_output:
1418 1415 if second:
1419 1416 ui.write(" %-*s %s\n" % (opts_len, first, second))
1420 1417 else:
1421 1418 ui.write("%s\n" % first)
1422 1419
1423 1420 def identify(ui, repo, source=None,
1424 1421 rev=None, num=None, id=None, branch=None, tags=None):
1425 1422 """identify the working copy or specified revision
1426 1423
1427 1424 With no revision, print a summary of the current state of the repo.
1428 1425
1429 1426 With a path, do a lookup in another repository.
1430 1427
1431 1428 This summary identifies the repository state using one or two parent
1432 1429 hash identifiers, followed by a "+" if there are uncommitted changes
1433 1430 in the working directory, a list of tags for this revision and a branch
1434 1431 name for non-default branches.
1435 1432 """
1436 1433
1437 1434 if not repo and not source:
1438 1435 raise util.Abort(_("There is no Mercurial repository here "
1439 1436 "(.hg not found)"))
1440 1437
1441 1438 hexfunc = ui.debugflag and hex or short
1442 1439 default = not (num or id or branch or tags)
1443 1440 output = []
1444 1441
1445 1442 if source:
1446 1443 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1447 1444 srepo = hg.repository(ui, source)
1448 1445 if not rev and revs:
1449 1446 rev = revs[0]
1450 1447 if not rev:
1451 1448 rev = "tip"
1452 1449 if num or branch or tags:
1453 1450 raise util.Abort(
1454 1451 "can't query remote revision number, branch, or tags")
1455 1452 output = [hexfunc(srepo.lookup(rev))]
1456 1453 elif not rev:
1457 ctx = repo.changectx(None)
1454 ctx = repo[None]
1458 1455 parents = ctx.parents()
1459 1456 changed = False
1460 1457 if default or id or num:
1461 1458 changed = ctx.files() + ctx.deleted()
1462 1459 if default or id:
1463 1460 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1464 1461 (changed) and "+" or "")]
1465 1462 if num:
1466 1463 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1467 1464 (changed) and "+" or ""))
1468 1465 else:
1469 ctx = repo.changectx(rev)
1466 ctx = repo[rev]
1470 1467 if default or id:
1471 1468 output = [hexfunc(ctx.node())]
1472 1469 if num:
1473 1470 output.append(str(ctx.rev()))
1474 1471
1475 1472 if not source and default and not ui.quiet:
1476 1473 b = util.tolocal(ctx.branch())
1477 1474 if b != 'default':
1478 1475 output.append("(%s)" % b)
1479 1476
1480 1477 # multiple tags for a single parent separated by '/'
1481 1478 t = "/".join(ctx.tags())
1482 1479 if t:
1483 1480 output.append(t)
1484 1481
1485 1482 if branch:
1486 1483 output.append(util.tolocal(ctx.branch()))
1487 1484
1488 1485 if tags:
1489 1486 output.extend(ctx.tags())
1490 1487
1491 1488 ui.write("%s\n" % ' '.join(output))
1492 1489
1493 1490 def import_(ui, repo, patch1, *patches, **opts):
1494 1491 """import an ordered set of patches
1495 1492
1496 1493 Import a list of patches and commit them individually.
1497 1494
1498 1495 If there are outstanding changes in the working directory, import
1499 1496 will abort unless given the -f flag.
1500 1497
1501 1498 You can import a patch straight from a mail message. Even patches
1502 1499 as attachments work (body part must be type text/plain or
1503 1500 text/x-patch to be used). From and Subject headers of email
1504 1501 message are used as default committer and commit message. All
1505 1502 text/plain body parts before first diff are added to commit
1506 1503 message.
1507 1504
1508 1505 If the imported patch was generated by hg export, user and description
1509 1506 from patch override values from message headers and body. Values
1510 1507 given on command line with -m and -u override these.
1511 1508
1512 1509 If --exact is specified, import will set the working directory
1513 1510 to the parent of each patch before applying it, and will abort
1514 1511 if the resulting changeset has a different ID than the one
1515 1512 recorded in the patch. This may happen due to character set
1516 1513 problems or other deficiencies in the text patch format.
1517 1514
1518 1515 To read a patch from standard input, use patch name "-".
1519 1516 See 'hg help dates' for a list of formats valid for -d/--date.
1520 1517 """
1521 1518 patches = (patch1,) + patches
1522 1519
1523 1520 date = opts.get('date')
1524 1521 if date:
1525 1522 opts['date'] = util.parsedate(date)
1526 1523
1527 1524 if opts.get('exact') or not opts['force']:
1528 1525 cmdutil.bail_if_changed(repo)
1529 1526
1530 1527 d = opts["base"]
1531 1528 strip = opts["strip"]
1532 1529 wlock = lock = None
1533 1530 try:
1534 1531 wlock = repo.wlock()
1535 1532 lock = repo.lock()
1536 1533 for p in patches:
1537 1534 pf = os.path.join(d, p)
1538 1535
1539 1536 if pf == '-':
1540 1537 ui.status(_("applying patch from stdin\n"))
1541 1538 data = patch.extract(ui, sys.stdin)
1542 1539 else:
1543 1540 ui.status(_("applying %s\n") % p)
1544 1541 if os.path.exists(pf):
1545 1542 data = patch.extract(ui, file(pf, 'rb'))
1546 1543 else:
1547 1544 data = patch.extract(ui, urllib.urlopen(pf))
1548 1545 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1549 1546
1550 1547 if tmpname is None:
1551 1548 raise util.Abort(_('no diffs found'))
1552 1549
1553 1550 try:
1554 1551 cmdline_message = cmdutil.logmessage(opts)
1555 1552 if cmdline_message:
1556 1553 # pickup the cmdline msg
1557 1554 message = cmdline_message
1558 1555 elif message:
1559 1556 # pickup the patch msg
1560 1557 message = message.strip()
1561 1558 else:
1562 1559 # launch the editor
1563 1560 message = None
1564 1561 ui.debug(_('message:\n%s\n') % message)
1565 1562
1566 wp = repo.changectx(None).parents()
1563 wp = repo.parents()
1567 1564 if opts.get('exact'):
1568 1565 if not nodeid or not p1:
1569 1566 raise util.Abort(_('not a mercurial patch'))
1570 1567 p1 = repo.lookup(p1)
1571 1568 p2 = repo.lookup(p2 or hex(nullid))
1572 1569
1573 1570 if p1 != wp[0].node():
1574 1571 hg.clean(repo, p1)
1575 1572 repo.dirstate.setparents(p1, p2)
1576 1573 elif p2:
1577 1574 try:
1578 1575 p1 = repo.lookup(p1)
1579 1576 p2 = repo.lookup(p2)
1580 1577 if p1 == wp[0].node():
1581 1578 repo.dirstate.setparents(p1, p2)
1582 1579 except RepoError:
1583 1580 pass
1584 1581 if opts.get('exact') or opts.get('import_branch'):
1585 1582 repo.dirstate.setbranch(branch or 'default')
1586 1583
1587 1584 files = {}
1588 1585 try:
1589 1586 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1590 1587 files=files)
1591 1588 finally:
1592 1589 files = patch.updatedir(ui, repo, files)
1593 1590 if not opts.get('no_commit'):
1594 1591 n = repo.commit(files, message, opts.get('user') or user,
1595 1592 opts.get('date') or date)
1596 1593 if opts.get('exact'):
1597 1594 if hex(n) != nodeid:
1598 1595 repo.rollback()
1599 1596 raise util.Abort(_('patch is damaged'
1600 1597 ' or loses information'))
1601 1598 # Force a dirstate write so that the next transaction
1602 1599 # backups an up-do-date file.
1603 1600 repo.dirstate.write()
1604 1601 finally:
1605 1602 os.unlink(tmpname)
1606 1603 finally:
1607 1604 del lock, wlock
1608 1605
1609 1606 def incoming(ui, repo, source="default", **opts):
1610 1607 """show new changesets found in source
1611 1608
1612 1609 Show new changesets found in the specified path/URL or the default
1613 1610 pull location. These are the changesets that would be pulled if a pull
1614 1611 was requested.
1615 1612
1616 1613 For remote repository, using --bundle avoids downloading the changesets
1617 1614 twice if the incoming is followed by a pull.
1618 1615
1619 1616 See pull for valid source format details.
1620 1617 """
1621 1618 limit = cmdutil.loglimit(opts)
1622 1619 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1623 1620 cmdutil.setremoteconfig(ui, opts)
1624 1621
1625 1622 other = hg.repository(ui, source)
1626 1623 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1627 1624 if revs:
1628 1625 revs = [other.lookup(rev) for rev in revs]
1629 1626 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1630 1627 if not incoming:
1631 1628 try:
1632 1629 os.unlink(opts["bundle"])
1633 1630 except:
1634 1631 pass
1635 1632 ui.status(_("no changes found\n"))
1636 1633 return 1
1637 1634
1638 1635 cleanup = None
1639 1636 try:
1640 1637 fname = opts["bundle"]
1641 1638 if fname or not other.local():
1642 1639 # create a bundle (uncompressed if other repo is not local)
1643 1640 if revs is None:
1644 1641 cg = other.changegroup(incoming, "incoming")
1645 1642 else:
1646 1643 cg = other.changegroupsubset(incoming, revs, 'incoming')
1647 1644 bundletype = other.local() and "HG10BZ" or "HG10UN"
1648 1645 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1649 1646 # keep written bundle?
1650 1647 if opts["bundle"]:
1651 1648 cleanup = None
1652 1649 if not other.local():
1653 1650 # use the created uncompressed bundlerepo
1654 1651 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1655 1652
1656 1653 o = other.changelog.nodesbetween(incoming, revs)[0]
1657 1654 if opts['newest_first']:
1658 1655 o.reverse()
1659 1656 displayer = cmdutil.show_changeset(ui, other, opts)
1660 1657 count = 0
1661 1658 for n in o:
1662 1659 if count >= limit:
1663 1660 break
1664 1661 parents = [p for p in other.changelog.parents(n) if p != nullid]
1665 1662 if opts['no_merges'] and len(parents) == 2:
1666 1663 continue
1667 1664 count += 1
1668 1665 displayer.show(changenode=n)
1669 1666 finally:
1670 1667 if hasattr(other, 'close'):
1671 1668 other.close()
1672 1669 if cleanup:
1673 1670 os.unlink(cleanup)
1674 1671
1675 1672 def init(ui, dest=".", **opts):
1676 1673 """create a new repository in the given directory
1677 1674
1678 1675 Initialize a new repository in the given directory. If the given
1679 1676 directory does not exist, it is created.
1680 1677
1681 1678 If no directory is given, the current directory is used.
1682 1679
1683 1680 It is possible to specify an ssh:// URL as the destination.
1684 1681 Look at the help text for the pull command for important details
1685 1682 about ssh:// URLs.
1686 1683 """
1687 1684 cmdutil.setremoteconfig(ui, opts)
1688 1685 hg.repository(ui, dest, create=1)
1689 1686
1690 1687 def locate(ui, repo, *pats, **opts):
1691 1688 """locate files matching specific patterns
1692 1689
1693 1690 Print all files under Mercurial control whose names match the
1694 1691 given patterns.
1695 1692
1696 1693 This command searches the entire repository by default. To search
1697 1694 just the current directory and its subdirectories, use
1698 1695 "--include .".
1699 1696
1700 1697 If no patterns are given to match, this command prints all file
1701 1698 names.
1702 1699
1703 1700 If you want to feed the output of this command into the "xargs"
1704 1701 command, use the "-0" option to both this command and "xargs".
1705 1702 This will avoid the problem of "xargs" treating single filenames
1706 1703 that contain white space as multiple filenames.
1707 1704 """
1708 1705 end = opts['print0'] and '\0' or '\n'
1709 1706 rev = opts['rev']
1710 1707 if rev:
1711 1708 node = repo.lookup(rev)
1712 1709 else:
1713 1710 node = None
1714 1711
1715 1712 ret = 1
1716 1713 m = cmdutil.match(repo, pats, opts, default='relglob')
1717 1714 m.bad = lambda x,y: False
1718 1715 for abs in repo.walk(m, node):
1719 1716 if not node and abs not in repo.dirstate:
1720 1717 continue
1721 1718 if opts['fullpath']:
1722 1719 ui.write(os.path.join(repo.root, abs), end)
1723 1720 else:
1724 1721 ui.write(((pats and m.rel(abs)) or abs), end)
1725 1722 ret = 0
1726 1723
1727 1724 return ret
1728 1725
1729 1726 def log(ui, repo, *pats, **opts):
1730 1727 """show revision history of entire repository or files
1731 1728
1732 1729 Print the revision history of the specified files or the entire
1733 1730 project.
1734 1731
1735 1732 File history is shown without following rename or copy history of
1736 1733 files. Use -f/--follow with a file name to follow history across
1737 1734 renames and copies. --follow without a file name will only show
1738 1735 ancestors or descendants of the starting revision. --follow-first
1739 1736 only follows the first parent of merge revisions.
1740 1737
1741 1738 If no revision range is specified, the default is tip:0 unless
1742 1739 --follow is set, in which case the working directory parent is
1743 1740 used as the starting revision.
1744 1741
1745 1742 See 'hg help dates' for a list of formats valid for -d/--date.
1746 1743
1747 1744 By default this command outputs: changeset id and hash, tags,
1748 1745 non-trivial parents, user, date and time, and a summary for each
1749 1746 commit. When the -v/--verbose switch is used, the list of changed
1750 1747 files and full commit message is shown.
1751 1748
1752 1749 NOTE: log -p may generate unexpected diff output for merge
1753 1750 changesets, as it will compare the merge changeset against its
1754 1751 first parent only. Also, the files: list will only reflect files
1755 1752 that are different from BOTH parents.
1756 1753
1757 1754 """
1758 1755
1759 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1756 get = util.cachefunc(lambda r: repo[r].changeset())
1760 1757 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1761 1758
1762 1759 limit = cmdutil.loglimit(opts)
1763 1760 count = 0
1764 1761
1765 1762 if opts['copies'] and opts['rev']:
1766 1763 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1767 1764 else:
1768 1765 endrev = repo.changelog.count()
1769 1766 rcache = {}
1770 1767 ncache = {}
1771 1768 def getrenamed(fn, rev):
1772 1769 '''looks up all renames for a file (up to endrev) the first
1773 1770 time the file is given. It indexes on the changerev and only
1774 1771 parses the manifest if linkrev != changerev.
1775 1772 Returns rename info for fn at changerev rev.'''
1776 1773 if fn not in rcache:
1777 1774 rcache[fn] = {}
1778 1775 ncache[fn] = {}
1779 1776 fl = repo.file(fn)
1780 1777 for i in xrange(fl.count()):
1781 1778 node = fl.node(i)
1782 1779 lr = fl.linkrev(node)
1783 1780 renamed = fl.renamed(node)
1784 1781 rcache[fn][lr] = renamed
1785 1782 if renamed:
1786 1783 ncache[fn][node] = renamed
1787 1784 if lr >= endrev:
1788 1785 break
1789 1786 if rev in rcache[fn]:
1790 1787 return rcache[fn][rev]
1791 1788
1792 1789 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1793 1790 # filectx logic.
1794 1791
1795 1792 try:
1796 return repo.changectx(rev).filectx(fn).renamed()
1793 return repo[rev][fn].renamed()
1797 1794 except revlog.LookupError:
1798 1795 pass
1799 1796 return None
1800 1797
1801 1798 df = False
1802 1799 if opts["date"]:
1803 1800 df = util.matchdate(opts["date"])
1804 1801
1805 1802 only_branches = opts['only_branch']
1806 1803
1807 1804 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1808 1805 for st, rev, fns in changeiter:
1809 1806 if st == 'add':
1810 1807 changenode = repo.changelog.node(rev)
1811 1808 parents = [p for p in repo.changelog.parentrevs(rev)
1812 1809 if p != nullrev]
1813 1810 if opts['no_merges'] and len(parents) == 2:
1814 1811 continue
1815 1812 if opts['only_merges'] and len(parents) != 2:
1816 1813 continue
1817 1814
1818 1815 if only_branches:
1819 1816 revbranch = get(rev)[5]['branch']
1820 1817 if revbranch not in only_branches:
1821 1818 continue
1822 1819
1823 1820 if df:
1824 1821 changes = get(rev)
1825 1822 if not df(changes[2][0]):
1826 1823 continue
1827 1824
1828 1825 if opts['keyword']:
1829 1826 changes = get(rev)
1830 1827 miss = 0
1831 1828 for k in [kw.lower() for kw in opts['keyword']]:
1832 1829 if not (k in changes[1].lower() or
1833 1830 k in changes[4].lower() or
1834 1831 k in " ".join(changes[3]).lower()):
1835 1832 miss = 1
1836 1833 break
1837 1834 if miss:
1838 1835 continue
1839 1836
1840 1837 copies = []
1841 1838 if opts.get('copies') and rev:
1842 1839 for fn in get(rev)[3]:
1843 1840 rename = getrenamed(fn, rev)
1844 1841 if rename:
1845 1842 copies.append((fn, rename[0]))
1846 1843 displayer.show(rev, changenode, copies=copies)
1847 1844 elif st == 'iter':
1848 1845 if count == limit: break
1849 1846 if displayer.flush(rev):
1850 1847 count += 1
1851 1848
1852 1849 def manifest(ui, repo, node=None, rev=None):
1853 1850 """output the current or given revision of the project manifest
1854 1851
1855 1852 Print a list of version controlled files for the given revision.
1856 1853 If no revision is given, the parent of the working directory is used,
1857 1854 or tip if no revision is checked out.
1858 1855
1859 1856 The manifest is the list of files being version controlled. If no revision
1860 1857 is given then the first parent of the working directory is used.
1861 1858
1862 1859 With -v flag, print file permissions, symlink and executable bits. With
1863 1860 --debug flag, print file revision hashes.
1864 1861 """
1865 1862
1866 1863 if rev and node:
1867 1864 raise util.Abort(_("please specify just one revision"))
1868 1865
1869 1866 if not node:
1870 1867 node = rev
1871 1868
1872 m = repo.changectx(node).manifest()
1869 m = repo[node].manifest()
1873 1870 files = m.keys()
1874 1871 files.sort()
1875 1872
1876 1873 for f in files:
1877 1874 if ui.debugflag:
1878 1875 ui.write("%40s " % hex(m[f]))
1879 1876 if ui.verbose:
1880 1877 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1881 1878 perm = m.execf(f) and "755" or "644"
1882 1879 ui.write("%3s %1s " % (perm, type))
1883 1880 ui.write("%s\n" % f)
1884 1881
1885 1882 def merge(ui, repo, node=None, force=None, rev=None):
1886 1883 """merge working directory with another revision
1887 1884
1888 1885 Merge the contents of the current working directory and the
1889 1886 requested revision. Files that changed between either parent are
1890 1887 marked as changed for the next commit and a commit must be
1891 1888 performed before any further updates are allowed.
1892 1889
1893 1890 If no revision is specified, the working directory's parent is a
1894 1891 head revision, and the current branch contains exactly one other head,
1895 1892 the other head is merged with by default. Otherwise, an explicit
1896 1893 revision to merge with must be provided.
1897 1894 """
1898 1895
1899 1896 if rev and node:
1900 1897 raise util.Abort(_("please specify just one revision"))
1901 1898 if not node:
1902 1899 node = rev
1903 1900
1904 1901 if not node:
1905 1902 branch = repo.changectx(None).branch()
1906 1903 bheads = repo.branchheads()
1907 1904 if len(bheads) > 2:
1908 1905 raise util.Abort(_("branch '%s' has %d heads - "
1909 1906 "please merge with an explicit rev") %
1910 1907 (branch, len(bheads)))
1911 1908
1912 1909 parent = repo.dirstate.parents()[0]
1913 1910 if len(bheads) == 1:
1914 1911 if len(repo.heads()) > 1:
1915 1912 raise util.Abort(_("branch '%s' has one head - "
1916 1913 "please merge with an explicit rev") %
1917 1914 branch)
1918 1915 msg = _('there is nothing to merge')
1919 if parent != repo.lookup(repo.changectx(None).branch()):
1916 if parent != repo.lookup(repo[None].branch()):
1920 1917 msg = _('%s - use "hg update" instead') % msg
1921 1918 raise util.Abort(msg)
1922 1919
1923 1920 if parent not in bheads:
1924 1921 raise util.Abort(_('working dir not at a head rev - '
1925 1922 'use "hg update" or merge with an explicit rev'))
1926 1923 node = parent == bheads[0] and bheads[-1] or bheads[0]
1927 1924 return hg.merge(repo, node, force=force)
1928 1925
1929 1926 def outgoing(ui, repo, dest=None, **opts):
1930 1927 """show changesets not found in destination
1931 1928
1932 1929 Show changesets not found in the specified destination repository or
1933 1930 the default push location. These are the changesets that would be pushed
1934 1931 if a push was requested.
1935 1932
1936 1933 See pull for valid destination format details.
1937 1934 """
1938 1935 limit = cmdutil.loglimit(opts)
1939 1936 dest, revs, checkout = hg.parseurl(
1940 1937 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1941 1938 cmdutil.setremoteconfig(ui, opts)
1942 1939 if revs:
1943 1940 revs = [repo.lookup(rev) for rev in revs]
1944 1941
1945 1942 other = hg.repository(ui, dest)
1946 1943 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1947 1944 o = repo.findoutgoing(other, force=opts['force'])
1948 1945 if not o:
1949 1946 ui.status(_("no changes found\n"))
1950 1947 return 1
1951 1948 o = repo.changelog.nodesbetween(o, revs)[0]
1952 1949 if opts['newest_first']:
1953 1950 o.reverse()
1954 1951 displayer = cmdutil.show_changeset(ui, repo, opts)
1955 1952 count = 0
1956 1953 for n in o:
1957 1954 if count >= limit:
1958 1955 break
1959 1956 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1960 1957 if opts['no_merges'] and len(parents) == 2:
1961 1958 continue
1962 1959 count += 1
1963 1960 displayer.show(changenode=n)
1964 1961
1965 1962 def parents(ui, repo, file_=None, **opts):
1966 1963 """show the parents of the working dir or revision
1967 1964
1968 1965 Print the working directory's parent revisions. If a
1969 1966 revision is given via --rev, the parent of that revision
1970 1967 will be printed. If a file argument is given, revision in
1971 1968 which the file was last changed (before the working directory
1972 1969 revision or the argument to --rev if given) is printed.
1973 1970 """
1974 1971 rev = opts.get('rev')
1975 1972 if rev:
1976 ctx = repo.changectx(rev)
1973 ctx = repo[rev]
1977 1974 else:
1978 ctx = repo.changectx(None)
1975 ctx = repo[None]
1979 1976
1980 1977 if file_:
1981 1978 m = cmdutil.match(repo, (file_,), opts)
1982 1979 if m.anypats() or len(m.files()) != 1:
1983 1980 raise util.Abort(_('can only specify an explicit file name'))
1984 1981 file_ = m.files()[0]
1985 1982 filenodes = []
1986 1983 for cp in ctx.parents():
1987 1984 if not cp:
1988 1985 continue
1989 1986 try:
1990 1987 filenodes.append(cp.filenode(file_))
1991 1988 except revlog.LookupError:
1992 1989 pass
1993 1990 if not filenodes:
1994 1991 raise util.Abort(_("'%s' not found in manifest!") % file_)
1995 1992 fl = repo.file(file_)
1996 1993 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1997 1994 else:
1998 1995 p = [cp.node() for cp in ctx.parents()]
1999 1996
2000 1997 displayer = cmdutil.show_changeset(ui, repo, opts)
2001 1998 for n in p:
2002 1999 if n != nullid:
2003 2000 displayer.show(changenode=n)
2004 2001
2005 2002 def paths(ui, repo, search=None):
2006 2003 """show definition of symbolic path names
2007 2004
2008 2005 Show definition of symbolic path name NAME. If no name is given, show
2009 2006 definition of available names.
2010 2007
2011 2008 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2012 2009 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2013 2010 """
2014 2011 if search:
2015 2012 for name, path in ui.configitems("paths"):
2016 2013 if name == search:
2017 2014 ui.write("%s\n" % util.hidepassword(path))
2018 2015 return
2019 2016 ui.warn(_("not found!\n"))
2020 2017 return 1
2021 2018 else:
2022 2019 for name, path in ui.configitems("paths"):
2023 2020 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
2024 2021
2025 2022 def postincoming(ui, repo, modheads, optupdate, checkout):
2026 2023 if modheads == 0:
2027 2024 return
2028 2025 if optupdate:
2029 2026 if modheads <= 1 or checkout:
2030 2027 return hg.update(repo, checkout)
2031 2028 else:
2032 2029 ui.status(_("not updating, since new heads added\n"))
2033 2030 if modheads > 1:
2034 2031 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2035 2032 else:
2036 2033 ui.status(_("(run 'hg update' to get a working copy)\n"))
2037 2034
2038 2035 def pull(ui, repo, source="default", **opts):
2039 2036 """pull changes from the specified source
2040 2037
2041 2038 Pull changes from a remote repository to a local one.
2042 2039
2043 2040 This finds all changes from the repository at the specified path
2044 2041 or URL and adds them to the local repository. By default, this
2045 2042 does not update the copy of the project in the working directory.
2046 2043
2047 2044 Valid URLs are of the form:
2048 2045
2049 2046 local/filesystem/path (or file://local/filesystem/path)
2050 2047 http://[user@]host[:port]/[path]
2051 2048 https://[user@]host[:port]/[path]
2052 2049 ssh://[user@]host[:port]/[path]
2053 2050 static-http://host[:port]/[path]
2054 2051
2055 2052 Paths in the local filesystem can either point to Mercurial
2056 2053 repositories or to bundle files (as created by 'hg bundle' or
2057 2054 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2058 2055 allows access to a Mercurial repository where you simply use a web
2059 2056 server to publish the .hg directory as static content.
2060 2057
2061 2058 An optional identifier after # indicates a particular branch, tag,
2062 2059 or changeset to pull.
2063 2060
2064 2061 Some notes about using SSH with Mercurial:
2065 2062 - SSH requires an accessible shell account on the destination machine
2066 2063 and a copy of hg in the remote path or specified with as remotecmd.
2067 2064 - path is relative to the remote user's home directory by default.
2068 2065 Use an extra slash at the start of a path to specify an absolute path:
2069 2066 ssh://example.com//tmp/repository
2070 2067 - Mercurial doesn't use its own compression via SSH; the right thing
2071 2068 to do is to configure it in your ~/.ssh/config, e.g.:
2072 2069 Host *.mylocalnetwork.example.com
2073 2070 Compression no
2074 2071 Host *
2075 2072 Compression yes
2076 2073 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2077 2074 with the --ssh command line option.
2078 2075 """
2079 2076 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
2080 2077 cmdutil.setremoteconfig(ui, opts)
2081 2078
2082 2079 other = hg.repository(ui, source)
2083 2080 ui.status(_('pulling from %s\n') % util.hidepassword(source))
2084 2081 if revs:
2085 2082 try:
2086 2083 revs = [other.lookup(rev) for rev in revs]
2087 2084 except NoCapability:
2088 2085 error = _("Other repository doesn't support revision lookup, "
2089 2086 "so a rev cannot be specified.")
2090 2087 raise util.Abort(error)
2091 2088
2092 2089 modheads = repo.pull(other, heads=revs, force=opts['force'])
2093 2090 return postincoming(ui, repo, modheads, opts['update'], checkout)
2094 2091
2095 2092 def push(ui, repo, dest=None, **opts):
2096 2093 """push changes to the specified destination
2097 2094
2098 2095 Push changes from the local repository to the given destination.
2099 2096
2100 2097 This is the symmetrical operation for pull. It helps to move
2101 2098 changes from the current repository to a different one. If the
2102 2099 destination is local this is identical to a pull in that directory
2103 2100 from the current one.
2104 2101
2105 2102 By default, push will refuse to run if it detects the result would
2106 2103 increase the number of remote heads. This generally indicates the
2107 2104 the client has forgotten to pull and merge before pushing.
2108 2105
2109 2106 Valid URLs are of the form:
2110 2107
2111 2108 local/filesystem/path (or file://local/filesystem/path)
2112 2109 ssh://[user@]host[:port]/[path]
2113 2110 http://[user@]host[:port]/[path]
2114 2111 https://[user@]host[:port]/[path]
2115 2112
2116 2113 An optional identifier after # indicates a particular branch, tag,
2117 2114 or changeset to push. If -r is used, the named changeset and all its
2118 2115 ancestors will be pushed to the remote repository.
2119 2116
2120 2117 Look at the help text for the pull command for important details
2121 2118 about ssh:// URLs.
2122 2119
2123 2120 Pushing to http:// and https:// URLs is only possible, if this
2124 2121 feature is explicitly enabled on the remote Mercurial server.
2125 2122 """
2126 2123 dest, revs, checkout = hg.parseurl(
2127 2124 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2128 2125 cmdutil.setremoteconfig(ui, opts)
2129 2126
2130 2127 other = hg.repository(ui, dest)
2131 2128 ui.status('pushing to %s\n' % util.hidepassword(dest))
2132 2129 if revs:
2133 2130 revs = [repo.lookup(rev) for rev in revs]
2134 2131 r = repo.push(other, opts['force'], revs=revs)
2135 2132 return r == 0
2136 2133
2137 2134 def rawcommit(ui, repo, *pats, **opts):
2138 2135 """raw commit interface (DEPRECATED)
2139 2136
2140 2137 (DEPRECATED)
2141 2138 Lowlevel commit, for use in helper scripts.
2142 2139
2143 2140 This command is not intended to be used by normal users, as it is
2144 2141 primarily useful for importing from other SCMs.
2145 2142
2146 2143 This command is now deprecated and will be removed in a future
2147 2144 release, please use debugsetparents and commit instead.
2148 2145 """
2149 2146
2150 2147 ui.warn(_("(the rawcommit command is deprecated)\n"))
2151 2148
2152 2149 message = cmdutil.logmessage(opts)
2153 2150
2154 2151 files = cmdutil.match(repo, pats, opts).files()
2155 2152 if opts['files']:
2156 2153 files += open(opts['files']).read().splitlines()
2157 2154
2158 2155 parents = [repo.lookup(p) for p in opts['parent']]
2159 2156
2160 2157 try:
2161 2158 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2162 2159 except ValueError, inst:
2163 2160 raise util.Abort(str(inst))
2164 2161
2165 2162 def recover(ui, repo):
2166 2163 """roll back an interrupted transaction
2167 2164
2168 2165 Recover from an interrupted commit or pull.
2169 2166
2170 2167 This command tries to fix the repository status after an interrupted
2171 2168 operation. It should only be necessary when Mercurial suggests it.
2172 2169 """
2173 2170 if repo.recover():
2174 2171 return hg.verify(repo)
2175 2172 return 1
2176 2173
2177 2174 def remove(ui, repo, *pats, **opts):
2178 2175 """remove the specified files on the next commit
2179 2176
2180 2177 Schedule the indicated files for removal from the repository.
2181 2178
2182 2179 This only removes files from the current branch, not from the entire
2183 2180 project history. -A can be used to remove only files that have already
2184 2181 been deleted, -f can be used to force deletion, and -Af can be used
2185 2182 to remove files from the next revision without deleting them.
2186 2183
2187 2184 The following table details the behavior of remove for different file
2188 2185 states (columns) and option combinations (rows). The file states are
2189 2186 Added, Clean, Modified and Missing (as reported by hg status). The
2190 2187 actions are Warn, Remove (from branch) and Delete (from disk).
2191 2188
2192 2189 A C M !
2193 2190 none W RD W R
2194 2191 -f R RD RD R
2195 2192 -A W W W R
2196 2193 -Af R R R R
2197 2194
2198 2195 This command schedules the files to be removed at the next commit.
2199 2196 To undo a remove before that, see hg revert.
2200 2197 """
2201 2198
2202 2199 after, force = opts.get('after'), opts.get('force')
2203 2200 if not pats and not after:
2204 2201 raise util.Abort(_('no files specified'))
2205 2202
2206 2203 m = cmdutil.match(repo, pats, opts)
2207 2204 mardu = map(dict.fromkeys, repo.status(match=m))[:5]
2208 2205 modified, added, removed, deleted, unknown = mardu
2209 2206
2210 2207 remove, forget = [], []
2211 2208 for abs in repo.walk(m):
2212 2209
2213 2210 reason = None
2214 2211 if abs in removed or abs in unknown:
2215 2212 continue
2216 2213
2217 2214 # last column
2218 2215 elif abs in deleted:
2219 2216 remove.append(abs)
2220 2217
2221 2218 # rest of the third row
2222 2219 elif after and not force:
2223 2220 reason = _('still exists (use -f to force removal)')
2224 2221
2225 2222 # rest of the first column
2226 2223 elif abs in added:
2227 2224 if not force:
2228 2225 reason = _('has been marked for add (use -f to force removal)')
2229 2226 else:
2230 2227 forget.append(abs)
2231 2228
2232 2229 # rest of the third column
2233 2230 elif abs in modified:
2234 2231 if not force:
2235 2232 reason = _('is modified (use -f to force removal)')
2236 2233 else:
2237 2234 remove.append(abs)
2238 2235
2239 2236 # rest of the second column
2240 2237 elif not reason:
2241 2238 remove.append(abs)
2242 2239
2243 2240 if reason:
2244 2241 ui.warn(_('not removing %s: file %s\n') % (m.rel(abs), reason))
2245 2242 elif ui.verbose or not m.exact(abs):
2246 2243 ui.status(_('removing %s\n') % m.rel(abs))
2247 2244
2248 2245 repo.forget(forget)
2249 2246 repo.remove(remove, unlink=not after)
2250 2247
2251 2248 def rename(ui, repo, *pats, **opts):
2252 2249 """rename files; equivalent of copy + remove
2253 2250
2254 2251 Mark dest as copies of sources; mark sources for deletion. If
2255 2252 dest is a directory, copies are put in that directory. If dest is
2256 2253 a file, there can only be one source.
2257 2254
2258 2255 By default, this command copies the contents of files as they
2259 2256 stand in the working directory. If invoked with --after, the
2260 2257 operation is recorded, but no copying is performed.
2261 2258
2262 2259 This command takes effect in the next commit. To undo a rename
2263 2260 before that, see hg revert.
2264 2261 """
2265 2262 wlock = repo.wlock(False)
2266 2263 try:
2267 2264 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2268 2265 finally:
2269 2266 del wlock
2270 2267
2271 2268 def resolve(ui, repo, *pats, **opts):
2272 2269 """resolve file merges from a branch merge or update
2273 2270
2274 2271 This command will attempt to resolve unresolved merges from the
2275 2272 last update or merge command. This will use the local file
2276 2273 revision preserved at the last update or merge to cleanly retry
2277 2274 the file merge attempt. With no file or options specified, this
2278 2275 command will attempt to resolve all unresolved files.
2279 2276
2280 2277 The codes used to show the status of files are:
2281 2278 U = unresolved
2282 2279 R = resolved
2283 2280 """
2284 2281
2285 2282 if len([x for x in opts if opts[x]]) > 1:
2286 2283 raise util.Abort(_("too many options specified"))
2287 2284
2288 2285 ms = merge_.mergestate(repo)
2289 2286 m = cmdutil.match(repo, pats, opts)
2290 2287
2291 2288 for f in ms:
2292 2289 if m(f):
2293 2290 if opts.get("list"):
2294 2291 ui.write("%s %s\n" % (ms[f].upper(), f))
2295 2292 elif opts.get("mark"):
2296 2293 ms.mark(f, "r")
2297 2294 elif opts.get("unmark"):
2298 2295 ms.mark(f, "u")
2299 2296 else:
2300 wctx = repo.changectx(None)
2297 wctx = repo[None]
2301 2298 mctx = wctx.parents()[-1]
2302 2299 ms.resolve(f, wctx, mctx)
2303 2300
2304 2301 def revert(ui, repo, *pats, **opts):
2305 2302 """restore individual files or dirs to an earlier state
2306 2303
2307 2304 (use update -r to check out earlier revisions, revert does not
2308 2305 change the working dir parents)
2309 2306
2310 2307 With no revision specified, revert the named files or directories
2311 2308 to the contents they had in the parent of the working directory.
2312 2309 This restores the contents of the affected files to an unmodified
2313 2310 state and unschedules adds, removes, copies, and renames. If the
2314 2311 working directory has two parents, you must explicitly specify the
2315 2312 revision to revert to.
2316 2313
2317 2314 Using the -r option, revert the given files or directories to their
2318 2315 contents as of a specific revision. This can be helpful to "roll
2319 2316 back" some or all of an earlier change.
2320 2317 See 'hg help dates' for a list of formats valid for -d/--date.
2321 2318
2322 2319 Revert modifies the working directory. It does not commit any
2323 2320 changes, or change the parent of the working directory. If you
2324 2321 revert to a revision other than the parent of the working
2325 2322 directory, the reverted files will thus appear modified
2326 2323 afterwards.
2327 2324
2328 2325 If a file has been deleted, it is restored. If the executable
2329 2326 mode of a file was changed, it is reset.
2330 2327
2331 2328 If names are given, all files matching the names are reverted.
2332 2329 If no arguments are given, no files are reverted.
2333 2330
2334 2331 Modified files are saved with a .orig suffix before reverting.
2335 2332 To disable these backups, use --no-backup.
2336 2333 """
2337 2334
2338 2335 if opts["date"]:
2339 2336 if opts["rev"]:
2340 2337 raise util.Abort(_("you can't specify a revision and a date"))
2341 2338 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2342 2339
2343 2340 if not pats and not opts['all']:
2344 2341 raise util.Abort(_('no files or directories specified; '
2345 2342 'use --all to revert the whole repo'))
2346 2343
2347 2344 parent, p2 = repo.dirstate.parents()
2348 2345 if not opts['rev'] and p2 != nullid:
2349 2346 raise util.Abort(_('uncommitted merge - please provide a '
2350 2347 'specific revision'))
2351 ctx = repo.changectx(opts['rev'])
2348 ctx = repo[opts['rev']]
2352 2349 node = ctx.node()
2353 2350 mf = ctx.manifest()
2354 2351 if node == parent:
2355 2352 pmf = mf
2356 2353 else:
2357 2354 pmf = None
2358 2355
2359 2356 # need all matching names in dirstate and manifest of target rev,
2360 2357 # so have to walk both. do not print errors if files exist in one
2361 2358 # but not other.
2362 2359
2363 2360 names = {}
2364 2361
2365 2362 wlock = repo.wlock()
2366 2363 try:
2367 2364 # walk dirstate.
2368 2365 files = []
2369 2366
2370 2367 m = cmdutil.match(repo, pats, opts)
2371 2368 m.bad = lambda x,y: False
2372 2369 for abs in repo.walk(m):
2373 2370 names[abs] = m.rel(abs), m.exact(abs)
2374 2371
2375 2372 # walk target manifest.
2376 2373
2377 2374 def badfn(path, msg):
2378 2375 if path in names:
2379 2376 return False
2380 2377 path_ = path + '/'
2381 2378 for f in names:
2382 2379 if f.startswith(path_):
2383 2380 return False
2384 2381 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2385 2382 return False
2386 2383
2387 2384 m = cmdutil.match(repo, pats, opts)
2388 2385 m.bad = badfn
2389 2386 for abs in repo.walk(m, node=node):
2390 2387 if abs not in names:
2391 2388 names[abs] = m.rel(abs), m.exact(abs)
2392 2389
2393 2390 m = cmdutil.matchfiles(repo, names)
2394 2391 changes = repo.status(match=m)[:4]
2395 2392 modified, added, removed, deleted = map(dict.fromkeys, changes)
2396 2393
2397 2394 # if f is a rename, also revert the source
2398 2395 cwd = repo.getcwd()
2399 2396 for f in added:
2400 2397 src = repo.dirstate.copied(f)
2401 2398 if src and src not in names and repo.dirstate[src] == 'r':
2402 2399 removed[src] = None
2403 2400 names[src] = (repo.pathto(src, cwd), True)
2404 2401
2405 2402 def removeforget(abs):
2406 2403 if repo.dirstate[abs] == 'a':
2407 2404 return _('forgetting %s\n')
2408 2405 return _('removing %s\n')
2409 2406
2410 2407 revert = ([], _('reverting %s\n'))
2411 2408 add = ([], _('adding %s\n'))
2412 2409 remove = ([], removeforget)
2413 2410 undelete = ([], _('undeleting %s\n'))
2414 2411
2415 2412 disptable = (
2416 2413 # dispatch table:
2417 2414 # file state
2418 2415 # action if in target manifest
2419 2416 # action if not in target manifest
2420 2417 # make backup if in target manifest
2421 2418 # make backup if not in target manifest
2422 2419 (modified, revert, remove, True, True),
2423 2420 (added, revert, remove, True, False),
2424 2421 (removed, undelete, None, False, False),
2425 2422 (deleted, revert, remove, False, False),
2426 2423 )
2427 2424
2428 2425 entries = names.items()
2429 2426 entries.sort()
2430 2427
2431 2428 for abs, (rel, exact) in entries:
2432 2429 mfentry = mf.get(abs)
2433 2430 target = repo.wjoin(abs)
2434 2431 def handle(xlist, dobackup):
2435 2432 xlist[0].append(abs)
2436 2433 if dobackup and not opts['no_backup'] and util.lexists(target):
2437 2434 bakname = "%s.orig" % rel
2438 2435 ui.note(_('saving current version of %s as %s\n') %
2439 2436 (rel, bakname))
2440 2437 if not opts.get('dry_run'):
2441 2438 util.copyfile(target, bakname)
2442 2439 if ui.verbose or not exact:
2443 2440 msg = xlist[1]
2444 2441 if not isinstance(msg, basestring):
2445 2442 msg = msg(abs)
2446 2443 ui.status(msg % rel)
2447 2444 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2448 2445 if abs not in table: continue
2449 2446 # file has changed in dirstate
2450 2447 if mfentry:
2451 2448 handle(hitlist, backuphit)
2452 2449 elif misslist is not None:
2453 2450 handle(misslist, backupmiss)
2454 2451 break
2455 2452 else:
2456 2453 if abs not in repo.dirstate:
2457 2454 if mfentry:
2458 2455 handle(add, True)
2459 2456 elif exact:
2460 2457 ui.warn(_('file not managed: %s\n') % rel)
2461 2458 continue
2462 2459 # file has not changed in dirstate
2463 2460 if node == parent:
2464 2461 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2465 2462 continue
2466 2463 if pmf is None:
2467 2464 # only need parent manifest in this unlikely case,
2468 2465 # so do not read by default
2469 pmf = repo.changectx(parent).manifest()
2466 pmf = repo[parent].manifest()
2470 2467 if abs in pmf:
2471 2468 if mfentry:
2472 2469 # if version of file is same in parent and target
2473 2470 # manifests, do nothing
2474 2471 if (pmf[abs] != mfentry or
2475 2472 pmf.flags(abs) != mf.flags(abs)):
2476 2473 handle(revert, False)
2477 2474 else:
2478 2475 handle(remove, False)
2479 2476
2480 2477 if not opts.get('dry_run'):
2481 2478 def checkout(f):
2482 2479 fc = ctx[f]
2483 2480 repo.wwrite(f, fc.data(), fc.flags())
2484 2481
2485 2482 audit_path = util.path_auditor(repo.root)
2486 2483 for f in remove[0]:
2487 2484 if repo.dirstate[f] == 'a':
2488 2485 repo.dirstate.forget(f)
2489 2486 continue
2490 2487 audit_path(f)
2491 2488 try:
2492 2489 util.unlink(repo.wjoin(f))
2493 2490 except OSError:
2494 2491 pass
2495 2492 repo.dirstate.remove(f)
2496 2493
2497 2494 normal = None
2498 2495 if node == parent:
2499 2496 # We're reverting to our parent. If possible, we'd like status
2500 2497 # to report the file as clean. We have to use normallookup for
2501 2498 # merges to avoid losing information about merged/dirty files.
2502 2499 if p2 != nullid:
2503 2500 normal = repo.dirstate.normallookup
2504 2501 else:
2505 2502 normal = repo.dirstate.normal
2506 2503 for f in revert[0]:
2507 2504 checkout(f)
2508 2505 if normal:
2509 2506 normal(f)
2510 2507
2511 2508 for f in add[0]:
2512 2509 checkout(f)
2513 2510 repo.dirstate.add(f)
2514 2511
2515 2512 normal = repo.dirstate.normallookup
2516 2513 if node == parent and p2 == nullid:
2517 2514 normal = repo.dirstate.normal
2518 2515 for f in undelete[0]:
2519 2516 checkout(f)
2520 2517 normal(f)
2521 2518
2522 2519 finally:
2523 2520 del wlock
2524 2521
2525 2522 def rollback(ui, repo):
2526 2523 """roll back the last transaction
2527 2524
2528 2525 This command should be used with care. There is only one level of
2529 2526 rollback, and there is no way to undo a rollback. It will also
2530 2527 restore the dirstate at the time of the last transaction, losing
2531 2528 any dirstate changes since that time.
2532 2529
2533 2530 Transactions are used to encapsulate the effects of all commands
2534 2531 that create new changesets or propagate existing changesets into a
2535 2532 repository. For example, the following commands are transactional,
2536 2533 and their effects can be rolled back:
2537 2534
2538 2535 commit
2539 2536 import
2540 2537 pull
2541 2538 push (with this repository as destination)
2542 2539 unbundle
2543 2540
2544 2541 This command is not intended for use on public repositories. Once
2545 2542 changes are visible for pull by other users, rolling a transaction
2546 2543 back locally is ineffective (someone else may already have pulled
2547 2544 the changes). Furthermore, a race is possible with readers of the
2548 2545 repository; for example an in-progress pull from the repository
2549 2546 may fail if a rollback is performed.
2550 2547 """
2551 2548 repo.rollback()
2552 2549
2553 2550 def root(ui, repo):
2554 2551 """print the root (top) of the current working dir
2555 2552
2556 2553 Print the root directory of the current repository.
2557 2554 """
2558 2555 ui.write(repo.root + "\n")
2559 2556
2560 2557 def serve(ui, repo, **opts):
2561 2558 """export the repository via HTTP
2562 2559
2563 2560 Start a local HTTP repository browser and pull server.
2564 2561
2565 2562 By default, the server logs accesses to stdout and errors to
2566 2563 stderr. Use the "-A" and "-E" options to log to files.
2567 2564 """
2568 2565
2569 2566 if opts["stdio"]:
2570 2567 if repo is None:
2571 2568 raise RepoError(_("There is no Mercurial repository here"
2572 2569 " (.hg not found)"))
2573 2570 s = sshserver.sshserver(ui, repo)
2574 2571 s.serve_forever()
2575 2572
2576 2573 parentui = ui.parentui or ui
2577 2574 optlist = ("name templates style address port prefix ipv6"
2578 2575 " accesslog errorlog webdir_conf certificate")
2579 2576 for o in optlist.split():
2580 2577 if opts[o]:
2581 2578 parentui.setconfig("web", o, str(opts[o]))
2582 2579 if (repo is not None) and (repo.ui != parentui):
2583 2580 repo.ui.setconfig("web", o, str(opts[o]))
2584 2581
2585 2582 if repo is None and not ui.config("web", "webdir_conf"):
2586 2583 raise RepoError(_("There is no Mercurial repository here"
2587 2584 " (.hg not found)"))
2588 2585
2589 2586 class service:
2590 2587 def init(self):
2591 2588 util.set_signal_handler()
2592 2589 self.httpd = hgweb.server.create_server(parentui, repo)
2593 2590
2594 2591 if not ui.verbose: return
2595 2592
2596 2593 if self.httpd.prefix:
2597 2594 prefix = self.httpd.prefix.strip('/') + '/'
2598 2595 else:
2599 2596 prefix = ''
2600 2597
2601 2598 port = ':%d' % self.httpd.port
2602 2599 if port == ':80':
2603 2600 port = ''
2604 2601
2605 2602 bindaddr = self.httpd.addr
2606 2603 if bindaddr == '0.0.0.0':
2607 2604 bindaddr = '*'
2608 2605 elif ':' in bindaddr: # IPv6
2609 2606 bindaddr = '[%s]' % bindaddr
2610 2607
2611 2608 fqaddr = self.httpd.fqaddr
2612 2609 if ':' in fqaddr:
2613 2610 fqaddr = '[%s]' % fqaddr
2614 2611 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2615 2612 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2616 2613
2617 2614 def run(self):
2618 2615 self.httpd.serve_forever()
2619 2616
2620 2617 service = service()
2621 2618
2622 2619 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2623 2620
2624 2621 def status(ui, repo, *pats, **opts):
2625 2622 """show changed files in the working directory
2626 2623
2627 2624 Show status of files in the repository. If names are given, only
2628 2625 files that match are shown. Files that are clean or ignored or
2629 2626 source of a copy/move operation, are not listed unless -c (clean),
2630 2627 -i (ignored), -C (copies) or -A is given. Unless options described
2631 2628 with "show only ..." are given, the options -mardu are used.
2632 2629
2633 2630 Option -q/--quiet hides untracked (unknown and ignored) files
2634 2631 unless explicitly requested with -u/--unknown or -i/-ignored.
2635 2632
2636 2633 NOTE: status may appear to disagree with diff if permissions have
2637 2634 changed or a merge has occurred. The standard diff format does not
2638 2635 report permission changes and diff only reports changes relative
2639 2636 to one merge parent.
2640 2637
2641 2638 If one revision is given, it is used as the base revision.
2642 2639 If two revisions are given, the difference between them is shown.
2643 2640
2644 2641 The codes used to show the status of files are:
2645 2642 M = modified
2646 2643 A = added
2647 2644 R = removed
2648 2645 C = clean
2649 2646 ! = deleted, but still tracked
2650 2647 ? = not tracked
2651 2648 I = ignored
2652 2649 = the previous added file was copied from here
2653 2650 """
2654 2651
2655 2652 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2656 2653 cwd = (pats and repo.getcwd()) or ''
2657 2654 end = opts['print0'] and '\0' or '\n'
2658 2655 copy = {}
2659 2656 states = 'modified added removed deleted unknown ignored clean'.split()
2660 2657 show = [k for k in states if opts[k]]
2661 2658 if opts['all']:
2662 2659 show += ui.quiet and (states[:4] + ['clean']) or states
2663 2660 if not show:
2664 2661 show = ui.quiet and states[:4] or states[:5]
2665 2662
2666 2663 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2667 2664 'ignored' in show, 'clean' in show, 'unknown' in show)
2668 2665 changestates = zip(states, 'MAR!?IC', stat)
2669 2666
2670 2667 if (opts['all'] or opts['copies']) and not opts['no_status']:
2671 ctxn = repo.changectx(nullid)
2672 ctx1 = repo.changectx(node1)
2673 ctx2 = repo.changectx(node2)
2668 ctxn = repo[nullid]
2669 ctx1 = repo[node1]
2670 ctx2 = repo[node2]
2674 2671 added = stat[1]
2675 2672 if node2 is None:
2676 2673 added = stat[0] + stat[1] # merged?
2677 2674
2678 2675 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
2679 2676 if k in added:
2680 2677 copy[k] = v
2681 2678 elif v in added:
2682 2679 copy[v] = k
2683 2680
2684 2681 for state, char, files in changestates:
2685 2682 if state in show:
2686 2683 format = "%s %%s%s" % (char, end)
2687 2684 if opts['no_status']:
2688 2685 format = "%%s%s" % end
2689 2686
2690 2687 for f in files:
2691 2688 ui.write(format % repo.pathto(f, cwd))
2692 2689 if f in copy:
2693 2690 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2694 2691
2695 2692 def tag(ui, repo, name1, *names, **opts):
2696 2693 """add one or more tags for the current or given revision
2697 2694
2698 2695 Name a particular revision using <name>.
2699 2696
2700 2697 Tags are used to name particular revisions of the repository and are
2701 2698 very useful to compare different revisions, to go back to significant
2702 2699 earlier versions or to mark branch points as releases, etc.
2703 2700
2704 2701 If no revision is given, the parent of the working directory is used,
2705 2702 or tip if no revision is checked out.
2706 2703
2707 2704 To facilitate version control, distribution, and merging of tags,
2708 2705 they are stored as a file named ".hgtags" which is managed
2709 2706 similarly to other project files and can be hand-edited if
2710 2707 necessary. The file '.hg/localtags' is used for local tags (not
2711 2708 shared among repositories).
2712 2709
2713 2710 See 'hg help dates' for a list of formats valid for -d/--date.
2714 2711 """
2715 2712
2716 2713 rev_ = "."
2717 2714 names = (name1,) + names
2718 2715 if len(names) != len(dict.fromkeys(names)):
2719 2716 raise util.Abort(_('tag names must be unique'))
2720 2717 for n in names:
2721 2718 if n in ['tip', '.', 'null']:
2722 2719 raise util.Abort(_('the name \'%s\' is reserved') % n)
2723 2720 if opts['rev'] and opts['remove']:
2724 2721 raise util.Abort(_("--rev and --remove are incompatible"))
2725 2722 if opts['rev']:
2726 2723 rev_ = opts['rev']
2727 2724 message = opts['message']
2728 2725 if opts['remove']:
2729 2726 expectedtype = opts['local'] and 'local' or 'global'
2730 2727 for n in names:
2731 2728 if not repo.tagtype(n):
2732 2729 raise util.Abort(_('tag \'%s\' does not exist') % n)
2733 2730 if repo.tagtype(n) != expectedtype:
2734 2731 raise util.Abort(_('tag \'%s\' is not a %s tag') %
2735 2732 (n, expectedtype))
2736 2733 rev_ = nullid
2737 2734 if not message:
2738 2735 message = _('Removed tag %s') % ', '.join(names)
2739 2736 elif not opts['force']:
2740 2737 for n in names:
2741 2738 if n in repo.tags():
2742 2739 raise util.Abort(_('tag \'%s\' already exists '
2743 2740 '(use -f to force)') % n)
2744 2741 if not rev_ and repo.dirstate.parents()[1] != nullid:
2745 2742 raise util.Abort(_('uncommitted merge - please provide a '
2746 2743 'specific revision'))
2747 r = repo.changectx(rev_).node()
2744 r = repo[rev_].node()
2748 2745
2749 2746 if not message:
2750 2747 message = (_('Added tag %s for changeset %s') %
2751 2748 (', '.join(names), short(r)))
2752 2749
2753 2750 date = opts.get('date')
2754 2751 if date:
2755 2752 date = util.parsedate(date)
2756 2753
2757 2754 repo.tag(names, r, message, opts['local'], opts['user'], date)
2758 2755
2759 2756 def tags(ui, repo):
2760 2757 """list repository tags
2761 2758
2762 2759 List the repository tags.
2763 2760
2764 2761 This lists both regular and local tags. When the -v/--verbose switch
2765 2762 is used, a third column "local" is printed for local tags.
2766 2763 """
2767 2764
2768 2765 l = repo.tagslist()
2769 2766 l.reverse()
2770 2767 hexfunc = ui.debugflag and hex or short
2771 2768 tagtype = ""
2772 2769
2773 2770 for t, n in l:
2774 2771 if ui.quiet:
2775 2772 ui.write("%s\n" % t)
2776 2773 continue
2777 2774
2778 2775 try:
2779 2776 hn = hexfunc(n)
2780 2777 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2781 2778 except revlog.LookupError:
2782 2779 r = " ?:%s" % hn
2783 2780 else:
2784 2781 spaces = " " * (30 - util.locallen(t))
2785 2782 if ui.verbose:
2786 2783 if repo.tagtype(t) == 'local':
2787 2784 tagtype = " local"
2788 2785 else:
2789 2786 tagtype = ""
2790 2787 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2791 2788
2792 2789 def tip(ui, repo, **opts):
2793 2790 """show the tip revision
2794 2791
2795 2792 The tip revision (usually just called the tip) is the most
2796 2793 recently added changeset in the repository, the most recently
2797 2794 changed head.
2798 2795
2799 2796 If you have just made a commit, that commit will be the tip. If
2800 2797 you have just pulled changes from another repository, the tip of
2801 2798 that repository becomes the current tip. The "tip" tag is special
2802 2799 and cannot be renamed or assigned to a different changeset.
2803 2800 """
2804 2801 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2805 2802
2806 2803 def unbundle(ui, repo, fname1, *fnames, **opts):
2807 2804 """apply one or more changegroup files
2808 2805
2809 2806 Apply one or more compressed changegroup files generated by the
2810 2807 bundle command.
2811 2808 """
2812 2809 fnames = (fname1,) + fnames
2813 2810
2814 2811 lock = None
2815 2812 try:
2816 2813 lock = repo.lock()
2817 2814 for fname in fnames:
2818 2815 if os.path.exists(fname):
2819 2816 f = open(fname, "rb")
2820 2817 else:
2821 2818 f = urllib.urlopen(fname)
2822 2819 gen = changegroup.readbundle(f, fname)
2823 2820 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2824 2821 finally:
2825 2822 del lock
2826 2823
2827 2824 return postincoming(ui, repo, modheads, opts['update'], None)
2828 2825
2829 2826 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2830 2827 """update working directory
2831 2828
2832 2829 Update the repository's working directory to the specified revision,
2833 2830 or the tip of the current branch if none is specified.
2834 2831
2835 2832 If the requested revision is a descendant of the working
2836 2833 directory, any outstanding changes in the working directory will
2837 2834 be merged into the result. If it is not directly descended but is
2838 2835 on the same named branch, update aborts with a suggestion to use
2839 2836 merge or update -C instead.
2840 2837
2841 2838 If the requested revision is on a different named branch and the
2842 2839 working directory is clean, update quietly switches branches.
2843 2840
2844 2841 See 'hg help dates' for a list of formats valid for --date.
2845 2842 """
2846 2843 if rev and node:
2847 2844 raise util.Abort(_("please specify just one revision"))
2848 2845
2849 2846 if not rev:
2850 2847 rev = node
2851 2848
2852 2849 if date:
2853 2850 if rev:
2854 2851 raise util.Abort(_("you can't specify a revision and a date"))
2855 2852 rev = cmdutil.finddate(ui, repo, date)
2856 2853
2857 2854 if clean:
2858 2855 return hg.clean(repo, rev)
2859 2856 else:
2860 2857 return hg.update(repo, rev)
2861 2858
2862 2859 def verify(ui, repo):
2863 2860 """verify the integrity of the repository
2864 2861
2865 2862 Verify the integrity of the current repository.
2866 2863
2867 2864 This will perform an extensive check of the repository's
2868 2865 integrity, validating the hashes and checksums of each entry in
2869 2866 the changelog, manifest, and tracked files, as well as the
2870 2867 integrity of their crosslinks and indices.
2871 2868 """
2872 2869 return hg.verify(repo)
2873 2870
2874 2871 def version_(ui):
2875 2872 """output version and copyright information"""
2876 2873 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2877 2874 % version.get_version())
2878 2875 ui.status(_(
2879 2876 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2880 2877 "This is free software; see the source for copying conditions. "
2881 2878 "There is NO\nwarranty; "
2882 2879 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2883 2880 ))
2884 2881
2885 2882 # Command options and aliases are listed here, alphabetically
2886 2883
2887 2884 globalopts = [
2888 2885 ('R', 'repository', '',
2889 2886 _('repository root directory or symbolic path name')),
2890 2887 ('', 'cwd', '', _('change working directory')),
2891 2888 ('y', 'noninteractive', None,
2892 2889 _('do not prompt, assume \'yes\' for any required answers')),
2893 2890 ('q', 'quiet', None, _('suppress output')),
2894 2891 ('v', 'verbose', None, _('enable additional output')),
2895 2892 ('', 'config', [], _('set/override config option')),
2896 2893 ('', 'debug', None, _('enable debugging output')),
2897 2894 ('', 'debugger', None, _('start debugger')),
2898 2895 ('', 'encoding', util._encoding, _('set the charset encoding')),
2899 2896 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2900 2897 ('', 'lsprof', None, _('print improved command execution profile')),
2901 2898 ('', 'traceback', None, _('print traceback on exception')),
2902 2899 ('', 'time', None, _('time how long the command takes')),
2903 2900 ('', 'profile', None, _('print command execution profile')),
2904 2901 ('', 'version', None, _('output version information and exit')),
2905 2902 ('h', 'help', None, _('display help and exit')),
2906 2903 ]
2907 2904
2908 2905 dryrunopts = [('n', 'dry-run', None,
2909 2906 _('do not perform actions, just print output'))]
2910 2907
2911 2908 remoteopts = [
2912 2909 ('e', 'ssh', '', _('specify ssh command to use')),
2913 2910 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2914 2911 ]
2915 2912
2916 2913 walkopts = [
2917 2914 ('I', 'include', [], _('include names matching the given patterns')),
2918 2915 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2919 2916 ]
2920 2917
2921 2918 commitopts = [
2922 2919 ('m', 'message', '', _('use <text> as commit message')),
2923 2920 ('l', 'logfile', '', _('read commit message from <file>')),
2924 2921 ]
2925 2922
2926 2923 commitopts2 = [
2927 2924 ('d', 'date', '', _('record datecode as commit date')),
2928 2925 ('u', 'user', '', _('record user as committer')),
2929 2926 ]
2930 2927
2931 2928 templateopts = [
2932 2929 ('', 'style', '', _('display using template map file')),
2933 2930 ('', 'template', '', _('display with template')),
2934 2931 ]
2935 2932
2936 2933 logopts = [
2937 2934 ('p', 'patch', None, _('show patch')),
2938 2935 ('l', 'limit', '', _('limit number of changes displayed')),
2939 2936 ('M', 'no-merges', None, _('do not show merges')),
2940 2937 ] + templateopts
2941 2938
2942 2939 diffopts = [
2943 2940 ('a', 'text', None, _('treat all files as text')),
2944 2941 ('g', 'git', None, _('use git extended diff format')),
2945 2942 ('', 'nodates', None, _("don't include dates in diff headers"))
2946 2943 ]
2947 2944
2948 2945 diffopts2 = [
2949 2946 ('p', 'show-function', None, _('show which function each change is in')),
2950 2947 ('w', 'ignore-all-space', None,
2951 2948 _('ignore white space when comparing lines')),
2952 2949 ('b', 'ignore-space-change', None,
2953 2950 _('ignore changes in the amount of white space')),
2954 2951 ('B', 'ignore-blank-lines', None,
2955 2952 _('ignore changes whose lines are all blank')),
2956 2953 ('U', 'unified', '', _('number of lines of context to show'))
2957 2954 ]
2958 2955
2959 2956 table = {
2960 2957 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2961 2958 "addremove":
2962 2959 (addremove,
2963 2960 [('s', 'similarity', '',
2964 2961 _('guess renamed files by similarity (0<=s<=100)')),
2965 2962 ] + walkopts + dryrunopts,
2966 2963 _('hg addremove [OPTION]... [FILE]...')),
2967 2964 "^annotate|blame":
2968 2965 (annotate,
2969 2966 [('r', 'rev', '', _('annotate the specified revision')),
2970 2967 ('f', 'follow', None, _('follow file copies and renames')),
2971 2968 ('a', 'text', None, _('treat all files as text')),
2972 2969 ('u', 'user', None, _('list the author (long with -v)')),
2973 2970 ('d', 'date', None, _('list the date (short with -q)')),
2974 2971 ('n', 'number', None, _('list the revision number (default)')),
2975 2972 ('c', 'changeset', None, _('list the changeset')),
2976 2973 ('l', 'line-number', None,
2977 2974 _('show line number at the first appearance'))
2978 2975 ] + walkopts,
2979 2976 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2980 2977 "archive":
2981 2978 (archive,
2982 2979 [('', 'no-decode', None, _('do not pass files through decoders')),
2983 2980 ('p', 'prefix', '', _('directory prefix for files in archive')),
2984 2981 ('r', 'rev', '', _('revision to distribute')),
2985 2982 ('t', 'type', '', _('type of distribution to create')),
2986 2983 ] + walkopts,
2987 2984 _('hg archive [OPTION]... DEST')),
2988 2985 "backout":
2989 2986 (backout,
2990 2987 [('', 'merge', None,
2991 2988 _('merge with old dirstate parent after backout')),
2992 2989 ('', 'parent', '', _('parent to choose when backing out merge')),
2993 2990 ('r', 'rev', '', _('revision to backout')),
2994 2991 ] + walkopts + commitopts + commitopts2,
2995 2992 _('hg backout [OPTION]... [-r] REV')),
2996 2993 "bisect":
2997 2994 (bisect,
2998 2995 [('r', 'reset', False, _('reset bisect state')),
2999 2996 ('g', 'good', False, _('mark changeset good')),
3000 2997 ('b', 'bad', False, _('mark changeset bad')),
3001 2998 ('s', 'skip', False, _('skip testing changeset')),
3002 2999 ('U', 'noupdate', False, _('do not update to target'))],
3003 3000 _("hg bisect [-gbsr] [REV]")),
3004 3001 "branch":
3005 3002 (branch,
3006 3003 [('f', 'force', None,
3007 3004 _('set branch name even if it shadows an existing branch'))],
3008 3005 _('hg branch [-f] [NAME]')),
3009 3006 "branches":
3010 3007 (branches,
3011 3008 [('a', 'active', False,
3012 3009 _('show only branches that have unmerged heads'))],
3013 3010 _('hg branches [-a]')),
3014 3011 "bundle":
3015 3012 (bundle,
3016 3013 [('f', 'force', None,
3017 3014 _('run even when remote repository is unrelated')),
3018 3015 ('r', 'rev', [],
3019 3016 _('a changeset up to which you would like to bundle')),
3020 3017 ('', 'base', [],
3021 3018 _('a base changeset to specify instead of a destination')),
3022 3019 ('a', 'all', None, _('bundle all changesets in the repository')),
3023 3020 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3024 3021 ] + remoteopts,
3025 3022 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3026 3023 "cat":
3027 3024 (cat,
3028 3025 [('o', 'output', '', _('print output to file with formatted name')),
3029 3026 ('r', 'rev', '', _('print the given revision')),
3030 3027 ('', 'decode', None, _('apply any matching decode filter')),
3031 3028 ] + walkopts,
3032 3029 _('hg cat [OPTION]... FILE...')),
3033 3030 "^clone":
3034 3031 (clone,
3035 3032 [('U', 'noupdate', None,
3036 3033 _('the clone will only contain a repository (no working copy)')),
3037 3034 ('r', 'rev', [],
3038 3035 _('a changeset you would like to have after cloning')),
3039 3036 ('', 'pull', None, _('use pull protocol to copy metadata')),
3040 3037 ('', 'uncompressed', None,
3041 3038 _('use uncompressed transfer (fast over LAN)')),
3042 3039 ] + remoteopts,
3043 3040 _('hg clone [OPTION]... SOURCE [DEST]')),
3044 3041 "^commit|ci":
3045 3042 (commit,
3046 3043 [('A', 'addremove', None,
3047 3044 _('mark new/missing files as added/removed before committing')),
3048 3045 ] + walkopts + commitopts + commitopts2,
3049 3046 _('hg commit [OPTION]... [FILE]...')),
3050 3047 "copy|cp":
3051 3048 (copy,
3052 3049 [('A', 'after', None, _('record a copy that has already occurred')),
3053 3050 ('f', 'force', None,
3054 3051 _('forcibly copy over an existing managed file')),
3055 3052 ] + walkopts + dryrunopts,
3056 3053 _('hg copy [OPTION]... [SOURCE]... DEST')),
3057 3054 "debugancestor": (debugancestor, [],
3058 3055 _('hg debugancestor [INDEX] REV1 REV2')),
3059 3056 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
3060 3057 "debugcomplete":
3061 3058 (debugcomplete,
3062 3059 [('o', 'options', None, _('show the command options'))],
3063 3060 _('hg debugcomplete [-o] CMD')),
3064 3061 "debugdate":
3065 3062 (debugdate,
3066 3063 [('e', 'extended', None, _('try extended date formats'))],
3067 3064 _('hg debugdate [-e] DATE [RANGE]')),
3068 3065 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
3069 3066 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
3070 3067 "debugindex": (debugindex, [], _('hg debugindex FILE')),
3071 3068 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
3072 3069 "debuginstall": (debuginstall, [], _('hg debuginstall')),
3073 3070 "debugrawcommit|rawcommit":
3074 3071 (rawcommit,
3075 3072 [('p', 'parent', [], _('parent')),
3076 3073 ('F', 'files', '', _('file list'))
3077 3074 ] + commitopts + commitopts2,
3078 3075 _('hg debugrawcommit [OPTION]... [FILE]...')),
3079 3076 "debugrebuildstate":
3080 3077 (debugrebuildstate,
3081 3078 [('r', 'rev', '', _('revision to rebuild to'))],
3082 3079 _('hg debugrebuildstate [-r REV] [REV]')),
3083 3080 "debugrename":
3084 3081 (debugrename,
3085 3082 [('r', 'rev', '', _('revision to debug'))],
3086 3083 _('hg debugrename [-r REV] FILE')),
3087 3084 "debugsetparents":
3088 3085 (debugsetparents,
3089 3086 [],
3090 3087 _('hg debugsetparents REV1 [REV2]')),
3091 3088 "debugstate":
3092 3089 (debugstate,
3093 3090 [('', 'nodates', None, _('do not display the saved mtime'))],
3094 3091 _('hg debugstate [OPTS]')),
3095 3092 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
3096 3093 "^diff":
3097 3094 (diff,
3098 3095 [('r', 'rev', [], _('revision'))
3099 3096 ] + diffopts + diffopts2 + walkopts,
3100 3097 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3101 3098 "^export":
3102 3099 (export,
3103 3100 [('o', 'output', '', _('print output to file with formatted name')),
3104 3101 ('', 'switch-parent', None, _('diff against the second parent'))
3105 3102 ] + diffopts,
3106 3103 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
3107 3104 "grep":
3108 3105 (grep,
3109 3106 [('0', 'print0', None, _('end fields with NUL')),
3110 3107 ('', 'all', None, _('print all revisions that match')),
3111 3108 ('f', 'follow', None,
3112 3109 _('follow changeset history, or file history across copies and renames')),
3113 3110 ('i', 'ignore-case', None, _('ignore case when matching')),
3114 3111 ('l', 'files-with-matches', None,
3115 3112 _('print only filenames and revs that match')),
3116 3113 ('n', 'line-number', None, _('print matching line numbers')),
3117 3114 ('r', 'rev', [], _('search in given revision range')),
3118 3115 ('u', 'user', None, _('list the author (long with -v)')),
3119 3116 ('d', 'date', None, _('list the date (short with -q)')),
3120 3117 ] + walkopts,
3121 3118 _('hg grep [OPTION]... PATTERN [FILE]...')),
3122 3119 "heads":
3123 3120 (heads,
3124 3121 [('r', 'rev', '', _('show only heads which are descendants of rev')),
3125 3122 ] + templateopts,
3126 3123 _('hg heads [-r REV] [REV]...')),
3127 3124 "help": (help_, [], _('hg help [COMMAND]')),
3128 3125 "identify|id":
3129 3126 (identify,
3130 3127 [('r', 'rev', '', _('identify the specified rev')),
3131 3128 ('n', 'num', None, _('show local revision number')),
3132 3129 ('i', 'id', None, _('show global revision id')),
3133 3130 ('b', 'branch', None, _('show branch')),
3134 3131 ('t', 'tags', None, _('show tags'))],
3135 3132 _('hg identify [-nibt] [-r REV] [SOURCE]')),
3136 3133 "import|patch":
3137 3134 (import_,
3138 3135 [('p', 'strip', 1,
3139 3136 _('directory strip option for patch. This has the same\n'
3140 3137 'meaning as the corresponding patch option')),
3141 3138 ('b', 'base', '', _('base path')),
3142 3139 ('f', 'force', None,
3143 3140 _('skip check for outstanding uncommitted changes')),
3144 3141 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3145 3142 ('', 'exact', None,
3146 3143 _('apply patch to the nodes from which it was generated')),
3147 3144 ('', 'import-branch', None,
3148 3145 _('Use any branch information in patch (implied by --exact)'))] +
3149 3146 commitopts + commitopts2,
3150 3147 _('hg import [OPTION]... PATCH...')),
3151 3148 "incoming|in":
3152 3149 (incoming,
3153 3150 [('f', 'force', None,
3154 3151 _('run even when remote repository is unrelated')),
3155 3152 ('n', 'newest-first', None, _('show newest record first')),
3156 3153 ('', 'bundle', '', _('file to store the bundles into')),
3157 3154 ('r', 'rev', [],
3158 3155 _('a specific revision up to which you would like to pull')),
3159 3156 ] + logopts + remoteopts,
3160 3157 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3161 3158 ' [--bundle FILENAME] [SOURCE]')),
3162 3159 "^init":
3163 3160 (init,
3164 3161 remoteopts,
3165 3162 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3166 3163 "locate":
3167 3164 (locate,
3168 3165 [('r', 'rev', '', _('search the repository as it stood at rev')),
3169 3166 ('0', 'print0', None,
3170 3167 _('end filenames with NUL, for use with xargs')),
3171 3168 ('f', 'fullpath', None,
3172 3169 _('print complete paths from the filesystem root')),
3173 3170 ] + walkopts,
3174 3171 _('hg locate [OPTION]... [PATTERN]...')),
3175 3172 "^log|history":
3176 3173 (log,
3177 3174 [('f', 'follow', None,
3178 3175 _('follow changeset history, or file history across copies and renames')),
3179 3176 ('', 'follow-first', None,
3180 3177 _('only follow the first parent of merge changesets')),
3181 3178 ('d', 'date', '', _('show revs matching date spec')),
3182 3179 ('C', 'copies', None, _('show copied files')),
3183 3180 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3184 3181 ('r', 'rev', [], _('show the specified revision or range')),
3185 3182 ('', 'removed', None, _('include revs where files were removed')),
3186 3183 ('m', 'only-merges', None, _('show only merges')),
3187 3184 ('b', 'only-branch', [],
3188 3185 _('show only changesets within the given named branch')),
3189 3186 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3190 3187 ] + logopts + walkopts,
3191 3188 _('hg log [OPTION]... [FILE]')),
3192 3189 "manifest":
3193 3190 (manifest,
3194 3191 [('r', 'rev', '', _('revision to display'))],
3195 3192 _('hg manifest [-r REV]')),
3196 3193 "^merge":
3197 3194 (merge,
3198 3195 [('f', 'force', None, _('force a merge with outstanding changes')),
3199 3196 ('r', 'rev', '', _('revision to merge')),
3200 3197 ],
3201 3198 _('hg merge [-f] [[-r] REV]')),
3202 3199 "outgoing|out":
3203 3200 (outgoing,
3204 3201 [('f', 'force', None,
3205 3202 _('run even when remote repository is unrelated')),
3206 3203 ('r', 'rev', [],
3207 3204 _('a specific revision up to which you would like to push')),
3208 3205 ('n', 'newest-first', None, _('show newest record first')),
3209 3206 ] + logopts + remoteopts,
3210 3207 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3211 3208 "^parents":
3212 3209 (parents,
3213 3210 [('r', 'rev', '', _('show parents from the specified rev')),
3214 3211 ] + templateopts,
3215 3212 _('hg parents [-r REV] [FILE]')),
3216 3213 "paths": (paths, [], _('hg paths [NAME]')),
3217 3214 "^pull":
3218 3215 (pull,
3219 3216 [('u', 'update', None,
3220 3217 _('update to new tip if changesets were pulled')),
3221 3218 ('f', 'force', None,
3222 3219 _('run even when remote repository is unrelated')),
3223 3220 ('r', 'rev', [],
3224 3221 _('a specific revision up to which you would like to pull')),
3225 3222 ] + remoteopts,
3226 3223 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3227 3224 "^push":
3228 3225 (push,
3229 3226 [('f', 'force', None, _('force push')),
3230 3227 ('r', 'rev', [],
3231 3228 _('a specific revision up to which you would like to push')),
3232 3229 ] + remoteopts,
3233 3230 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3234 3231 "recover": (recover, [], _('hg recover')),
3235 3232 "^remove|rm":
3236 3233 (remove,
3237 3234 [('A', 'after', None, _('record delete for missing files')),
3238 3235 ('f', 'force', None,
3239 3236 _('remove (and delete) file even if added or modified')),
3240 3237 ] + walkopts,
3241 3238 _('hg remove [OPTION]... FILE...')),
3242 3239 "rename|mv":
3243 3240 (rename,
3244 3241 [('A', 'after', None, _('record a rename that has already occurred')),
3245 3242 ('f', 'force', None,
3246 3243 _('forcibly copy over an existing managed file')),
3247 3244 ] + walkopts + dryrunopts,
3248 3245 _('hg rename [OPTION]... SOURCE... DEST')),
3249 3246 "resolve":
3250 3247 (resolve,
3251 3248 [('l', 'list', None, _('list state of files needing merge')),
3252 3249 ('m', 'mark', None, _('mark files as resolved')),
3253 3250 ('u', 'unmark', None, _('unmark files as resolved'))],
3254 3251 ('hg resolve [OPTION] [FILES...]')),
3255 3252 "revert":
3256 3253 (revert,
3257 3254 [('a', 'all', None, _('revert all changes when no arguments given')),
3258 3255 ('d', 'date', '', _('tipmost revision matching date')),
3259 3256 ('r', 'rev', '', _('revision to revert to')),
3260 3257 ('', 'no-backup', None, _('do not save backup copies of files')),
3261 3258 ] + walkopts + dryrunopts,
3262 3259 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3263 3260 "rollback": (rollback, [], _('hg rollback')),
3264 3261 "root": (root, [], _('hg root')),
3265 3262 "^serve":
3266 3263 (serve,
3267 3264 [('A', 'accesslog', '', _('name of access log file to write to')),
3268 3265 ('d', 'daemon', None, _('run server in background')),
3269 3266 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3270 3267 ('E', 'errorlog', '', _('name of error log file to write to')),
3271 3268 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3272 3269 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3273 3270 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3274 3271 ('n', 'name', '',
3275 3272 _('name to show in web pages (default: working dir)')),
3276 3273 ('', 'webdir-conf', '', _('name of the webdir config file'
3277 3274 ' (serve more than one repo)')),
3278 3275 ('', 'pid-file', '', _('name of file to write process ID to')),
3279 3276 ('', 'stdio', None, _('for remote clients')),
3280 3277 ('t', 'templates', '', _('web templates to use')),
3281 3278 ('', 'style', '', _('template style to use')),
3282 3279 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3283 3280 ('', 'certificate', '', _('SSL certificate file'))],
3284 3281 _('hg serve [OPTION]...')),
3285 3282 "showconfig|debugconfig":
3286 3283 (showconfig,
3287 3284 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3288 3285 _('hg showconfig [-u] [NAME]...')),
3289 3286 "^status|st":
3290 3287 (status,
3291 3288 [('A', 'all', None, _('show status of all files')),
3292 3289 ('m', 'modified', None, _('show only modified files')),
3293 3290 ('a', 'added', None, _('show only added files')),
3294 3291 ('r', 'removed', None, _('show only removed files')),
3295 3292 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3296 3293 ('c', 'clean', None, _('show only files without changes')),
3297 3294 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3298 3295 ('i', 'ignored', None, _('show only ignored files')),
3299 3296 ('n', 'no-status', None, _('hide status prefix')),
3300 3297 ('C', 'copies', None, _('show source of copied files')),
3301 3298 ('0', 'print0', None,
3302 3299 _('end filenames with NUL, for use with xargs')),
3303 3300 ('', 'rev', [], _('show difference from revision')),
3304 3301 ] + walkopts,
3305 3302 _('hg status [OPTION]... [FILE]...')),
3306 3303 "tag":
3307 3304 (tag,
3308 3305 [('f', 'force', None, _('replace existing tag')),
3309 3306 ('l', 'local', None, _('make the tag local')),
3310 3307 ('r', 'rev', '', _('revision to tag')),
3311 3308 ('', 'remove', None, _('remove a tag')),
3312 3309 # -l/--local is already there, commitopts cannot be used
3313 3310 ('m', 'message', '', _('use <text> as commit message')),
3314 3311 ] + commitopts2,
3315 3312 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3316 3313 "tags": (tags, [], _('hg tags')),
3317 3314 "tip":
3318 3315 (tip,
3319 3316 [('p', 'patch', None, _('show patch')),
3320 3317 ] + templateopts,
3321 3318 _('hg tip [-p]')),
3322 3319 "unbundle":
3323 3320 (unbundle,
3324 3321 [('u', 'update', None,
3325 3322 _('update to new tip if changesets were unbundled'))],
3326 3323 _('hg unbundle [-u] FILE...')),
3327 3324 "^update|up|checkout|co":
3328 3325 (update,
3329 3326 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3330 3327 ('d', 'date', '', _('tipmost revision matching date')),
3331 3328 ('r', 'rev', '', _('revision'))],
3332 3329 _('hg update [-C] [-d DATE] [[-r] REV]')),
3333 3330 "verify": (verify, [], _('hg verify')),
3334 3331 "version": (version_, [], _('hg version')),
3335 3332 }
3336 3333
3337 3334 norepo = ("clone init version help debugcomplete debugdata"
3338 3335 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3339 3336 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,751 +1,751 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import nullid, nullrev, short
9 9 from i18n import _
10 10 import ancestor, bdiff, revlog, util, os, errno
11 11
12 12 class changectx(object):
13 13 """A changecontext object makes access to data related to a particular
14 14 changeset convenient."""
15 15 def __init__(self, repo, changeid=''):
16 16 """changeid is a revision number, node, or tag"""
17 17 if changeid == '':
18 18 changeid = '.'
19 19 self._repo = repo
20 20 self._node = self._repo.lookup(changeid)
21 21 self._rev = self._repo.changelog.rev(self._node)
22 22
23 23 def __str__(self):
24 24 return short(self.node())
25 25
26 26 def __repr__(self):
27 27 return "<changectx %s>" % str(self)
28 28
29 29 def __hash__(self):
30 30 try:
31 31 return hash(self._rev)
32 32 except AttributeError:
33 33 return id(self)
34 34
35 35 def __eq__(self, other):
36 36 try:
37 37 return self._rev == other._rev
38 38 except AttributeError:
39 39 return False
40 40
41 41 def __ne__(self, other):
42 42 return not (self == other)
43 43
44 44 def __nonzero__(self):
45 45 return self._rev != nullrev
46 46
47 47 def __getattr__(self, name):
48 48 if name == '_changeset':
49 49 self._changeset = self._repo.changelog.read(self.node())
50 50 return self._changeset
51 51 elif name == '_manifest':
52 52 self._manifest = self._repo.manifest.read(self._changeset[0])
53 53 return self._manifest
54 54 elif name == '_manifestdelta':
55 55 md = self._repo.manifest.readdelta(self._changeset[0])
56 56 self._manifestdelta = md
57 57 return self._manifestdelta
58 58 elif name == '_parents':
59 59 p = self._repo.changelog.parents(self._node)
60 60 if p[1] == nullid:
61 61 p = p[:-1]
62 62 self._parents = [changectx(self._repo, x) for x in p]
63 63 return self._parents
64 64 else:
65 65 raise AttributeError, name
66 66
67 67 def __contains__(self, key):
68 68 return key in self._manifest
69 69
70 70 def __getitem__(self, key):
71 71 return self.filectx(key)
72 72
73 73 def __iter__(self):
74 74 a = self._manifest.keys()
75 75 a.sort()
76 76 for f in a:
77 77 yield f
78 78
79 79 def changeset(self): return self._changeset
80 80 def manifest(self): return self._manifest
81 81
82 82 def rev(self): return self._rev
83 83 def node(self): return self._node
84 84 def user(self): return self._changeset[1]
85 85 def date(self): return self._changeset[2]
86 86 def files(self): return self._changeset[3]
87 87 def description(self): return self._changeset[4]
88 88 def branch(self): return self._changeset[5].get("branch")
89 89 def extra(self): return self._changeset[5]
90 90 def tags(self): return self._repo.nodetags(self._node)
91 91
92 92 def parents(self):
93 93 """return contexts for each parent changeset"""
94 94 return self._parents
95 95
96 96 def children(self):
97 97 """return contexts for each child changeset"""
98 98 c = self._repo.changelog.children(self._node)
99 99 return [changectx(self._repo, x) for x in c]
100 100
101 101 def _fileinfo(self, path):
102 102 if '_manifest' in self.__dict__:
103 103 try:
104 104 return self._manifest[path], self._manifest.flags(path)
105 105 except KeyError:
106 106 raise revlog.LookupError(self._node, path,
107 107 _('not found in manifest'))
108 108 if '_manifestdelta' in self.__dict__ or path in self.files():
109 109 if path in self._manifestdelta:
110 110 return self._manifestdelta[path], self._manifestdelta.flags(path)
111 111 node, flag = self._repo.manifest.find(self._changeset[0], path)
112 112 if not node:
113 113 raise revlog.LookupError(self._node, path,
114 114 _('not found in manifest'))
115 115
116 116 return node, flag
117 117
118 118 def filenode(self, path):
119 119 return self._fileinfo(path)[0]
120 120
121 121 def flags(self, path):
122 122 try:
123 123 return self._fileinfo(path)[1]
124 124 except revlog.LookupError:
125 125 return ''
126 126
127 127 def filectx(self, path, fileid=None, filelog=None):
128 128 """get a file context from this changeset"""
129 129 if fileid is None:
130 130 fileid = self.filenode(path)
131 131 return filectx(self._repo, path, fileid=fileid,
132 132 changectx=self, filelog=filelog)
133 133
134 134 def filectxs(self):
135 135 """generate a file context for each file in this changeset's
136 136 manifest"""
137 137 mf = self.manifest()
138 138 m = mf.keys()
139 139 m.sort()
140 140 for f in m:
141 141 yield self.filectx(f, fileid=mf[f])
142 142
143 143 def ancestor(self, c2):
144 144 """
145 145 return the ancestor context of self and c2
146 146 """
147 147 n = self._repo.changelog.ancestor(self._node, c2._node)
148 148 return changectx(self._repo, n)
149 149
150 150 class filectx(object):
151 151 """A filecontext object makes access to data related to a particular
152 152 filerevision convenient."""
153 153 def __init__(self, repo, path, changeid=None, fileid=None,
154 154 filelog=None, changectx=None):
155 155 """changeid can be a changeset revision, node, or tag.
156 156 fileid can be a file revision or node."""
157 157 self._repo = repo
158 158 self._path = path
159 159
160 160 assert (changeid is not None
161 161 or fileid is not None
162 162 or changectx is not None)
163 163
164 164 if filelog:
165 165 self._filelog = filelog
166 166
167 167 if changeid is not None:
168 168 self._changeid = changeid
169 169 if changectx is not None:
170 170 self._changectx = changectx
171 171 if fileid is not None:
172 172 self._fileid = fileid
173 173
174 174 def __getattr__(self, name):
175 175 if name == '_changectx':
176 176 self._changectx = changectx(self._repo, self._changeid)
177 177 return self._changectx
178 178 elif name == '_filelog':
179 179 self._filelog = self._repo.file(self._path)
180 180 return self._filelog
181 181 elif name == '_changeid':
182 182 if '_changectx' in self.__dict__:
183 183 self._changeid = self._changectx.rev()
184 184 else:
185 185 self._changeid = self._filelog.linkrev(self._filenode)
186 186 return self._changeid
187 187 elif name == '_filenode':
188 188 if '_fileid' in self.__dict__:
189 189 self._filenode = self._filelog.lookup(self._fileid)
190 190 else:
191 191 self._filenode = self._changectx.filenode(self._path)
192 192 return self._filenode
193 193 elif name == '_filerev':
194 194 self._filerev = self._filelog.rev(self._filenode)
195 195 return self._filerev
196 196 elif name == '_repopath':
197 197 self._repopath = self._path
198 198 return self._repopath
199 199 else:
200 200 raise AttributeError, name
201 201
202 202 def __nonzero__(self):
203 203 try:
204 204 n = self._filenode
205 205 return True
206 206 except revlog.LookupError:
207 207 # file is missing
208 208 return False
209 209
210 210 def __str__(self):
211 211 return "%s@%s" % (self.path(), short(self.node()))
212 212
213 213 def __repr__(self):
214 214 return "<filectx %s>" % str(self)
215 215
216 216 def __hash__(self):
217 217 try:
218 218 return hash((self._path, self._fileid))
219 219 except AttributeError:
220 220 return id(self)
221 221
222 222 def __eq__(self, other):
223 223 try:
224 224 return (self._path == other._path
225 225 and self._fileid == other._fileid)
226 226 except AttributeError:
227 227 return False
228 228
229 229 def __ne__(self, other):
230 230 return not (self == other)
231 231
232 232 def filectx(self, fileid):
233 233 '''opens an arbitrary revision of the file without
234 234 opening a new filelog'''
235 235 return filectx(self._repo, self._path, fileid=fileid,
236 236 filelog=self._filelog)
237 237
238 238 def filerev(self): return self._filerev
239 239 def filenode(self): return self._filenode
240 240 def flags(self): return self._changectx.flags(self._path)
241 241 def filelog(self): return self._filelog
242 242
243 243 def rev(self):
244 244 if '_changectx' in self.__dict__:
245 245 return self._changectx.rev()
246 246 if '_changeid' in self.__dict__:
247 247 return self._changectx.rev()
248 248 return self._filelog.linkrev(self._filenode)
249 249
250 250 def linkrev(self): return self._filelog.linkrev(self._filenode)
251 251 def node(self): return self._changectx.node()
252 252 def user(self): return self._changectx.user()
253 253 def date(self): return self._changectx.date()
254 254 def files(self): return self._changectx.files()
255 255 def description(self): return self._changectx.description()
256 256 def branch(self): return self._changectx.branch()
257 257 def manifest(self): return self._changectx.manifest()
258 258 def changectx(self): return self._changectx
259 259
260 260 def data(self): return self._filelog.read(self._filenode)
261 261 def path(self): return self._path
262 262 def size(self): return self._filelog.size(self._filerev)
263 263
264 264 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
265 265
266 266 def renamed(self):
267 267 """check if file was actually renamed in this changeset revision
268 268
269 269 If rename logged in file revision, we report copy for changeset only
270 270 if file revisions linkrev points back to the changeset in question
271 271 or both changeset parents contain different file revisions.
272 272 """
273 273
274 274 renamed = self._filelog.renamed(self._filenode)
275 275 if not renamed:
276 276 return renamed
277 277
278 278 if self.rev() == self.linkrev():
279 279 return renamed
280 280
281 281 name = self.path()
282 282 fnode = self._filenode
283 283 for p in self._changectx.parents():
284 284 try:
285 285 if fnode == p.filenode(name):
286 286 return None
287 287 except revlog.LookupError:
288 288 pass
289 289 return renamed
290 290
291 291 def parents(self):
292 292 p = self._path
293 293 fl = self._filelog
294 294 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
295 295
296 296 r = self._filelog.renamed(self._filenode)
297 297 if r:
298 298 pl[0] = (r[0], r[1], None)
299 299
300 300 return [filectx(self._repo, p, fileid=n, filelog=l)
301 301 for p,n,l in pl if n != nullid]
302 302
303 303 def children(self):
304 304 # hard for renames
305 305 c = self._filelog.children(self._filenode)
306 306 return [filectx(self._repo, self._path, fileid=x,
307 307 filelog=self._filelog) for x in c]
308 308
309 309 def annotate(self, follow=False, linenumber=None):
310 310 '''returns a list of tuples of (ctx, line) for each line
311 311 in the file, where ctx is the filectx of the node where
312 312 that line was last changed.
313 313 This returns tuples of ((ctx, linenumber), line) for each line,
314 314 if "linenumber" parameter is NOT "None".
315 315 In such tuples, linenumber means one at the first appearance
316 316 in the managed file.
317 317 To reduce annotation cost,
318 318 this returns fixed value(False is used) as linenumber,
319 319 if "linenumber" parameter is "False".'''
320 320
321 321 def decorate_compat(text, rev):
322 322 return ([rev] * len(text.splitlines()), text)
323 323
324 324 def without_linenumber(text, rev):
325 325 return ([(rev, False)] * len(text.splitlines()), text)
326 326
327 327 def with_linenumber(text, rev):
328 328 size = len(text.splitlines())
329 329 return ([(rev, i) for i in xrange(1, size + 1)], text)
330 330
331 331 decorate = (((linenumber is None) and decorate_compat) or
332 332 (linenumber and with_linenumber) or
333 333 without_linenumber)
334 334
335 335 def pair(parent, child):
336 336 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
337 337 child[0][b1:b2] = parent[0][a1:a2]
338 338 return child
339 339
340 340 getlog = util.cachefunc(lambda x: self._repo.file(x))
341 341 def getctx(path, fileid):
342 342 log = path == self._path and self._filelog or getlog(path)
343 343 return filectx(self._repo, path, fileid=fileid, filelog=log)
344 344 getctx = util.cachefunc(getctx)
345 345
346 346 def parents(f):
347 347 # we want to reuse filectx objects as much as possible
348 348 p = f._path
349 349 if f._filerev is None: # working dir
350 350 pl = [(n.path(), n.filerev()) for n in f.parents()]
351 351 else:
352 352 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
353 353
354 354 if follow:
355 355 r = f.renamed()
356 356 if r:
357 357 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
358 358
359 359 return [getctx(p, n) for p, n in pl if n != nullrev]
360 360
361 361 # use linkrev to find the first changeset where self appeared
362 362 if self.rev() != self.linkrev():
363 363 base = self.filectx(self.filerev())
364 364 else:
365 365 base = self
366 366
367 367 # find all ancestors
368 368 needed = {base: 1}
369 369 visit = [base]
370 370 files = [base._path]
371 371 while visit:
372 372 f = visit.pop(0)
373 373 for p in parents(f):
374 374 if p not in needed:
375 375 needed[p] = 1
376 376 visit.append(p)
377 377 if p._path not in files:
378 378 files.append(p._path)
379 379 else:
380 380 # count how many times we'll use this
381 381 needed[p] += 1
382 382
383 383 # sort by revision (per file) which is a topological order
384 384 visit = []
385 385 for f in files:
386 386 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
387 387 visit.extend(fn)
388 388 visit.sort()
389 389 hist = {}
390 390
391 391 for r, f in visit:
392 392 curr = decorate(f.data(), f)
393 393 for p in parents(f):
394 394 if p != nullid:
395 395 curr = pair(hist[p], curr)
396 396 # trim the history of unneeded revs
397 397 needed[p] -= 1
398 398 if not needed[p]:
399 399 del hist[p]
400 400 hist[f] = curr
401 401
402 402 return zip(hist[f][0], hist[f][1].splitlines(1))
403 403
404 404 def ancestor(self, fc2):
405 405 """
406 406 find the common ancestor file context, if any, of self, and fc2
407 407 """
408 408
409 409 acache = {}
410 410
411 411 # prime the ancestor cache for the working directory
412 412 for c in (self, fc2):
413 413 if c._filerev == None:
414 414 pl = [(n.path(), n.filenode()) for n in c.parents()]
415 415 acache[(c._path, None)] = pl
416 416
417 417 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
418 418 def parents(vertex):
419 419 if vertex in acache:
420 420 return acache[vertex]
421 421 f, n = vertex
422 422 if f not in flcache:
423 423 flcache[f] = self._repo.file(f)
424 424 fl = flcache[f]
425 425 pl = [(f, p) for p in fl.parents(n) if p != nullid]
426 426 re = fl.renamed(n)
427 427 if re:
428 428 pl.append(re)
429 429 acache[vertex] = pl
430 430 return pl
431 431
432 432 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
433 433 v = ancestor.ancestor(a, b, parents)
434 434 if v:
435 435 f, n = v
436 436 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
437 437
438 438 return None
439 439
440 440 class workingctx(changectx):
441 441 """A workingctx object makes access to data related to
442 442 the current working directory convenient.
443 443 parents - a pair of parent nodeids, or None to use the dirstate.
444 444 date - any valid date string or (unixtime, offset), or None.
445 445 user - username string, or None.
446 446 extra - a dictionary of extra values, or None.
447 447 changes - a list of file lists as returned by localrepo.status()
448 448 or None to use the repository status.
449 449 """
450 450 def __init__(self, repo, parents=None, text="", user=None, date=None,
451 451 extra=None, changes=None):
452 452 self._repo = repo
453 453 self._rev = None
454 454 self._node = None
455 455 self._text = text
456 456 if date:
457 457 self._date = util.parsedate(date)
458 458 else:
459 459 self._date = util.makedate()
460 460 if user:
461 461 self._user = user
462 462 else:
463 463 self._user = self._repo.ui.username()
464 464 if parents:
465 465 p1, p2 = parents
466 self._parents = [self._repo.changectx(p) for p in (p1, p2)]
466 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
467 467 if changes:
468 468 self._status = list(changes)
469 469
470 470 self._extra = {}
471 471 if extra:
472 472 self._extra = extra.copy()
473 473 if 'branch' not in self._extra:
474 474 branch = self._repo.dirstate.branch()
475 475 try:
476 476 branch = branch.decode('UTF-8').encode('UTF-8')
477 477 except UnicodeDecodeError:
478 478 raise util.Abort(_('branch name not in UTF-8!'))
479 479 self._extra['branch'] = branch
480 480 if self._extra['branch'] == '':
481 481 self._extra['branch'] = 'default'
482 482
483 483 def __str__(self):
484 484 return str(self._parents[0]) + "+"
485 485
486 486 def __nonzero__(self):
487 487 return True
488 488
489 489 def __getattr__(self, name):
490 490 if name == '_status':
491 491 self._status = self._repo.status()
492 492 return self._status
493 493 if name == '_manifest':
494 494 self._buildmanifest()
495 495 return self._manifest
496 496 elif name == '_parents':
497 497 p = self._repo.dirstate.parents()
498 498 if p[1] == nullid:
499 499 p = p[:-1]
500 500 self._parents = [changectx(self._repo, x) for x in p]
501 501 return self._parents
502 502 else:
503 503 raise AttributeError, name
504 504
505 505 def _buildmanifest(self):
506 506 """generate a manifest corresponding to the working directory"""
507 507
508 508 man = self._parents[0].manifest().copy()
509 509 copied = self._repo.dirstate.copies()
510 510 cf = lambda x: man.flags(copied.get(x, x))
511 511 ff = self._repo.dirstate.flagfunc(cf)
512 512 modified, added, removed, deleted, unknown = self._status[:5]
513 513 for i, l in (("a", added), ("m", modified), ("u", unknown)):
514 514 for f in l:
515 515 man[f] = man.get(copied.get(f, f), nullid) + i
516 516 try:
517 517 man.set(f, ff(f))
518 518 except OSError:
519 519 pass
520 520
521 521 for f in deleted + removed:
522 522 if f in man:
523 523 del man[f]
524 524
525 525 self._manifest = man
526 526
527 527 def manifest(self): return self._manifest
528 528
529 529 def user(self): return self._user
530 530 def date(self): return self._date
531 531 def description(self): return self._text
532 532 def files(self):
533 533 f = self.modified() + self.added() + self.removed()
534 534 f.sort()
535 535 return f
536 536
537 537 def modified(self): return self._status[0]
538 538 def added(self): return self._status[1]
539 539 def removed(self): return self._status[2]
540 540 def deleted(self): return self._status[3]
541 541 def unknown(self): return self._status[4]
542 542 def clean(self): return self._status[5]
543 543 def branch(self): return self._extra['branch']
544 544 def extra(self): return self._extra
545 545
546 546 def tags(self):
547 547 t = []
548 548 [t.extend(p.tags()) for p in self.parents()]
549 549 return t
550 550
551 551 def children(self):
552 552 return []
553 553
554 554 def flags(self, path):
555 555 if '_manifest' in self.__dict__:
556 556 try:
557 557 return self._manifest.flags(path)
558 558 except KeyError:
559 559 return ''
560 560
561 561 pnode = self._parents[0].changeset()[0]
562 562 orig = self._repo.dirstate.copies().get(path, path)
563 563 node, flag = self._repo.manifest.find(pnode, orig)
564 564 try:
565 565 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
566 566 return ff(path)
567 567 except OSError:
568 568 pass
569 569
570 570 if not node or path in self.deleted() or path in self.removed():
571 571 return ''
572 572 return flag
573 573
574 574 def filectx(self, path, filelog=None):
575 575 """get a file context from the working directory"""
576 576 return workingfilectx(self._repo, path, workingctx=self,
577 577 filelog=filelog)
578 578
579 579 def ancestor(self, c2):
580 580 """return the ancestor context of self and c2"""
581 581 return self._parents[0].ancestor(c2) # punt on two parents for now
582 582
583 583 class workingfilectx(filectx):
584 584 """A workingfilectx object makes access to data related to a particular
585 585 file in the working directory convenient."""
586 586 def __init__(self, repo, path, filelog=None, workingctx=None):
587 587 """changeid can be a changeset revision, node, or tag.
588 588 fileid can be a file revision or node."""
589 589 self._repo = repo
590 590 self._path = path
591 591 self._changeid = None
592 592 self._filerev = self._filenode = None
593 593
594 594 if filelog:
595 595 self._filelog = filelog
596 596 if workingctx:
597 597 self._changectx = workingctx
598 598
599 599 def __getattr__(self, name):
600 600 if name == '_changectx':
601 601 self._changectx = workingctx(self._repo)
602 602 return self._changectx
603 603 elif name == '_repopath':
604 604 self._repopath = (self._repo.dirstate.copied(self._path)
605 605 or self._path)
606 606 return self._repopath
607 607 elif name == '_filelog':
608 608 self._filelog = self._repo.file(self._repopath)
609 609 return self._filelog
610 610 else:
611 611 raise AttributeError, name
612 612
613 613 def __nonzero__(self):
614 614 return True
615 615
616 616 def __str__(self):
617 617 return "%s@%s" % (self.path(), self._changectx)
618 618
619 619 def filectx(self, fileid):
620 620 '''opens an arbitrary revision of the file without
621 621 opening a new filelog'''
622 622 return filectx(self._repo, self._repopath, fileid=fileid,
623 623 filelog=self._filelog)
624 624
625 625 def rev(self):
626 626 if '_changectx' in self.__dict__:
627 627 return self._changectx.rev()
628 628 return self._filelog.linkrev(self._filenode)
629 629
630 630 def data(self): return self._repo.wread(self._path)
631 631 def renamed(self):
632 632 rp = self._repopath
633 633 if rp == self._path:
634 634 return None
635 635 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
636 636
637 637 def parents(self):
638 638 '''return parent filectxs, following copies if necessary'''
639 639 p = self._path
640 640 rp = self._repopath
641 641 pcl = self._changectx._parents
642 642 fl = self._filelog
643 643 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
644 644 if len(pcl) > 1:
645 645 if rp != p:
646 646 fl = None
647 647 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
648 648
649 649 return [filectx(self._repo, p, fileid=n, filelog=l)
650 650 for p,n,l in pl if n != nullid]
651 651
652 652 def children(self):
653 653 return []
654 654
655 655 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
656 656 def date(self):
657 657 t, tz = self._changectx.date()
658 658 try:
659 659 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
660 660 except OSError, err:
661 661 if err.errno != errno.ENOENT: raise
662 662 return (t, tz)
663 663
664 664 def cmp(self, text): return self._repo.wread(self._path) == text
665 665
666 666 class memctx(object):
667 667 """A memctx is a subset of changectx supposed to be built on memory
668 668 and passed to commit functions.
669 669
670 670 NOTE: this interface and the related memfilectx are experimental and
671 671 may change without notice.
672 672
673 673 parents - a pair of parent nodeids.
674 674 filectxfn - a callable taking (repo, memctx, path) arguments and
675 675 returning a memctx object.
676 676 date - any valid date string or (unixtime, offset), or None.
677 677 user - username string, or None.
678 678 extra - a dictionary of extra values, or None.
679 679 """
680 680 def __init__(self, repo, parents, text, files, filectxfn, user=None,
681 681 date=None, extra=None):
682 682 self._repo = repo
683 683 self._rev = None
684 684 self._node = None
685 685 self._text = text
686 686 self._date = date and util.parsedate(date) or util.makedate()
687 687 self._user = user or self._repo.ui.username()
688 688 parents = [(p or nullid) for p in parents]
689 689 p1, p2 = parents
690 self._parents = [self._repo.changectx(p) for p in (p1, p2)]
690 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
691 691 files = list(files)
692 692 files.sort()
693 693 self._status = [files, [], [], [], []]
694 694 self._filectxfn = filectxfn
695 695
696 696 self._extra = extra and extra.copy() or {}
697 697 if 'branch' not in self._extra:
698 698 self._extra['branch'] = 'default'
699 699 elif self._extra.get('branch') == '':
700 700 self._extra['branch'] = 'default'
701 701
702 702 def __str__(self):
703 703 return str(self._parents[0]) + "+"
704 704
705 705 def __nonzero__(self):
706 706 return True
707 707
708 708 def user(self): return self._user
709 709 def date(self): return self._date
710 710 def description(self): return self._text
711 711 def files(self): return self.modified()
712 712 def modified(self): return self._status[0]
713 713 def added(self): return self._status[1]
714 714 def removed(self): return self._status[2]
715 715 def deleted(self): return self._status[3]
716 716 def unknown(self): return self._status[4]
717 717 def clean(self): return self._status[5]
718 718 def branch(self): return self._extra['branch']
719 719 def extra(self): return self._extra
720 720 def flags(self, f): return self[f].flags()
721 721
722 722 def parents(self):
723 723 """return contexts for each parent changeset"""
724 724 return self._parents
725 725
726 726 def filectx(self, path, filelog=None):
727 727 """get a file context from the working directory"""
728 728 return self._filectxfn(self._repo, self, path)
729 729
730 730 class memfilectx(object):
731 731 """A memfilectx is a subset of filectx supposed to be built by client
732 732 code and passed to commit functions.
733 733 """
734 734 def __init__(self, path, data, islink, isexec, copied):
735 735 """copied is the source file path, or None."""
736 736 self._path = path
737 737 self._data = data
738 738 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
739 739 self._copied = None
740 740 if copied:
741 741 self._copied = (copied, nullid)
742 742
743 743 def __nonzero__(self): return True
744 744 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
745 745 def path(self): return self._path
746 746 def data(self): return self._data
747 747 def flags(self): return self._flags
748 748 def isexec(self): return 'x' in self._flags
749 749 def islink(self): return 'l' in self._flags
750 750 def renamed(self): return self._copied
751 751
@@ -1,74 +1,74 b''
1 1 # Revision graph generator for Mercurial
2 2 #
3 3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
4 4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
5 5 #
6 6 # This software may be used and distributed according to the terms of
7 7 # the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import nullrev, short
10 10 import ui, hg, util, templatefilters
11 11
12 12 def graph(repo, start_rev, stop_rev):
13 13 """incremental revision grapher
14 14
15 15 This generator function walks through the revision history from
16 16 revision start_rev to revision stop_rev (which must be less than
17 17 or equal to start_rev) and for each revision emits tuples with the
18 18 following elements:
19 19
20 20 - Current node
21 21 - Column and color for the current node
22 22 - Edges; a list of (col, next_col, color) indicating the edges between
23 23 the current node and its parents.
24 24 - First line of the changeset description
25 25 - The changeset author
26 26 - The changeset date/time
27 27 """
28 28
29 29 assert start_rev >= stop_rev
30 30 curr_rev = start_rev
31 31 revs = []
32 32 cl = repo.changelog
33 33 colors = {}
34 34 new_color = 1
35 35
36 36 while curr_rev >= stop_rev:
37 37 node = cl.node(curr_rev)
38 38
39 39 # Compute revs and next_revs
40 40 if curr_rev not in revs:
41 41 revs.append(curr_rev) # new head
42 42 colors[curr_rev] = new_color
43 43 new_color += 1
44 44
45 45 idx = revs.index(curr_rev)
46 46 color = colors.pop(curr_rev)
47 47 next = revs[:]
48 48
49 49 # Add parents to next_revs
50 50 parents = [x for x in cl.parentrevs(curr_rev) if x != nullrev]
51 51 addparents = [p for p in parents if p not in next]
52 52 next[idx:idx + 1] = addparents
53 53
54 54 # Set colors for the parents
55 55 for i, p in enumerate(addparents):
56 56 if not i:
57 57 colors[p] = color
58 58 else:
59 59 colors[p] = new_color
60 60 new_color += 1
61 61
62 62 # Add edges to the graph
63 63 edges = []
64 64 for col, r in enumerate(revs):
65 65 if r in next:
66 66 edges.append((col, next.index(r), colors[r]))
67 67 elif r == curr_rev:
68 68 for p in parents:
69 69 edges.append((col, next.index(p), colors[p]))
70 70
71 71 # Yield and move on
72 yield (repo.changectx(curr_rev), (idx, color), edges)
72 yield (repo[curr_rev], (idx, color), edges)
73 73 revs = next
74 74 curr_rev -= 1
@@ -1,379 +1,379 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, mimetypes
10 10 from mercurial.node import hex, nullid
11 11 from mercurial.repo import RepoError
12 12 from mercurial import mdiff, ui, hg, util, patch, hook
13 13 from mercurial import revlog, templater, templatefilters, changegroup
14 14 from common import get_mtime, style_map, paritygen, countgen, ErrorResponse
15 15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
16 16 from request import wsgirequest
17 17 import webcommands, protocol, webutil
18 18
19 19 shortcuts = {
20 20 'cl': [('cmd', ['changelog']), ('rev', None)],
21 21 'sl': [('cmd', ['shortlog']), ('rev', None)],
22 22 'cs': [('cmd', ['changeset']), ('node', None)],
23 23 'f': [('cmd', ['file']), ('filenode', None)],
24 24 'fl': [('cmd', ['filelog']), ('filenode', None)],
25 25 'fd': [('cmd', ['filediff']), ('node', None)],
26 26 'fa': [('cmd', ['annotate']), ('filenode', None)],
27 27 'mf': [('cmd', ['manifest']), ('manifest', None)],
28 28 'ca': [('cmd', ['archive']), ('node', None)],
29 29 'tags': [('cmd', ['tags'])],
30 30 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
31 31 'static': [('cmd', ['static']), ('file', None)]
32 32 }
33 33
34 34 class hgweb(object):
35 35 def __init__(self, repo, name=None):
36 36 if isinstance(repo, str):
37 37 parentui = ui.ui(report_untrusted=False, interactive=False)
38 38 self.repo = hg.repository(parentui, repo)
39 39 else:
40 40 self.repo = repo
41 41
42 42 hook.redirect(True)
43 43 self.mtime = -1
44 44 self.reponame = name
45 45 self.archives = 'zip', 'gz', 'bz2'
46 46 self.stripecount = 1
47 47 self._capabilities = None
48 48 # a repo owner may set web.templates in .hg/hgrc to get any file
49 49 # readable by the user running the CGI script
50 50 self.templatepath = self.config("web", "templates",
51 51 templater.templatepath(),
52 52 untrusted=False)
53 53
54 54 # The CGI scripts are often run by a user different from the repo owner.
55 55 # Trust the settings from the .hg/hgrc files by default.
56 56 def config(self, section, name, default=None, untrusted=True):
57 57 return self.repo.ui.config(section, name, default,
58 58 untrusted=untrusted)
59 59
60 60 def configbool(self, section, name, default=False, untrusted=True):
61 61 return self.repo.ui.configbool(section, name, default,
62 62 untrusted=untrusted)
63 63
64 64 def configlist(self, section, name, default=None, untrusted=True):
65 65 return self.repo.ui.configlist(section, name, default,
66 66 untrusted=untrusted)
67 67
68 68 def refresh(self):
69 69 mtime = get_mtime(self.repo.root)
70 70 if mtime != self.mtime:
71 71 self.mtime = mtime
72 72 self.repo = hg.repository(self.repo.ui, self.repo.root)
73 73 self.maxchanges = int(self.config("web", "maxchanges", 10))
74 74 self.stripecount = int(self.config("web", "stripes", 1))
75 75 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
76 76 self.maxfiles = int(self.config("web", "maxfiles", 10))
77 77 self.allowpull = self.configbool("web", "allowpull", True)
78 78 self.encoding = self.config("web", "encoding", util._encoding)
79 79 self._capabilities = None
80 80
81 81 def capabilities(self):
82 82 if self._capabilities is not None:
83 83 return self._capabilities
84 84 caps = ['lookup', 'changegroupsubset']
85 85 if self.configbool('server', 'uncompressed'):
86 86 caps.append('stream=%d' % self.repo.changelog.version)
87 87 if changegroup.bundlepriority:
88 88 caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
89 89 self._capabilities = caps
90 90 return caps
91 91
92 92 def run(self):
93 93 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
94 94 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
95 95 import mercurial.hgweb.wsgicgi as wsgicgi
96 96 wsgicgi.launch(self)
97 97
98 98 def __call__(self, env, respond):
99 99 req = wsgirequest(env, respond)
100 100 self.run_wsgi(req)
101 101 return req
102 102
103 103 def run_wsgi(self, req):
104 104
105 105 self.refresh()
106 106
107 107 # expand form shortcuts
108 108
109 109 for k in shortcuts.iterkeys():
110 110 if k in req.form:
111 111 for name, value in shortcuts[k]:
112 112 if value is None:
113 113 value = req.form[k]
114 114 req.form[name] = value
115 115 del req.form[k]
116 116
117 117 # work with CGI variables to create coherent structure
118 118 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
119 119
120 120 req.url = req.env['SCRIPT_NAME']
121 121 if not req.url.endswith('/'):
122 122 req.url += '/'
123 123 if 'REPO_NAME' in req.env:
124 124 req.url += req.env['REPO_NAME'] + '/'
125 125
126 126 if 'PATH_INFO' in req.env:
127 127 parts = req.env['PATH_INFO'].strip('/').split('/')
128 128 repo_parts = req.env.get('REPO_NAME', '').split('/')
129 129 if parts[:len(repo_parts)] == repo_parts:
130 130 parts = parts[len(repo_parts):]
131 131 query = '/'.join(parts)
132 132 else:
133 133 query = req.env['QUERY_STRING'].split('&', 1)[0]
134 134 query = query.split(';', 1)[0]
135 135
136 136 # translate user-visible url structure to internal structure
137 137
138 138 args = query.split('/', 2)
139 139 if 'cmd' not in req.form and args and args[0]:
140 140
141 141 cmd = args.pop(0)
142 142 style = cmd.rfind('-')
143 143 if style != -1:
144 144 req.form['style'] = [cmd[:style]]
145 145 cmd = cmd[style+1:]
146 146
147 147 # avoid accepting e.g. style parameter as command
148 148 if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
149 149 req.form['cmd'] = [cmd]
150 150
151 151 if args and args[0]:
152 152 node = args.pop(0)
153 153 req.form['node'] = [node]
154 154 if args:
155 155 req.form['file'] = args
156 156
157 157 if cmd == 'static':
158 158 req.form['file'] = req.form['node']
159 159 elif cmd == 'archive':
160 160 fn = req.form['node'][0]
161 161 for type_, spec in self.archive_specs.iteritems():
162 162 ext = spec[2]
163 163 if fn.endswith(ext):
164 164 req.form['node'] = [fn[:-len(ext)]]
165 165 req.form['type'] = [type_]
166 166
167 167 # process this if it's a protocol request
168 168
169 169 cmd = req.form.get('cmd', [''])[0]
170 170 if cmd in protocol.__all__:
171 171 method = getattr(protocol, cmd)
172 172 method(self, req)
173 173 return
174 174
175 175 # process the web interface request
176 176
177 177 try:
178 178
179 179 tmpl = self.templater(req)
180 180 ctype = tmpl('mimetype', encoding=self.encoding)
181 181 ctype = templater.stringify(ctype)
182 182
183 183 if cmd == '':
184 184 req.form['cmd'] = [tmpl.cache['default']]
185 185 cmd = req.form['cmd'][0]
186 186
187 187 if cmd not in webcommands.__all__:
188 188 msg = 'no such method: %s' % cmd
189 189 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
190 190 elif cmd == 'file' and 'raw' in req.form.get('style', []):
191 191 self.ctype = ctype
192 192 content = webcommands.rawfile(self, req, tmpl)
193 193 else:
194 194 content = getattr(webcommands, cmd)(self, req, tmpl)
195 195 req.respond(HTTP_OK, ctype)
196 196
197 197 req.write(content)
198 198 del tmpl
199 199
200 200 except revlog.LookupError, err:
201 201 req.respond(HTTP_NOT_FOUND, ctype)
202 202 msg = str(err)
203 203 if 'manifest' not in msg:
204 204 msg = 'revision not found: %s' % err.name
205 205 req.write(tmpl('error', error=msg))
206 206 except (RepoError, revlog.RevlogError), inst:
207 207 req.respond(HTTP_SERVER_ERROR, ctype)
208 208 req.write(tmpl('error', error=str(inst)))
209 209 except ErrorResponse, inst:
210 210 req.respond(inst.code, ctype)
211 211 req.write(tmpl('error', error=inst.message))
212 212
213 213 def templater(self, req):
214 214
215 215 # determine scheme, port and server name
216 216 # this is needed to create absolute urls
217 217
218 218 proto = req.env.get('wsgi.url_scheme')
219 219 if proto == 'https':
220 220 proto = 'https'
221 221 default_port = "443"
222 222 else:
223 223 proto = 'http'
224 224 default_port = "80"
225 225
226 226 port = req.env["SERVER_PORT"]
227 227 port = port != default_port and (":" + port) or ""
228 228 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
229 229 staticurl = self.config("web", "staticurl") or req.url + 'static/'
230 230 if not staticurl.endswith('/'):
231 231 staticurl += '/'
232 232
233 233 # some functions for the templater
234 234
235 235 def header(**map):
236 236 yield tmpl('header', encoding=self.encoding, **map)
237 237
238 238 def footer(**map):
239 239 yield tmpl("footer", **map)
240 240
241 241 def motd(**map):
242 242 yield self.config("web", "motd", "")
243 243
244 244 def sessionvars(**map):
245 245 fields = []
246 246 if 'style' in req.form:
247 247 style = req.form['style'][0]
248 248 if style != self.config('web', 'style', ''):
249 249 fields.append(('style', style))
250 250
251 251 separator = req.url[-1] == '?' and ';' or '?'
252 252 for name, value in fields:
253 253 yield dict(name=name, value=value, separator=separator)
254 254 separator = ';'
255 255
256 256 # figure out which style to use
257 257
258 258 style = self.config("web", "style", "")
259 259 if 'style' in req.form:
260 260 style = req.form['style'][0]
261 261 mapfile = style_map(self.templatepath, style)
262 262
263 263 if not self.reponame:
264 264 self.reponame = (self.config("web", "name")
265 265 or req.env.get('REPO_NAME')
266 266 or req.url.strip('/') or self.repo.root)
267 267
268 268 # create the templater
269 269
270 270 tmpl = templater.templater(mapfile, templatefilters.filters,
271 271 defaults={"url": req.url,
272 272 "staticurl": staticurl,
273 273 "urlbase": urlbase,
274 274 "repo": self.reponame,
275 275 "header": header,
276 276 "footer": footer,
277 277 "motd": motd,
278 278 "sessionvars": sessionvars
279 279 })
280 280 return tmpl
281 281
282 282 def archivelist(self, nodeid):
283 283 allowed = self.configlist("web", "allow_archive")
284 284 for i, spec in self.archive_specs.iteritems():
285 285 if i in allowed or self.configbool("web", "allow" + i):
286 286 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
287 287
288 288 def listfilediffs(self, tmpl, files, changeset):
289 289 for f in files[:self.maxfiles]:
290 290 yield tmpl("filedifflink", node=hex(changeset), file=f)
291 291 if len(files) > self.maxfiles:
292 292 yield tmpl("fileellipses")
293 293
294 294 def diff(self, tmpl, node1, node2, files):
295 295 def filterfiles(filters, files):
296 296 l = [x for x in files if x in filters]
297 297
298 298 for t in filters:
299 299 if t and t[-1] != os.sep:
300 300 t += os.sep
301 301 l += [x for x in files if x.startswith(t)]
302 302 return l
303 303
304 304 parity = paritygen(self.stripecount)
305 305 def diffblock(diff, f, fn):
306 306 yield tmpl("diffblock",
307 307 lines=prettyprintlines(diff),
308 308 parity=parity.next(),
309 309 file=f,
310 310 filenode=hex(fn or nullid))
311 311
312 312 blockcount = countgen()
313 313 def prettyprintlines(diff):
314 314 blockno = blockcount.next()
315 315 for lineno, l in enumerate(diff.splitlines(1)):
316 316 if blockno == 0:
317 317 lineno = lineno + 1
318 318 else:
319 319 lineno = "%d.%d" % (blockno, lineno + 1)
320 320 if l.startswith('+'):
321 321 ltype = "difflineplus"
322 322 elif l.startswith('-'):
323 323 ltype = "difflineminus"
324 324 elif l.startswith('@'):
325 325 ltype = "difflineat"
326 326 else:
327 327 ltype = "diffline"
328 328 yield tmpl(ltype,
329 329 line=l,
330 330 lineid="l%s" % lineno,
331 331 linenumber="% 8s" % lineno)
332 332
333 333 r = self.repo
334 c1 = r.changectx(node1)
335 c2 = r.changectx(node2)
334 c1 = r[node1]
335 c2 = r[node2]
336 336 date1 = util.datestr(c1.date())
337 337 date2 = util.datestr(c2.date())
338 338
339 339 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
340 340 if files:
341 341 modified, added, removed = map(lambda x: filterfiles(files, x),
342 342 (modified, added, removed))
343 343
344 344 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
345 345 for f in modified:
346 346 to = c1.filectx(f).data()
347 347 tn = c2.filectx(f).data()
348 348 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
349 349 opts=diffopts), f, tn)
350 350 for f in added:
351 351 to = None
352 352 tn = c2.filectx(f).data()
353 353 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
354 354 opts=diffopts), f, tn)
355 355 for f in removed:
356 356 to = c1.filectx(f).data()
357 357 tn = None
358 358 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
359 359 opts=diffopts), f, tn)
360 360
361 361 archive_specs = {
362 362 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
363 363 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
364 364 'zip': ('application/zip', 'zip', '.zip', None),
365 365 }
366 366
367 367 def check_perm(self, req, op, default):
368 368 '''check permission for operation based on user auth.
369 369 return true if op allowed, else false.
370 370 default is policy to use if no config given.'''
371 371
372 372 user = req.env.get('REMOTE_USER')
373 373
374 374 deny = self.configlist('web', 'deny_' + op)
375 375 if deny and (not user or deny == ['*'] or user in deny):
376 376 return False
377 377
378 378 allow = self.configlist('web', 'allow_' + op)
379 379 return (allow and (allow == ['*'] or user in allow)) or default
@@ -1,613 +1,612 b''
1 1 #
2 2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, mimetypes, re, cgi
9 9 import webutil
10 10 from mercurial import revlog, archival, templatefilters
11 11 from mercurial.node import short, hex, nullid
12 12 from mercurial.util import binary, datestr
13 13 from mercurial.repo import RepoError
14 14 from common import paritygen, staticfile, get_contact, ErrorResponse
15 15 from common import HTTP_OK, HTTP_NOT_FOUND
16 16 from mercurial import graphmod
17 17
18 18 # __all__ is populated with the allowed commands. Be sure to add to it if
19 19 # you're adding a new command, or the new command won't work.
20 20
21 21 __all__ = [
22 22 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
23 23 'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog',
24 24 'archive', 'static', 'graph',
25 25 ]
26 26
27 27 def log(web, req, tmpl):
28 28 if 'file' in req.form and req.form['file'][0]:
29 29 return filelog(web, req, tmpl)
30 30 else:
31 31 return changelog(web, req, tmpl)
32 32
33 33 def rawfile(web, req, tmpl):
34 34 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
35 35 if not path:
36 36 content = manifest(web, req, tmpl)
37 37 req.respond(HTTP_OK, web.ctype)
38 38 return content
39 39
40 40 try:
41 41 fctx = webutil.filectx(web.repo, req)
42 42 except revlog.LookupError, inst:
43 43 try:
44 44 content = manifest(web, req, tmpl)
45 45 req.respond(HTTP_OK, web.ctype)
46 46 return content
47 47 except ErrorResponse:
48 48 raise inst
49 49
50 50 path = fctx.path()
51 51 text = fctx.data()
52 52 mt = mimetypes.guess_type(path)[0]
53 53 if mt is None or binary(text):
54 54 mt = mt or 'application/octet-stream'
55 55
56 56 req.respond(HTTP_OK, mt, path, len(text))
57 57 return [text]
58 58
59 59 def _filerevision(web, tmpl, fctx):
60 60 f = fctx.path()
61 61 text = fctx.data()
62 62 fl = fctx.filelog()
63 63 n = fctx.filenode()
64 64 parity = paritygen(web.stripecount)
65 65
66 66 if binary(text):
67 67 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
68 68 text = '(binary:%s)' % mt
69 69
70 70 def lines():
71 71 for lineno, t in enumerate(text.splitlines(1)):
72 72 yield {"line": t,
73 73 "lineid": "l%d" % (lineno + 1),
74 74 "linenumber": "% 6d" % (lineno + 1),
75 75 "parity": parity.next()}
76 76
77 77 return tmpl("filerevision",
78 78 file=f,
79 79 path=webutil.up(f),
80 80 text=lines(),
81 81 rev=fctx.rev(),
82 82 node=hex(fctx.node()),
83 83 author=fctx.user(),
84 84 date=fctx.date(),
85 85 desc=fctx.description(),
86 86 branch=webutil.nodebranchnodefault(fctx),
87 87 parent=webutil.siblings(fctx.parents()),
88 88 child=webutil.siblings(fctx.children()),
89 89 rename=webutil.renamelink(fctx),
90 90 permissions=fctx.manifest().flags(f))
91 91
92 92 def file(web, req, tmpl):
93 93 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
94 94 if path:
95 95 try:
96 96 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
97 97 except revlog.LookupError, inst:
98 98 pass
99 99
100 100 try:
101 101 return manifest(web, req, tmpl)
102 102 except ErrorResponse:
103 103 raise inst
104 104
105 105 def _search(web, tmpl, query):
106 106
107 107 def changelist(**map):
108 108 cl = web.repo.changelog
109 109 count = 0
110 110 qw = query.lower().split()
111 111
112 112 def revgen():
113 113 for i in xrange(cl.count() - 1, 0, -100):
114 114 l = []
115 115 for j in xrange(max(0, i - 100), i + 1):
116 ctx = web.repo.changectx(j)
116 ctx = web.repo[j]
117 117 l.append(ctx)
118 118 l.reverse()
119 119 for e in l:
120 120 yield e
121 121
122 122 for ctx in revgen():
123 123 miss = 0
124 124 for q in qw:
125 125 if not (q in ctx.user().lower() or
126 126 q in ctx.description().lower() or
127 127 q in " ".join(ctx.files()).lower()):
128 128 miss = 1
129 129 break
130 130 if miss:
131 131 continue
132 132
133 133 count += 1
134 134 n = ctx.node()
135 135 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
136 136
137 137 yield tmpl('searchentry',
138 138 parity=parity.next(),
139 139 author=ctx.user(),
140 140 parent=webutil.siblings(ctx.parents()),
141 141 child=webutil.siblings(ctx.children()),
142 142 changelogtag=showtags,
143 143 desc=ctx.description(),
144 144 date=ctx.date(),
145 145 files=web.listfilediffs(tmpl, ctx.files(), n),
146 146 rev=ctx.rev(),
147 147 node=hex(n),
148 148 tags=webutil.nodetagsdict(web.repo, n),
149 149 inbranch=webutil.nodeinbranch(web.repo, ctx),
150 150 branches=webutil.nodebranchdict(web.repo, ctx))
151 151
152 152 if count >= web.maxchanges:
153 153 break
154 154
155 155 cl = web.repo.changelog
156 156 parity = paritygen(web.stripecount)
157 157
158 158 return tmpl('search',
159 159 query=query,
160 160 node=hex(cl.tip()),
161 161 entries=changelist,
162 162 archives=web.archivelist("tip"))
163 163
164 164 def changelog(web, req, tmpl, shortlog = False):
165 165 if 'node' in req.form:
166 166 ctx = webutil.changectx(web.repo, req)
167 167 else:
168 168 if 'rev' in req.form:
169 169 hi = req.form['rev'][0]
170 170 else:
171 171 hi = web.repo.changelog.count() - 1
172 172 try:
173 ctx = web.repo.changectx(hi)
173 ctx = web.repo[hi]
174 174 except RepoError:
175 175 return _search(web, tmpl, hi) # XXX redirect to 404 page?
176 176
177 177 def changelist(limit=0, **map):
178 178 cl = web.repo.changelog
179 179 l = [] # build a list in forward order for efficiency
180 180 for i in xrange(start, end):
181 ctx = web.repo.changectx(i)
181 ctx = web.repo[i]
182 182 n = ctx.node()
183 183 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
184 184
185 185 l.insert(0, {"parity": parity.next(),
186 186 "author": ctx.user(),
187 187 "parent": webutil.siblings(ctx.parents(), i - 1),
188 188 "child": webutil.siblings(ctx.children(), i + 1),
189 189 "changelogtag": showtags,
190 190 "desc": ctx.description(),
191 191 "date": ctx.date(),
192 192 "files": web.listfilediffs(tmpl, ctx.files(), n),
193 193 "rev": i,
194 194 "node": hex(n),
195 195 "tags": webutil.nodetagsdict(web.repo, n),
196 196 "inbranch": webutil.nodeinbranch(web.repo, ctx),
197 197 "branches": webutil.nodebranchdict(web.repo, ctx)
198 198 })
199 199
200 200 if limit > 0:
201 201 l = l[:limit]
202 202
203 203 for e in l:
204 204 yield e
205 205
206 206 maxchanges = shortlog and web.maxshortchanges or web.maxchanges
207 207 cl = web.repo.changelog
208 208 count = cl.count()
209 209 pos = ctx.rev()
210 210 start = max(0, pos - maxchanges + 1)
211 211 end = min(count, start + maxchanges)
212 212 pos = end - 1
213 213 parity = paritygen(web.stripecount, offset=start-end)
214 214
215 215 changenav = webutil.revnavgen(pos, maxchanges, count, web.repo.changectx)
216 216
217 217 return tmpl(shortlog and 'shortlog' or 'changelog',
218 218 changenav=changenav,
219 219 node=hex(ctx.node()),
220 220 rev=pos, changesets=count,
221 221 entries=lambda **x: changelist(limit=0,**x),
222 222 latestentry=lambda **x: changelist(limit=1,**x),
223 223 archives=web.archivelist("tip"))
224 224
225 225 def shortlog(web, req, tmpl):
226 226 return changelog(web, req, tmpl, shortlog = True)
227 227
228 228 def changeset(web, req, tmpl):
229 229 ctx = webutil.changectx(web.repo, req)
230 230 n = ctx.node()
231 231 showtags = webutil.showtag(web.repo, tmpl, 'changesettag', n)
232 232 parents = ctx.parents()
233 233 p1 = parents[0].node()
234 234
235 235 files = []
236 236 parity = paritygen(web.stripecount)
237 237 for f in ctx.files():
238 238 files.append(tmpl("filenodelink",
239 239 node=hex(n), file=f,
240 240 parity=parity.next()))
241 241
242 242 diffs = web.diff(tmpl, p1, n, None)
243 243 return tmpl('changeset',
244 244 diff=diffs,
245 245 rev=ctx.rev(),
246 246 node=hex(n),
247 247 parent=webutil.siblings(parents),
248 248 child=webutil.siblings(ctx.children()),
249 249 changesettag=showtags,
250 250 author=ctx.user(),
251 251 desc=ctx.description(),
252 252 date=ctx.date(),
253 253 files=files,
254 254 archives=web.archivelist(hex(n)),
255 255 tags=webutil.nodetagsdict(web.repo, n),
256 256 branch=webutil.nodebranchnodefault(ctx),
257 257 inbranch=webutil.nodeinbranch(web.repo, ctx),
258 258 branches=webutil.nodebranchdict(web.repo, ctx))
259 259
260 260 rev = changeset
261 261
262 262 def manifest(web, req, tmpl):
263 263 ctx = webutil.changectx(web.repo, req)
264 264 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
265 265 mf = ctx.manifest()
266 266 node = ctx.node()
267 267
268 268 files = {}
269 269 parity = paritygen(web.stripecount)
270 270
271 271 if path and path[-1] != "/":
272 272 path += "/"
273 273 l = len(path)
274 274 abspath = "/" + path
275 275
276 276 for f, n in mf.items():
277 277 if f[:l] != path:
278 278 continue
279 279 remain = f[l:]
280 280 if "/" in remain:
281 281 short = remain[:remain.index("/") + 1] # bleah
282 282 files[short] = (f, None)
283 283 else:
284 284 short = os.path.basename(remain)
285 285 files[short] = (f, n)
286 286
287 287 if not files:
288 288 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
289 289
290 290 def filelist(**map):
291 291 fl = files.keys()
292 292 fl.sort()
293 293 for f in fl:
294 294 full, fnode = files[f]
295 295 if not fnode:
296 296 continue
297 297
298 298 fctx = ctx.filectx(full)
299 299 yield {"file": full,
300 300 "parity": parity.next(),
301 301 "basename": f,
302 "date": fctx.changectx().date(),
302 "date": fctx.date(),
303 303 "size": fctx.size(),
304 304 "permissions": mf.flags(full)}
305 305
306 306 def dirlist(**map):
307 307 fl = files.keys()
308 308 fl.sort()
309 309 for f in fl:
310 310 full, fnode = files[f]
311 311 if fnode:
312 312 continue
313 313
314 314 yield {"parity": parity.next(),
315 315 "path": "%s%s" % (abspath, f),
316 316 "basename": f[:-1]}
317 317
318 318 return tmpl("manifest",
319 319 rev=ctx.rev(),
320 320 node=hex(node),
321 321 path=abspath,
322 322 up=webutil.up(abspath),
323 323 upparity=parity.next(),
324 324 fentries=filelist,
325 325 dentries=dirlist,
326 326 archives=web.archivelist(hex(node)),
327 327 tags=webutil.nodetagsdict(web.repo, node),
328 328 inbranch=webutil.nodeinbranch(web.repo, ctx),
329 329 branches=webutil.nodebranchdict(web.repo, ctx))
330 330
331 331 def tags(web, req, tmpl):
332 332 i = web.repo.tagslist()
333 333 i.reverse()
334 334 parity = paritygen(web.stripecount)
335 335
336 336 def entries(notip=False,limit=0, **map):
337 337 count = 0
338 338 for k, n in i:
339 339 if notip and k == "tip":
340 340 continue
341 341 if limit > 0 and count >= limit:
342 342 continue
343 343 count = count + 1
344 344 yield {"parity": parity.next(),
345 345 "tag": k,
346 "date": web.repo.changectx(n).date(),
346 "date": web.repo[n].date(),
347 347 "node": hex(n)}
348 348
349 349 return tmpl("tags",
350 350 node=hex(web.repo.changelog.tip()),
351 351 entries=lambda **x: entries(False,0, **x),
352 352 entriesnotip=lambda **x: entries(True,0, **x),
353 353 latestentry=lambda **x: entries(True,1, **x))
354 354
355 355 def summary(web, req, tmpl):
356 356 i = web.repo.tagslist()
357 357 i.reverse()
358 358
359 359 def tagentries(**map):
360 360 parity = paritygen(web.stripecount)
361 361 count = 0
362 362 for k, n in i:
363 363 if k == "tip": # skip tip
364 364 continue
365 365
366 366 count += 1
367 367 if count > 10: # limit to 10 tags
368 368 break
369 369
370 370 yield tmpl("tagentry",
371 371 parity=parity.next(),
372 372 tag=k,
373 373 node=hex(n),
374 date=web.repo.changectx(n).date())
374 date=web.repo[n].date())
375 375
376 376 def branches(**map):
377 377 parity = paritygen(web.stripecount)
378 378
379 379 b = web.repo.branchtags()
380 380 l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.items()]
381 381 l.sort()
382 382
383 383 for r,n,t in l:
384 ctx = web.repo.changectx(n)
385 384 yield {'parity': parity.next(),
386 385 'branch': t,
387 386 'node': hex(n),
388 'date': ctx.date()}
387 'date': web.repo[n].date()}
389 388
390 389 def changelist(**map):
391 390 parity = paritygen(web.stripecount, offset=start-end)
392 391 l = [] # build a list in forward order for efficiency
393 392 for i in xrange(start, end):
394 ctx = web.repo.changectx(i)
393 ctx = web.repo[i]
395 394 n = ctx.node()
396 395 hn = hex(n)
397 396
398 397 l.insert(0, tmpl(
399 398 'shortlogentry',
400 399 parity=parity.next(),
401 400 author=ctx.user(),
402 401 desc=ctx.description(),
403 402 date=ctx.date(),
404 403 rev=i,
405 404 node=hn,
406 405 tags=webutil.nodetagsdict(web.repo, n),
407 406 inbranch=webutil.nodeinbranch(web.repo, ctx),
408 407 branches=webutil.nodebranchdict(web.repo, ctx)))
409 408
410 409 yield l
411 410
412 411 cl = web.repo.changelog
413 412 count = cl.count()
414 413 start = max(0, count - web.maxchanges)
415 414 end = min(count, start + web.maxchanges)
416 415
417 416 return tmpl("summary",
418 417 desc=web.config("web", "description", "unknown"),
419 418 owner=get_contact(web.config) or "unknown",
420 419 lastchange=cl.read(cl.tip())[2],
421 420 tags=tagentries,
422 421 branches=branches,
423 422 shortlog=changelist,
424 423 node=hex(cl.tip()),
425 424 archives=web.archivelist("tip"))
426 425
427 426 def filediff(web, req, tmpl):
428 427 fctx = webutil.filectx(web.repo, req)
429 428 n = fctx.node()
430 429 path = fctx.path()
431 430 parents = fctx.parents()
432 431 p1 = parents and parents[0].node() or nullid
433 432
434 433 diffs = web.diff(tmpl, p1, n, [path])
435 434 return tmpl("filediff",
436 435 file=path,
437 436 node=hex(n),
438 437 rev=fctx.rev(),
439 438 date=fctx.date(),
440 439 desc=fctx.description(),
441 440 author=fctx.user(),
442 441 rename=webutil.renamelink(fctx),
443 442 branch=webutil.nodebranchnodefault(fctx),
444 443 parent=webutil.siblings(parents),
445 444 child=webutil.siblings(fctx.children()),
446 445 diff=diffs)
447 446
448 447 diff = filediff
449 448
450 449 def annotate(web, req, tmpl):
451 450 fctx = webutil.filectx(web.repo, req)
452 451 f = fctx.path()
453 452 n = fctx.filenode()
454 453 fl = fctx.filelog()
455 454 parity = paritygen(web.stripecount)
456 455
457 456 def annotate(**map):
458 457 last = None
459 458 if binary(fctx.data()):
460 459 mt = (mimetypes.guess_type(fctx.path())[0]
461 460 or 'application/octet-stream')
462 461 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
463 462 '(binary:%s)' % mt)])
464 463 else:
465 464 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
466 465 for lineno, ((f, targetline), l) in lines:
467 466 fnode = f.filenode()
468 467
469 468 if last != fnode:
470 469 last = fnode
471 470
472 471 yield {"parity": parity.next(),
473 472 "node": hex(f.node()),
474 473 "rev": f.rev(),
475 474 "author": f.user(),
476 475 "desc": f.description(),
477 476 "file": f.path(),
478 477 "targetline": targetline,
479 478 "line": l,
480 479 "lineid": "l%d" % (lineno + 1),
481 480 "linenumber": "% 6d" % (lineno + 1)}
482 481
483 482 return tmpl("fileannotate",
484 483 file=f,
485 484 annotate=annotate,
486 485 path=webutil.up(f),
487 486 rev=fctx.rev(),
488 487 node=hex(fctx.node()),
489 488 author=fctx.user(),
490 489 date=fctx.date(),
491 490 desc=fctx.description(),
492 491 rename=webutil.renamelink(fctx),
493 492 branch=webutil.nodebranchnodefault(fctx),
494 493 parent=webutil.siblings(fctx.parents()),
495 494 child=webutil.siblings(fctx.children()),
496 495 permissions=fctx.manifest().flags(f))
497 496
498 497 def filelog(web, req, tmpl):
499 498 fctx = webutil.filectx(web.repo, req)
500 499 f = fctx.path()
501 500 fl = fctx.filelog()
502 501 count = fl.count()
503 502 pagelen = web.maxshortchanges
504 503 pos = fctx.filerev()
505 504 start = max(0, pos - pagelen + 1)
506 505 end = min(count, start + pagelen)
507 506 pos = end - 1
508 507 parity = paritygen(web.stripecount, offset=start-end)
509 508
510 509 def entries(limit=0, **map):
511 510 l = []
512 511
513 512 for i in xrange(start, end):
514 513 ctx = fctx.filectx(i)
515 514 n = fl.node(i)
516 515
517 516 l.insert(0, {"parity": parity.next(),
518 517 "filerev": i,
519 518 "file": f,
520 519 "node": hex(ctx.node()),
521 520 "author": ctx.user(),
522 521 "date": ctx.date(),
523 522 "rename": webutil.renamelink(fctx),
524 523 "parent": webutil.siblings(fctx.parents()),
525 524 "child": webutil.siblings(fctx.children()),
526 525 "desc": ctx.description()})
527 526
528 527 if limit > 0:
529 528 l = l[:limit]
530 529
531 530 for e in l:
532 531 yield e
533 532
534 533 nodefunc = lambda x: fctx.filectx(fileid=x)
535 534 nav = webutil.revnavgen(pos, pagelen, count, nodefunc)
536 535 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
537 536 entries=lambda **x: entries(limit=0, **x),
538 537 latestentry=lambda **x: entries(limit=1, **x))
539 538
540 539
541 540 def archive(web, req, tmpl):
542 541 type_ = req.form.get('type', [None])[0]
543 542 allowed = web.configlist("web", "allow_archive")
544 543 key = req.form['node'][0]
545 544
546 545 if not (type_ in web.archives and (type_ in allowed or
547 546 web.configbool("web", "allow" + type_, False))):
548 547 msg = 'Unsupported archive type: %s' % type_
549 548 raise ErrorResponse(HTTP_NOT_FOUND, msg)
550 549
551 550 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
552 551 cnode = web.repo.lookup(key)
553 552 arch_version = key
554 553 if cnode == key or key == 'tip':
555 554 arch_version = short(cnode)
556 555 name = "%s-%s" % (reponame, arch_version)
557 556 mimetype, artype, extension, encoding = web.archive_specs[type_]
558 557 headers = [
559 558 ('Content-Type', mimetype),
560 559 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
561 560 ]
562 561 if encoding:
563 562 headers.append(('Content-Encoding', encoding))
564 563 req.header(headers)
565 564 req.respond(HTTP_OK)
566 565 archival.archive(web.repo, req, cnode, artype, prefix=name)
567 566 return []
568 567
569 568
570 569 def static(web, req, tmpl):
571 570 fname = req.form['file'][0]
572 571 # a repo owner may set web.static in .hg/hgrc to get any file
573 572 # readable by the user running the CGI script
574 573 static = web.config("web", "static",
575 574 os.path.join(web.templatepath, "static"),
576 575 untrusted=False)
577 576 return [staticfile(static, fname, req)]
578 577
579 578 def graph(web, req, tmpl):
580 579 rev = webutil.changectx(web.repo, req).rev()
581 580 bg_height = 39
582 581
583 582 max_rev = web.repo.changelog.count() - 1
584 583 revcount = min(max_rev, int(req.form.get('revcount', [25])[0]))
585 584 revnode = web.repo.changelog.node(rev)
586 585 revnode_hex = hex(revnode)
587 586 uprev = min(max_rev, rev + revcount)
588 587 downrev = max(0, rev - revcount)
589 588 lessrev = max(0, rev - revcount / 2)
590 589
591 590 maxchanges = web.maxshortchanges or web.maxchanges
592 591 count = web.repo.changelog.count()
593 592 changenav = webutil.revnavgen(rev, maxchanges, count, web.repo.changectx)
594 593
595 594 tree = list(graphmod.graph(web.repo, rev, rev - revcount))
596 595 canvasheight = (len(tree) + 1) * bg_height - 27;
597 596
598 597 data = []
599 598 for i, (ctx, vtx, edges) in enumerate(tree):
600 599 node = short(ctx.node())
601 600 age = templatefilters.age(ctx.date())
602 601 desc = templatefilters.firstline(ctx.description())
603 602 desc = cgi.escape(desc)
604 603 user = cgi.escape(templatefilters.person(ctx.user()))
605 604 branch = ctx.branch()
606 605 branch = branch, web.repo.branchtags().get(branch) == ctx.node()
607 606 data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))
608 607
609 608 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
610 609 lessrev=lessrev, revcountmore=revcount and 2 * revcount or 1,
611 610 revcountless=revcount / 2, downrev=downrev,
612 611 canvasheight=canvasheight, bg_height=bg_height,
613 612 jsdata=data, node=revnode_hex, changenav=changenav)
@@ -1,143 +1,141 b''
1 1 # hgweb/webutil.py - utility library for the web interface.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os
10 10 from mercurial.node import hex, nullid
11 11 from mercurial.repo import RepoError
12 12 from mercurial import util
13 13
14 14 def up(p):
15 15 if p[0] != "/":
16 16 p = "/" + p
17 17 if p[-1] == "/":
18 18 p = p[:-1]
19 19 up = os.path.dirname(p)
20 20 if up == "/":
21 21 return "/"
22 22 return up + "/"
23 23
24 24 def revnavgen(pos, pagelen, limit, nodefunc):
25 25 def seq(factor, limit=None):
26 26 if limit:
27 27 yield limit
28 28 if limit >= 20 and limit <= 40:
29 29 yield 50
30 30 else:
31 31 yield 1 * factor
32 32 yield 3 * factor
33 33 for f in seq(factor * 10):
34 34 yield f
35 35
36 36 def nav(**map):
37 37 l = []
38 38 last = 0
39 39 for f in seq(1, pagelen):
40 40 if f < pagelen or f <= last:
41 41 continue
42 42 if f > limit:
43 43 break
44 44 last = f
45 45 if pos + f < limit:
46 46 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
47 47 if pos - f >= 0:
48 48 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
49 49
50 50 try:
51 51 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
52 52
53 53 for label, node in l:
54 54 yield {"label": label, "node": node}
55 55
56 56 yield {"label": "tip", "node": "tip"}
57 57 except RepoError:
58 58 pass
59 59
60 60 return nav
61 61
62 62 def siblings(siblings=[], hiderev=None, **args):
63 63 siblings = [s for s in siblings if s.node() != nullid]
64 64 if len(siblings) == 1 and siblings[0].rev() == hiderev:
65 65 return
66 66 for s in siblings:
67 67 d = {'node': hex(s.node()), 'rev': s.rev()}
68 68 if hasattr(s, 'path'):
69 69 d['file'] = s.path()
70 70 d.update(args)
71 71 yield d
72 72
73 73 def renamelink(fctx):
74 74 r = fctx.renamed()
75 75 if r:
76 76 return [dict(file=r[0], node=hex(r[1]))]
77 77 return []
78 78
79 79 def nodetagsdict(repo, node):
80 80 return [{"name": i} for i in repo.nodetags(node)]
81 81
82 82 def nodebranchdict(repo, ctx):
83 83 branches = []
84 84 branch = ctx.branch()
85 85 # If this is an empty repo, ctx.node() == nullid,
86 86 # ctx.branch() == 'default', but branchtags() is
87 87 # an empty dict. Using dict.get avoids a traceback.
88 88 if repo.branchtags().get(branch) == ctx.node():
89 89 branches.append({"name": branch})
90 90 return branches
91 91
92 92 def nodeinbranch(repo, ctx):
93 93 branches = []
94 94 branch = ctx.branch()
95 95 if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
96 96 branches.append({"name": branch})
97 97 return branches
98 98
99 99 def nodebranchnodefault(ctx):
100 100 branches = []
101 101 branch = ctx.branch()
102 102 if branch != 'default':
103 103 branches.append({"name": branch})
104 104 return branches
105 105
106 106 def showtag(repo, tmpl, t1, node=nullid, **args):
107 107 for t in repo.nodetags(node):
108 108 yield tmpl(t1, tag=t, **args)
109 109
110 110 def cleanpath(repo, path):
111 111 path = path.lstrip('/')
112 112 return util.canonpath(repo.root, '', path)
113 113
114 114 def changectx(repo, req):
115 115 if 'node' in req.form:
116 116 changeid = req.form['node'][0]
117 117 elif 'manifest' in req.form:
118 118 changeid = req.form['manifest'][0]
119 119 else:
120 120 changeid = repo.changelog.count() - 1
121 121
122 122 try:
123 ctx = repo.changectx(changeid)
123 ctx = repo[changeid]
124 124 except RepoError:
125 125 man = repo.manifest
126 mn = man.lookup(changeid)
127 ctx = repo.changectx(man.linkrev(mn))
126 ctx = repo[man.linkrev(man.lookup(changeid))]
128 127
129 128 return ctx
130 129
131 130 def filectx(repo, req):
132 131 path = cleanpath(repo, req.form['file'][0])
133 132 if 'node' in req.form:
134 133 changeid = req.form['node'][0]
135 134 else:
136 135 changeid = req.form['filenode'][0]
137 136 try:
138 ctx = repo.changectx(changeid)
139 fctx = ctx.filectx(path)
137 fctx = repo[changeid][path]
140 138 except RepoError:
141 139 fctx = repo.filectx(path, fileid=changeid)
142 140
143 141 return fctx
@@ -1,2123 +1,2127 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import lock, transaction, stat, errno, ui
13 13 import os, revlog, time, util, extensions, hook, inspect
14 14 import match as match_
15 15
16 16 class localrepository(repo.repository):
17 17 capabilities = util.set(('lookup', 'changegroupsubset'))
18 18 supported = ('revlogv1', 'store')
19 19
20 20 def __init__(self, parentui, path=None, create=0):
21 21 repo.repository.__init__(self)
22 22 self.root = os.path.realpath(path)
23 23 self.path = os.path.join(self.root, ".hg")
24 24 self.origroot = path
25 25 self.opener = util.opener(self.path)
26 26 self.wopener = util.opener(self.root)
27 27
28 28 if not os.path.isdir(self.path):
29 29 if create:
30 30 if not os.path.exists(path):
31 31 os.mkdir(path)
32 32 os.mkdir(self.path)
33 33 requirements = ["revlogv1"]
34 34 if parentui.configbool('format', 'usestore', True):
35 35 os.mkdir(os.path.join(self.path, "store"))
36 36 requirements.append("store")
37 37 # create an invalid changelog
38 38 self.opener("00changelog.i", "a").write(
39 39 '\0\0\0\2' # represents revlogv2
40 40 ' dummy changelog to prevent using the old repo layout'
41 41 )
42 42 reqfile = self.opener("requires", "w")
43 43 for r in requirements:
44 44 reqfile.write("%s\n" % r)
45 45 reqfile.close()
46 46 else:
47 47 raise repo.RepoError(_("repository %s not found") % path)
48 48 elif create:
49 49 raise repo.RepoError(_("repository %s already exists") % path)
50 50 else:
51 51 # find requirements
52 52 try:
53 53 requirements = self.opener("requires").read().splitlines()
54 54 except IOError, inst:
55 55 if inst.errno != errno.ENOENT:
56 56 raise
57 57 requirements = []
58 58 # check them
59 59 for r in requirements:
60 60 if r not in self.supported:
61 61 raise repo.RepoError(_("requirement '%s' not supported") % r)
62 62
63 63 # setup store
64 64 if "store" in requirements:
65 65 self.encodefn = util.encodefilename
66 66 self.decodefn = util.decodefilename
67 67 self.spath = os.path.join(self.path, "store")
68 68 else:
69 69 self.encodefn = lambda x: x
70 70 self.decodefn = lambda x: x
71 71 self.spath = self.path
72 72
73 73 try:
74 74 # files in .hg/ will be created using this mode
75 75 mode = os.stat(self.spath).st_mode
76 76 # avoid some useless chmods
77 77 if (0777 & ~util._umask) == (0777 & mode):
78 78 mode = None
79 79 except OSError:
80 80 mode = None
81 81
82 82 self._createmode = mode
83 83 self.opener.createmode = mode
84 84 sopener = util.opener(self.spath)
85 85 sopener.createmode = mode
86 86 self.sopener = util.encodedopener(sopener, self.encodefn)
87 87
88 88 self.ui = ui.ui(parentui=parentui)
89 89 try:
90 90 self.ui.readconfig(self.join("hgrc"), self.root)
91 91 extensions.loadall(self.ui)
92 92 except IOError:
93 93 pass
94 94
95 95 self.tagscache = None
96 96 self._tagstypecache = None
97 97 self.branchcache = None
98 98 self._ubranchcache = None # UTF-8 version of branchcache
99 99 self._branchcachetip = None
100 100 self.nodetagscache = None
101 101 self.filterpats = {}
102 102 self._datafilters = {}
103 103 self._transref = self._lockref = self._wlockref = None
104 104
105 105 def __getattr__(self, name):
106 106 if name == 'changelog':
107 107 self.changelog = changelog.changelog(self.sopener)
108 108 self.sopener.defversion = self.changelog.version
109 109 return self.changelog
110 110 if name == 'manifest':
111 111 self.changelog
112 112 self.manifest = manifest.manifest(self.sopener)
113 113 return self.manifest
114 114 if name == 'dirstate':
115 115 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
116 116 return self.dirstate
117 117 else:
118 118 raise AttributeError, name
119 119
120 def __getitem__(self, changeid):
121 if changeid == None:
122 return context.workingctx(self)
123 return context.changectx(self, changeid)
124
120 125 def url(self):
121 126 return 'file:' + self.root
122 127
123 128 def hook(self, name, throw=False, **args):
124 129 return hook.hook(self.ui, self, name, throw, **args)
125 130
126 131 tag_disallowed = ':\r\n'
127 132
128 133 def _tag(self, names, node, message, local, user, date, parent=None,
129 134 extra={}):
130 135 use_dirstate = parent is None
131 136
132 137 if isinstance(names, str):
133 138 allchars = names
134 139 names = (names,)
135 140 else:
136 141 allchars = ''.join(names)
137 142 for c in self.tag_disallowed:
138 143 if c in allchars:
139 144 raise util.Abort(_('%r cannot be used in a tag name') % c)
140 145
141 146 for name in names:
142 147 self.hook('pretag', throw=True, node=hex(node), tag=name,
143 148 local=local)
144 149
145 150 def writetags(fp, names, munge, prevtags):
146 151 fp.seek(0, 2)
147 152 if prevtags and prevtags[-1] != '\n':
148 153 fp.write('\n')
149 154 for name in names:
150 155 m = munge and munge(name) or name
151 156 if self._tagstypecache and name in self._tagstypecache:
152 157 old = self.tagscache.get(name, nullid)
153 158 fp.write('%s %s\n' % (hex(old), m))
154 159 fp.write('%s %s\n' % (hex(node), m))
155 160 fp.close()
156 161
157 162 prevtags = ''
158 163 if local:
159 164 try:
160 165 fp = self.opener('localtags', 'r+')
161 166 except IOError, err:
162 167 fp = self.opener('localtags', 'a')
163 168 else:
164 169 prevtags = fp.read()
165 170
166 171 # local tags are stored in the current charset
167 172 writetags(fp, names, None, prevtags)
168 173 for name in names:
169 174 self.hook('tag', node=hex(node), tag=name, local=local)
170 175 return
171 176
172 177 if use_dirstate:
173 178 try:
174 179 fp = self.wfile('.hgtags', 'rb+')
175 180 except IOError, err:
176 181 fp = self.wfile('.hgtags', 'ab')
177 182 else:
178 183 prevtags = fp.read()
179 184 else:
180 185 try:
181 186 prevtags = self.filectx('.hgtags', parent).data()
182 187 except revlog.LookupError:
183 188 pass
184 189 fp = self.wfile('.hgtags', 'wb')
185 190 if prevtags:
186 191 fp.write(prevtags)
187 192
188 193 # committed tags are stored in UTF-8
189 194 writetags(fp, names, util.fromlocal, prevtags)
190 195
191 196 if use_dirstate and '.hgtags' not in self.dirstate:
192 197 self.add(['.hgtags'])
193 198
194 199 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
195 200 extra=extra)
196 201
197 202 for name in names:
198 203 self.hook('tag', node=hex(node), tag=name, local=local)
199 204
200 205 return tagnode
201 206
202 207 def tag(self, names, node, message, local, user, date):
203 208 '''tag a revision with one or more symbolic names.
204 209
205 210 names is a list of strings or, when adding a single tag, names may be a
206 211 string.
207 212
208 213 if local is True, the tags are stored in a per-repository file.
209 214 otherwise, they are stored in the .hgtags file, and a new
210 215 changeset is committed with the change.
211 216
212 217 keyword arguments:
213 218
214 219 local: whether to store tags in non-version-controlled file
215 220 (default False)
216 221
217 222 message: commit message to use if committing
218 223
219 224 user: name of user to use if committing
220 225
221 226 date: date tuple to use if committing'''
222 227
223 228 for x in self.status()[:5]:
224 229 if '.hgtags' in x:
225 230 raise util.Abort(_('working copy of .hgtags is changed '
226 231 '(please commit .hgtags manually)'))
227 232
228 233 self._tag(names, node, message, local, user, date)
229 234
230 235 def tags(self):
231 236 '''return a mapping of tag to node'''
232 237 if self.tagscache:
233 238 return self.tagscache
234 239
235 240 globaltags = {}
236 241 tagtypes = {}
237 242
238 243 def readtags(lines, fn, tagtype):
239 244 filetags = {}
240 245 count = 0
241 246
242 247 def warn(msg):
243 248 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
244 249
245 250 for l in lines:
246 251 count += 1
247 252 if not l:
248 253 continue
249 254 s = l.split(" ", 1)
250 255 if len(s) != 2:
251 256 warn(_("cannot parse entry"))
252 257 continue
253 258 node, key = s
254 259 key = util.tolocal(key.strip()) # stored in UTF-8
255 260 try:
256 261 bin_n = bin(node)
257 262 except TypeError:
258 263 warn(_("node '%s' is not well formed") % node)
259 264 continue
260 265 if bin_n not in self.changelog.nodemap:
261 266 warn(_("tag '%s' refers to unknown node") % key)
262 267 continue
263 268
264 269 h = []
265 270 if key in filetags:
266 271 n, h = filetags[key]
267 272 h.append(n)
268 273 filetags[key] = (bin_n, h)
269 274
270 275 for k, nh in filetags.items():
271 276 if k not in globaltags:
272 277 globaltags[k] = nh
273 278 tagtypes[k] = tagtype
274 279 continue
275 280
276 281 # we prefer the global tag if:
277 282 # it supercedes us OR
278 283 # mutual supercedes and it has a higher rank
279 284 # otherwise we win because we're tip-most
280 285 an, ah = nh
281 286 bn, bh = globaltags[k]
282 287 if (bn != an and an in bh and
283 288 (bn not in ah or len(bh) > len(ah))):
284 289 an = bn
285 290 ah.extend([n for n in bh if n not in ah])
286 291 globaltags[k] = an, ah
287 292 tagtypes[k] = tagtype
288 293
289 294 # read the tags file from each head, ending with the tip
290 295 f = None
291 296 for rev, node, fnode in self._hgtagsnodes():
292 297 f = (f and f.filectx(fnode) or
293 298 self.filectx('.hgtags', fileid=fnode))
294 299 readtags(f.data().splitlines(), f, "global")
295 300
296 301 try:
297 302 data = util.fromlocal(self.opener("localtags").read())
298 303 # localtags are stored in the local character set
299 304 # while the internal tag table is stored in UTF-8
300 305 readtags(data.splitlines(), "localtags", "local")
301 306 except IOError:
302 307 pass
303 308
304 309 self.tagscache = {}
305 310 self._tagstypecache = {}
306 311 for k,nh in globaltags.items():
307 312 n = nh[0]
308 313 if n != nullid:
309 314 self.tagscache[k] = n
310 315 self._tagstypecache[k] = tagtypes[k]
311 316 self.tagscache['tip'] = self.changelog.tip()
312 317 return self.tagscache
313 318
314 319 def tagtype(self, tagname):
315 320 '''
316 321 return the type of the given tag. result can be:
317 322
318 323 'local' : a local tag
319 324 'global' : a global tag
320 325 None : tag does not exist
321 326 '''
322 327
323 328 self.tags()
324 329
325 330 return self._tagstypecache.get(tagname)
326 331
327 332 def _hgtagsnodes(self):
328 333 heads = self.heads()
329 334 heads.reverse()
330 335 last = {}
331 336 ret = []
332 337 for node in heads:
333 c = self.changectx(node)
338 c = self[node]
334 339 rev = c.rev()
335 340 try:
336 341 fnode = c.filenode('.hgtags')
337 342 except revlog.LookupError:
338 343 continue
339 344 ret.append((rev, node, fnode))
340 345 if fnode in last:
341 346 ret[last[fnode]] = None
342 347 last[fnode] = len(ret) - 1
343 348 return [item for item in ret if item]
344 349
345 350 def tagslist(self):
346 351 '''return a list of tags ordered by revision'''
347 352 l = []
348 353 for t, n in self.tags().items():
349 354 try:
350 355 r = self.changelog.rev(n)
351 356 except:
352 357 r = -2 # sort to the beginning of the list if unknown
353 358 l.append((r, t, n))
354 359 l.sort()
355 360 return [(t, n) for r, t, n in l]
356 361
357 362 def nodetags(self, node):
358 363 '''return the tags associated with a node'''
359 364 if not self.nodetagscache:
360 365 self.nodetagscache = {}
361 366 for t, n in self.tags().items():
362 367 self.nodetagscache.setdefault(n, []).append(t)
363 368 return self.nodetagscache.get(node, [])
364 369
365 370 def _branchtags(self, partial, lrev):
366 371 tiprev = self.changelog.count() - 1
367 372 if lrev != tiprev:
368 373 self._updatebranchcache(partial, lrev+1, tiprev+1)
369 374 self._writebranchcache(partial, self.changelog.tip(), tiprev)
370 375
371 376 return partial
372 377
373 378 def branchtags(self):
374 379 tip = self.changelog.tip()
375 380 if self.branchcache is not None and self._branchcachetip == tip:
376 381 return self.branchcache
377 382
378 383 oldtip = self._branchcachetip
379 384 self._branchcachetip = tip
380 385 if self.branchcache is None:
381 386 self.branchcache = {} # avoid recursion in changectx
382 387 else:
383 388 self.branchcache.clear() # keep using the same dict
384 389 if oldtip is None or oldtip not in self.changelog.nodemap:
385 390 partial, last, lrev = self._readbranchcache()
386 391 else:
387 392 lrev = self.changelog.rev(oldtip)
388 393 partial = self._ubranchcache
389 394
390 395 self._branchtags(partial, lrev)
391 396
392 397 # the branch cache is stored on disk as UTF-8, but in the local
393 398 # charset internally
394 399 for k, v in partial.items():
395 400 self.branchcache[util.tolocal(k)] = v
396 401 self._ubranchcache = partial
397 402 return self.branchcache
398 403
399 404 def _readbranchcache(self):
400 405 partial = {}
401 406 try:
402 407 f = self.opener("branch.cache")
403 408 lines = f.read().split('\n')
404 409 f.close()
405 410 except (IOError, OSError):
406 411 return {}, nullid, nullrev
407 412
408 413 try:
409 414 last, lrev = lines.pop(0).split(" ", 1)
410 415 last, lrev = bin(last), int(lrev)
411 416 if not (lrev < self.changelog.count() and
412 417 self.changelog.node(lrev) == last): # sanity check
413 418 # invalidate the cache
414 419 raise ValueError('invalidating branch cache (tip differs)')
415 420 for l in lines:
416 421 if not l: continue
417 422 node, label = l.split(" ", 1)
418 423 partial[label.strip()] = bin(node)
419 424 except (KeyboardInterrupt, util.SignalInterrupt):
420 425 raise
421 426 except Exception, inst:
422 427 if self.ui.debugflag:
423 428 self.ui.warn(str(inst), '\n')
424 429 partial, last, lrev = {}, nullid, nullrev
425 430 return partial, last, lrev
426 431
427 432 def _writebranchcache(self, branches, tip, tiprev):
428 433 try:
429 434 f = self.opener("branch.cache", "w", atomictemp=True)
430 435 f.write("%s %s\n" % (hex(tip), tiprev))
431 436 for label, node in branches.iteritems():
432 437 f.write("%s %s\n" % (hex(node), label))
433 438 f.rename()
434 439 except (IOError, OSError):
435 440 pass
436 441
437 442 def _updatebranchcache(self, partial, start, end):
438 443 for r in xrange(start, end):
439 c = self.changectx(r)
444 c = self[r]
440 445 b = c.branch()
441 446 partial[b] = c.node()
442 447
443 448 def lookup(self, key):
444 449 if key == '.':
445 450 return self.dirstate.parents()[0]
446 451 elif key == 'null':
447 452 return nullid
448 453 n = self.changelog._match(key)
449 454 if n:
450 455 return n
451 456 if key in self.tags():
452 457 return self.tags()[key]
453 458 if key in self.branchtags():
454 459 return self.branchtags()[key]
455 460 n = self.changelog._partialmatch(key)
456 461 if n:
457 462 return n
458 463 try:
459 464 if len(key) == 20:
460 465 key = hex(key)
461 466 except:
462 467 pass
463 468 raise repo.RepoError(_("unknown revision '%s'") % key)
464 469
465 470 def local(self):
466 471 return True
467 472
468 473 def join(self, f):
469 474 return os.path.join(self.path, f)
470 475
471 476 def sjoin(self, f):
472 477 f = self.encodefn(f)
473 478 return os.path.join(self.spath, f)
474 479
475 480 def wjoin(self, f):
476 481 return os.path.join(self.root, f)
477 482
478 483 def rjoin(self, f):
479 484 return os.path.join(self.root, util.pconvert(f))
480 485
481 486 def file(self, f):
482 487 if f[0] == '/':
483 488 f = f[1:]
484 489 return filelog.filelog(self.sopener, f)
485 490
486 491 def changectx(self, changeid):
487 if changeid == None:
488 return context.workingctx(self)
489 return context.changectx(self, changeid)
492 return self[changeid]
490 493
491 494 def parents(self, changeid=None):
492 495 '''get list of changectxs for parents of changeid'''
493 return self.changectx(changeid).parents()
496 return self[changeid].parents()
494 497
495 498 def filectx(self, path, changeid=None, fileid=None):
496 499 """changeid can be a changeset revision, node, or tag.
497 500 fileid can be a file revision or node."""
498 501 return context.filectx(self, path, changeid, fileid)
499 502
500 503 def getcwd(self):
501 504 return self.dirstate.getcwd()
502 505
503 506 def pathto(self, f, cwd=None):
504 507 return self.dirstate.pathto(f, cwd)
505 508
506 509 def wfile(self, f, mode='r'):
507 510 return self.wopener(f, mode)
508 511
509 512 def _link(self, f):
510 513 return os.path.islink(self.wjoin(f))
511 514
512 515 def _filter(self, filter, filename, data):
513 516 if filter not in self.filterpats:
514 517 l = []
515 518 for pat, cmd in self.ui.configitems(filter):
516 519 mf = util.matcher(self.root, "", [pat], [], [])[1]
517 520 fn = None
518 521 params = cmd
519 522 for name, filterfn in self._datafilters.iteritems():
520 523 if cmd.startswith(name):
521 524 fn = filterfn
522 525 params = cmd[len(name):].lstrip()
523 526 break
524 527 if not fn:
525 528 fn = lambda s, c, **kwargs: util.filter(s, c)
526 529 # Wrap old filters not supporting keyword arguments
527 530 if not inspect.getargspec(fn)[2]:
528 531 oldfn = fn
529 532 fn = lambda s, c, **kwargs: oldfn(s, c)
530 533 l.append((mf, fn, params))
531 534 self.filterpats[filter] = l
532 535
533 536 for mf, fn, cmd in self.filterpats[filter]:
534 537 if mf(filename):
535 538 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
536 539 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
537 540 break
538 541
539 542 return data
540 543
541 544 def adddatafilter(self, name, filter):
542 545 self._datafilters[name] = filter
543 546
544 547 def wread(self, filename):
545 548 if self._link(filename):
546 549 data = os.readlink(self.wjoin(filename))
547 550 else:
548 551 data = self.wopener(filename, 'r').read()
549 552 return self._filter("encode", filename, data)
550 553
551 554 def wwrite(self, filename, data, flags):
552 555 data = self._filter("decode", filename, data)
553 556 try:
554 557 os.unlink(self.wjoin(filename))
555 558 except OSError:
556 559 pass
557 560 self.wopener(filename, 'w').write(data)
558 561 util.set_flags(self.wjoin(filename), flags)
559 562
560 563 def wwritedata(self, filename, data):
561 564 return self._filter("decode", filename, data)
562 565
563 566 def transaction(self):
564 567 if self._transref and self._transref():
565 568 return self._transref().nest()
566 569
567 570 # abort here if the journal already exists
568 571 if os.path.exists(self.sjoin("journal")):
569 572 raise repo.RepoError(_("journal already exists - run hg recover"))
570 573
571 574 # save dirstate for rollback
572 575 try:
573 576 ds = self.opener("dirstate").read()
574 577 except IOError:
575 578 ds = ""
576 579 self.opener("journal.dirstate", "w").write(ds)
577 580 self.opener("journal.branch", "w").write(self.dirstate.branch())
578 581
579 582 renames = [(self.sjoin("journal"), self.sjoin("undo")),
580 583 (self.join("journal.dirstate"), self.join("undo.dirstate")),
581 584 (self.join("journal.branch"), self.join("undo.branch"))]
582 585 tr = transaction.transaction(self.ui.warn, self.sopener,
583 586 self.sjoin("journal"),
584 587 aftertrans(renames),
585 588 self._createmode)
586 589 self._transref = weakref.ref(tr)
587 590 return tr
588 591
589 592 def recover(self):
590 593 l = self.lock()
591 594 try:
592 595 if os.path.exists(self.sjoin("journal")):
593 596 self.ui.status(_("rolling back interrupted transaction\n"))
594 597 transaction.rollback(self.sopener, self.sjoin("journal"))
595 598 self.invalidate()
596 599 return True
597 600 else:
598 601 self.ui.warn(_("no interrupted transaction available\n"))
599 602 return False
600 603 finally:
601 604 del l
602 605
603 606 def rollback(self):
604 607 wlock = lock = None
605 608 try:
606 609 wlock = self.wlock()
607 610 lock = self.lock()
608 611 if os.path.exists(self.sjoin("undo")):
609 612 self.ui.status(_("rolling back last transaction\n"))
610 613 transaction.rollback(self.sopener, self.sjoin("undo"))
611 614 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
612 615 try:
613 616 branch = self.opener("undo.branch").read()
614 617 self.dirstate.setbranch(branch)
615 618 except IOError:
616 619 self.ui.warn(_("Named branch could not be reset, "
617 620 "current branch still is: %s\n")
618 621 % util.tolocal(self.dirstate.branch()))
619 622 self.invalidate()
620 623 self.dirstate.invalidate()
621 624 else:
622 625 self.ui.warn(_("no rollback information available\n"))
623 626 finally:
624 627 del lock, wlock
625 628
626 629 def invalidate(self):
627 630 for a in "changelog manifest".split():
628 631 if a in self.__dict__:
629 632 delattr(self, a)
630 633 self.tagscache = None
631 634 self._tagstypecache = None
632 635 self.nodetagscache = None
633 636 self.branchcache = None
634 637 self._ubranchcache = None
635 638 self._branchcachetip = None
636 639
637 640 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
638 641 try:
639 642 l = lock.lock(lockname, 0, releasefn, desc=desc)
640 643 except lock.LockHeld, inst:
641 644 if not wait:
642 645 raise
643 646 self.ui.warn(_("waiting for lock on %s held by %r\n") %
644 647 (desc, inst.locker))
645 648 # default to 600 seconds timeout
646 649 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
647 650 releasefn, desc=desc)
648 651 if acquirefn:
649 652 acquirefn()
650 653 return l
651 654
652 655 def lock(self, wait=True):
653 656 if self._lockref and self._lockref():
654 657 return self._lockref()
655 658
656 659 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
657 660 _('repository %s') % self.origroot)
658 661 self._lockref = weakref.ref(l)
659 662 return l
660 663
661 664 def wlock(self, wait=True):
662 665 if self._wlockref and self._wlockref():
663 666 return self._wlockref()
664 667
665 668 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
666 669 self.dirstate.invalidate, _('working directory of %s') %
667 670 self.origroot)
668 671 self._wlockref = weakref.ref(l)
669 672 return l
670 673
671 674 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
672 675 """
673 676 commit an individual file as part of a larger transaction
674 677 """
675 678
676 679 fn = fctx.path()
677 680 t = fctx.data()
678 681 fl = self.file(fn)
679 682 fp1 = manifest1.get(fn, nullid)
680 683 fp2 = manifest2.get(fn, nullid)
681 684
682 685 meta = {}
683 686 cp = fctx.renamed()
684 687 if cp and cp[0] != fn:
685 688 cp = cp[0]
686 689 # Mark the new revision of this file as a copy of another
687 690 # file. This copy data will effectively act as a parent
688 691 # of this new revision. If this is a merge, the first
689 692 # parent will be the nullid (meaning "look up the copy data")
690 693 # and the second one will be the other parent. For example:
691 694 #
692 695 # 0 --- 1 --- 3 rev1 changes file foo
693 696 # \ / rev2 renames foo to bar and changes it
694 697 # \- 2 -/ rev3 should have bar with all changes and
695 698 # should record that bar descends from
696 699 # bar in rev2 and foo in rev1
697 700 #
698 701 # this allows this merge to succeed:
699 702 #
700 703 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
701 704 # \ / merging rev3 and rev4 should use bar@rev2
702 705 # \- 2 --- 4 as the merge base
703 706 #
704 707 meta["copy"] = cp
705 708 if not manifest2: # not a branch merge
706 709 meta["copyrev"] = hex(manifest1[cp])
707 710 fp2 = nullid
708 711 elif fp2 != nullid: # copied on remote side
709 712 meta["copyrev"] = hex(manifest1[cp])
710 713 elif fp1 != nullid: # copied on local side, reversed
711 714 meta["copyrev"] = hex(manifest2[cp])
712 715 fp2 = fp1
713 716 elif cp in manifest2: # directory rename on local side
714 717 meta["copyrev"] = hex(manifest2[cp])
715 718 else: # directory rename on remote side
716 719 meta["copyrev"] = hex(manifest1[cp])
717 720 self.ui.debug(_(" %s: copy %s:%s\n") %
718 721 (fn, cp, meta["copyrev"]))
719 722 fp1 = nullid
720 723 elif fp2 != nullid:
721 724 # is one parent an ancestor of the other?
722 725 fpa = fl.ancestor(fp1, fp2)
723 726 if fpa == fp1:
724 727 fp1, fp2 = fp2, nullid
725 728 elif fpa == fp2:
726 729 fp2 = nullid
727 730
728 731 # is the file unmodified from the parent? report existing entry
729 732 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
730 733 return fp1
731 734
732 735 changelist.append(fn)
733 736 return fl.add(t, meta, tr, linkrev, fp1, fp2)
734 737
735 738 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
736 739 if p1 is None:
737 740 p1, p2 = self.dirstate.parents()
738 741 return self.commit(files=files, text=text, user=user, date=date,
739 742 p1=p1, p2=p2, extra=extra, empty_ok=True)
740 743
741 744 def commit(self, files=None, text="", user=None, date=None,
742 745 match=None, force=False, force_editor=False,
743 746 p1=None, p2=None, extra={}, empty_ok=False):
744 747 wlock = lock = None
745 748 if files:
746 749 files = util.unique(files)
747 750 try:
748 751 wlock = self.wlock()
749 752 lock = self.lock()
750 753 use_dirstate = (p1 is None) # not rawcommit
751 754
752 755 if use_dirstate:
753 756 p1, p2 = self.dirstate.parents()
754 757 update_dirstate = True
755 758
756 759 if (not force and p2 != nullid and
757 760 (match and (match.files() or match.anypats()))):
758 761 raise util.Abort(_('cannot partially commit a merge '
759 762 '(do not specify files or patterns)'))
760 763
761 764 if files:
762 765 modified, removed = [], []
763 766 for f in files:
764 767 s = self.dirstate[f]
765 768 if s in 'nma':
766 769 modified.append(f)
767 770 elif s == 'r':
768 771 removed.append(f)
769 772 else:
770 773 self.ui.warn(_("%s not tracked!\n") % f)
771 774 changes = [modified, [], removed, [], []]
772 775 else:
773 776 changes = self.status(match=match)
774 777 else:
775 778 p1, p2 = p1, p2 or nullid
776 779 update_dirstate = (self.dirstate.parents()[0] == p1)
777 780 changes = [files, [], [], [], []]
778 781
779 782 wctx = context.workingctx(self, (p1, p2), text, user, date,
780 783 extra, changes)
781 784 return self._commitctx(wctx, force, force_editor, empty_ok,
782 785 use_dirstate, update_dirstate)
783 786 finally:
784 787 del lock, wlock
785 788
786 789 def commitctx(self, ctx):
787 790 wlock = lock = None
788 791 try:
789 792 wlock = self.wlock()
790 793 lock = self.lock()
791 794 return self._commitctx(ctx, force=True, force_editor=False,
792 795 empty_ok=True, use_dirstate=False,
793 796 update_dirstate=False)
794 797 finally:
795 798 del lock, wlock
796 799
797 800 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
798 801 use_dirstate=True, update_dirstate=True):
799 802 tr = None
800 803 valid = 0 # don't save the dirstate if this isn't set
801 804 try:
802 805 commit = wctx.modified() + wctx.added()
803 806 remove = wctx.removed()
804 807 extra = wctx.extra().copy()
805 808 branchname = extra['branch']
806 809 user = wctx.user()
807 810 text = wctx.description()
808 811
809 812 p1, p2 = [p.node() for p in wctx.parents()]
810 813 c1 = self.changelog.read(p1)
811 814 c2 = self.changelog.read(p2)
812 815 m1 = self.manifest.read(c1[0]).copy()
813 816 m2 = self.manifest.read(c2[0])
814 817
815 818 if use_dirstate:
816 819 oldname = c1[5].get("branch") # stored in UTF-8
817 820 if (not commit and not remove and not force and p2 == nullid
818 821 and branchname == oldname):
819 822 self.ui.status(_("nothing changed\n"))
820 823 return None
821 824
822 825 xp1 = hex(p1)
823 826 if p2 == nullid: xp2 = ''
824 827 else: xp2 = hex(p2)
825 828
826 829 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
827 830
828 831 tr = self.transaction()
829 832 trp = weakref.proxy(tr)
830 833
831 834 # check in files
832 835 new = {}
833 836 changed = []
834 837 linkrev = self.changelog.count()
835 838 commit.sort()
836 839 for f in commit:
837 840 self.ui.note(f + "\n")
838 841 try:
839 842 fctx = wctx.filectx(f)
840 843 newflags = fctx.flags()
841 844 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
842 845 if ((not changed or changed[-1] != f) and
843 846 m2.get(f) != new[f]):
844 847 # mention the file in the changelog if some
845 848 # flag changed, even if there was no content
846 849 # change.
847 850 if m1.flags(f) != newflags:
848 851 changed.append(f)
849 852 m1.set(f, newflags)
850 853 if use_dirstate:
851 854 self.dirstate.normal(f)
852 855
853 856 except (OSError, IOError):
854 857 if use_dirstate:
855 858 self.ui.warn(_("trouble committing %s!\n") % f)
856 859 raise
857 860 else:
858 861 remove.append(f)
859 862
860 863 # update manifest
861 864 m1.update(new)
862 865 remove.sort()
863 866 removed = []
864 867
865 868 for f in remove:
866 869 if f in m1:
867 870 del m1[f]
868 871 removed.append(f)
869 872 elif f in m2:
870 873 removed.append(f)
871 874 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
872 875 (new, removed))
873 876
874 877 # add changeset
875 878 if (not empty_ok and not text) or force_editor:
876 879 edittext = []
877 880 if text:
878 881 edittext.append(text)
879 882 edittext.append("")
880 883 edittext.append(_("HG: Enter commit message."
881 884 " Lines beginning with 'HG:' are removed."))
882 885 edittext.append("HG: --")
883 886 edittext.append("HG: user: %s" % user)
884 887 if p2 != nullid:
885 888 edittext.append("HG: branch merge")
886 889 if branchname:
887 890 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
888 891 edittext.extend(["HG: changed %s" % f for f in changed])
889 892 edittext.extend(["HG: removed %s" % f for f in removed])
890 893 if not changed and not remove:
891 894 edittext.append("HG: no files changed")
892 895 edittext.append("")
893 896 # run editor in the repository root
894 897 olddir = os.getcwd()
895 898 os.chdir(self.root)
896 899 text = self.ui.edit("\n".join(edittext), user)
897 900 os.chdir(olddir)
898 901
899 902 lines = [line.rstrip() for line in text.rstrip().splitlines()]
900 903 while lines and not lines[0]:
901 904 del lines[0]
902 905 if not lines and use_dirstate:
903 906 raise util.Abort(_("empty commit message"))
904 907 text = '\n'.join(lines)
905 908
906 909 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
907 910 user, wctx.date(), extra)
908 911 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
909 912 parent2=xp2)
910 913 tr.close()
911 914
912 915 if self.branchcache:
913 916 self.branchtags()
914 917
915 918 if use_dirstate or update_dirstate:
916 919 self.dirstate.setparents(n)
917 920 if use_dirstate:
918 921 for f in removed:
919 922 self.dirstate.forget(f)
920 923 valid = 1 # our dirstate updates are complete
921 924
922 925 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
923 926 return n
924 927 finally:
925 928 if not valid: # don't save our updated dirstate
926 929 self.dirstate.invalidate()
927 930 del tr
928 931
929 932 def walk(self, match, node=None):
930 933 '''
931 934 walk recursively through the directory tree or a given
932 935 changeset, finding all files matched by the match
933 936 function
934 937 '''
935 938
936 939 if node:
937 940 fdict = dict.fromkeys(match.files())
938 941 # for dirstate.walk, files=['.'] means "walk the whole tree".
939 942 # follow that here, too
940 943 fdict.pop('.', None)
941 944 mdict = self.manifest.read(self.changelog.read(node)[0])
942 945 mfiles = mdict.keys()
943 946 mfiles.sort()
944 947 for fn in mfiles:
945 948 for ffn in fdict:
946 949 # match if the file is the exact name or a directory
947 950 if ffn == fn or fn.startswith("%s/" % ffn):
948 951 del fdict[ffn]
949 952 break
950 953 if match(fn):
951 954 yield fn
952 955 ffiles = fdict.keys()
953 956 ffiles.sort()
954 957 for fn in ffiles:
955 958 if match.bad(fn, 'No such file in rev ' + short(node)) \
956 959 and match(fn):
957 960 yield fn
958 961 else:
959 962 for fn in self.dirstate.walk(match):
960 963 yield fn
961 964
962 965 def status(self, node1=None, node2=None, match=None,
963 966 list_ignored=False, list_clean=False, list_unknown=True):
964 967 """return status of files between two nodes or node and working directory
965 968
966 969 If node1 is None, use the first dirstate parent instead.
967 970 If node2 is None, compare node1 with working directory.
968 971 """
969 972
970 973 def fcmp(fn, getnode):
971 974 t1 = self.wread(fn)
972 975 return self.file(fn).cmp(getnode(fn), t1)
973 976
974 977 def mfmatches(node):
975 978 change = self.changelog.read(node)
976 979 mf = self.manifest.read(change[0]).copy()
977 980 for fn in mf.keys():
978 981 if not match(fn):
979 982 del mf[fn]
980 983 return mf
981 984
982 985 if not match:
983 986 match = match_.always(self.root, self.getcwd())
984 987
985 988 modified, added, removed, deleted, unknown = [], [], [], [], []
986 989 ignored, clean = [], []
987 990
988 991 compareworking = False
989 992 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
990 993 compareworking = True
991 994
992 995 if not compareworking:
993 996 # read the manifest from node1 before the manifest from node2,
994 997 # so that we'll hit the manifest cache if we're going through
995 998 # all the revisions in parent->child order.
996 999 mf1 = mfmatches(node1)
997 1000
998 1001 # are we comparing the working directory?
999 1002 if not node2:
1000 1003 (lookup, modified, added, removed, deleted, unknown,
1001 1004 ignored, clean) = self.dirstate.status(match, list_ignored,
1002 1005 list_clean, list_unknown)
1003 1006 # are we comparing working dir against its parent?
1004 1007 if compareworking:
1005 1008 if lookup:
1006 1009 fixup = []
1007 1010 # do a full compare of any files that might have changed
1008 ctx = self.changectx('')
1011 ctx = self['.']
1009 1012 ff = self.dirstate.flagfunc(ctx.flags)
1010 1013 for f in lookup:
1011 1014 if (f not in ctx or ff(f) != ctx.flags(f)
1012 1015 or ctx[f].cmp(self.wread(f))):
1013 1016 modified.append(f)
1014 1017 else:
1015 1018 fixup.append(f)
1016 1019 if list_clean:
1017 1020 clean.append(f)
1018 1021
1019 1022 # update dirstate for files that are actually clean
1020 1023 if fixup:
1021 1024 wlock = None
1022 1025 try:
1023 1026 try:
1024 1027 wlock = self.wlock(False)
1025 1028 except lock.LockException:
1026 1029 pass
1027 1030 if wlock:
1028 1031 for f in fixup:
1029 1032 self.dirstate.normal(f)
1030 1033 finally:
1031 1034 del wlock
1032 1035 else:
1033 1036 # we are comparing working dir against non-parent
1034 1037 # generate a pseudo-manifest for the working dir
1035 1038 # XXX: create it in dirstate.py ?
1036 1039 mf2 = mfmatches(self.dirstate.parents()[0])
1037 1040 ff = self.dirstate.flagfunc(mf2.flags)
1038 1041 for f in lookup + modified + added:
1039 1042 mf2[f] = ""
1040 1043 mf2.set(f, ff(f))
1041 1044 for f in removed:
1042 1045 if f in mf2:
1043 1046 del mf2[f]
1044 1047
1045 1048 else:
1046 1049 # we are comparing two revisions
1047 1050 mf2 = mfmatches(node2)
1048 1051
1049 1052 if not compareworking:
1050 1053 # flush lists from dirstate before comparing manifests
1051 1054 modified, added, clean = [], [], []
1052 1055
1053 1056 # make sure to sort the files so we talk to the disk in a
1054 1057 # reasonable order
1055 1058 mf2keys = mf2.keys()
1056 1059 mf2keys.sort()
1057 1060 getnode = lambda fn: mf1.get(fn, nullid)
1058 1061 for fn in mf2keys:
1059 1062 if fn in mf1:
1060 1063 if (mf1.flags(fn) != mf2.flags(fn) or
1061 1064 (mf1[fn] != mf2[fn] and
1062 1065 (mf2[fn] != "" or fcmp(fn, getnode)))):
1063 1066 modified.append(fn)
1064 1067 elif list_clean:
1065 1068 clean.append(fn)
1066 1069 del mf1[fn]
1067 1070 else:
1068 1071 added.append(fn)
1069 1072
1070 1073 removed = mf1.keys()
1071 1074
1072 1075 # sort and return results:
1073 1076 for l in modified, added, removed, deleted, unknown, ignored, clean:
1074 1077 l.sort()
1075 1078 return (modified, added, removed, deleted, unknown, ignored, clean)
1076 1079
1077 1080 def add(self, list):
1078 1081 wlock = self.wlock()
1079 1082 try:
1080 1083 rejected = []
1081 1084 for f in list:
1082 1085 p = self.wjoin(f)
1083 1086 try:
1084 1087 st = os.lstat(p)
1085 1088 except:
1086 1089 self.ui.warn(_("%s does not exist!\n") % f)
1087 1090 rejected.append(f)
1088 1091 continue
1089 1092 if st.st_size > 10000000:
1090 1093 self.ui.warn(_("%s: files over 10MB may cause memory and"
1091 1094 " performance problems\n"
1092 1095 "(use 'hg revert %s' to unadd the file)\n")
1093 1096 % (f, f))
1094 1097 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1095 1098 self.ui.warn(_("%s not added: only files and symlinks "
1096 1099 "supported currently\n") % f)
1097 1100 rejected.append(p)
1098 1101 elif self.dirstate[f] in 'amn':
1099 1102 self.ui.warn(_("%s already tracked!\n") % f)
1100 1103 elif self.dirstate[f] == 'r':
1101 1104 self.dirstate.normallookup(f)
1102 1105 else:
1103 1106 self.dirstate.add(f)
1104 1107 return rejected
1105 1108 finally:
1106 1109 del wlock
1107 1110
1108 1111 def forget(self, list):
1109 1112 wlock = self.wlock()
1110 1113 try:
1111 1114 for f in list:
1112 1115 if self.dirstate[f] != 'a':
1113 1116 self.ui.warn(_("%s not added!\n") % f)
1114 1117 else:
1115 1118 self.dirstate.forget(f)
1116 1119 finally:
1117 1120 del wlock
1118 1121
1119 1122 def remove(self, list, unlink=False):
1120 1123 wlock = None
1121 1124 try:
1122 1125 if unlink:
1123 1126 for f in list:
1124 1127 try:
1125 1128 util.unlink(self.wjoin(f))
1126 1129 except OSError, inst:
1127 1130 if inst.errno != errno.ENOENT:
1128 1131 raise
1129 1132 wlock = self.wlock()
1130 1133 for f in list:
1131 1134 if unlink and os.path.exists(self.wjoin(f)):
1132 1135 self.ui.warn(_("%s still exists!\n") % f)
1133 1136 elif self.dirstate[f] == 'a':
1134 1137 self.dirstate.forget(f)
1135 1138 elif f not in self.dirstate:
1136 1139 self.ui.warn(_("%s not tracked!\n") % f)
1137 1140 else:
1138 1141 self.dirstate.remove(f)
1139 1142 finally:
1140 1143 del wlock
1141 1144
1142 1145 def undelete(self, list):
1143 1146 wlock = None
1144 1147 try:
1145 1148 manifests = [self.manifest.read(self.changelog.read(p)[0])
1146 1149 for p in self.dirstate.parents() if p != nullid]
1147 1150 wlock = self.wlock()
1148 1151 for f in list:
1149 1152 if self.dirstate[f] != 'r':
1150 1153 self.ui.warn("%s not removed!\n" % f)
1151 1154 else:
1152 1155 m = f in manifests[0] and manifests[0] or manifests[1]
1153 1156 t = self.file(f).read(m[f])
1154 1157 self.wwrite(f, t, m.flags(f))
1155 1158 self.dirstate.normal(f)
1156 1159 finally:
1157 1160 del wlock
1158 1161
1159 1162 def copy(self, source, dest):
1160 1163 wlock = None
1161 1164 try:
1162 1165 p = self.wjoin(dest)
1163 1166 if not (os.path.exists(p) or os.path.islink(p)):
1164 1167 self.ui.warn(_("%s does not exist!\n") % dest)
1165 1168 elif not (os.path.isfile(p) or os.path.islink(p)):
1166 1169 self.ui.warn(_("copy failed: %s is not a file or a "
1167 1170 "symbolic link\n") % dest)
1168 1171 else:
1169 1172 wlock = self.wlock()
1170 1173 if dest not in self.dirstate:
1171 1174 self.dirstate.add(dest)
1172 1175 self.dirstate.copy(source, dest)
1173 1176 finally:
1174 1177 del wlock
1175 1178
1176 1179 def heads(self, start=None):
1177 1180 heads = self.changelog.heads(start)
1178 1181 # sort the output in rev descending order
1179 1182 heads = [(-self.changelog.rev(h), h) for h in heads]
1180 1183 heads.sort()
1181 1184 return [n for (r, n) in heads]
1182 1185
1183 1186 def branchheads(self, branch=None, start=None):
1184 branch = branch is None and self.changectx(None).branch() or branch
1187 if branch is None:
1188 branch = self[None].branch()
1185 1189 branches = self.branchtags()
1186 1190 if branch not in branches:
1187 1191 return []
1188 1192 # The basic algorithm is this:
1189 1193 #
1190 1194 # Start from the branch tip since there are no later revisions that can
1191 1195 # possibly be in this branch, and the tip is a guaranteed head.
1192 1196 #
1193 1197 # Remember the tip's parents as the first ancestors, since these by
1194 1198 # definition are not heads.
1195 1199 #
1196 1200 # Step backwards from the brach tip through all the revisions. We are
1197 1201 # guaranteed by the rules of Mercurial that we will now be visiting the
1198 1202 # nodes in reverse topological order (children before parents).
1199 1203 #
1200 1204 # If a revision is one of the ancestors of a head then we can toss it
1201 1205 # out of the ancestors set (we've already found it and won't be
1202 1206 # visiting it again) and put its parents in the ancestors set.
1203 1207 #
1204 1208 # Otherwise, if a revision is in the branch it's another head, since it
1205 1209 # wasn't in the ancestor list of an existing head. So add it to the
1206 1210 # head list, and add its parents to the ancestor list.
1207 1211 #
1208 1212 # If it is not in the branch ignore it.
1209 1213 #
1210 1214 # Once we have a list of heads, use nodesbetween to filter out all the
1211 1215 # heads that cannot be reached from startrev. There may be a more
1212 1216 # efficient way to do this as part of the previous algorithm.
1213 1217
1214 1218 set = util.set
1215 1219 heads = [self.changelog.rev(branches[branch])]
1216 1220 # Don't care if ancestors contains nullrev or not.
1217 1221 ancestors = set(self.changelog.parentrevs(heads[0]))
1218 1222 for rev in xrange(heads[0] - 1, nullrev, -1):
1219 1223 if rev in ancestors:
1220 1224 ancestors.update(self.changelog.parentrevs(rev))
1221 1225 ancestors.remove(rev)
1222 elif self.changectx(rev).branch() == branch:
1226 elif self[rev].branch() == branch:
1223 1227 heads.append(rev)
1224 1228 ancestors.update(self.changelog.parentrevs(rev))
1225 1229 heads = [self.changelog.node(rev) for rev in heads]
1226 1230 if start is not None:
1227 1231 heads = self.changelog.nodesbetween([start], heads)[2]
1228 1232 return heads
1229 1233
1230 1234 def branches(self, nodes):
1231 1235 if not nodes:
1232 1236 nodes = [self.changelog.tip()]
1233 1237 b = []
1234 1238 for n in nodes:
1235 1239 t = n
1236 1240 while 1:
1237 1241 p = self.changelog.parents(n)
1238 1242 if p[1] != nullid or p[0] == nullid:
1239 1243 b.append((t, n, p[0], p[1]))
1240 1244 break
1241 1245 n = p[0]
1242 1246 return b
1243 1247
1244 1248 def between(self, pairs):
1245 1249 r = []
1246 1250
1247 1251 for top, bottom in pairs:
1248 1252 n, l, i = top, [], 0
1249 1253 f = 1
1250 1254
1251 1255 while n != bottom:
1252 1256 p = self.changelog.parents(n)[0]
1253 1257 if i == f:
1254 1258 l.append(n)
1255 1259 f = f * 2
1256 1260 n = p
1257 1261 i += 1
1258 1262
1259 1263 r.append(l)
1260 1264
1261 1265 return r
1262 1266
1263 1267 def findincoming(self, remote, base=None, heads=None, force=False):
1264 1268 """Return list of roots of the subsets of missing nodes from remote
1265 1269
1266 1270 If base dict is specified, assume that these nodes and their parents
1267 1271 exist on the remote side and that no child of a node of base exists
1268 1272 in both remote and self.
1269 1273 Furthermore base will be updated to include the nodes that exists
1270 1274 in self and remote but no children exists in self and remote.
1271 1275 If a list of heads is specified, return only nodes which are heads
1272 1276 or ancestors of these heads.
1273 1277
1274 1278 All the ancestors of base are in self and in remote.
1275 1279 All the descendants of the list returned are missing in self.
1276 1280 (and so we know that the rest of the nodes are missing in remote, see
1277 1281 outgoing)
1278 1282 """
1279 1283 m = self.changelog.nodemap
1280 1284 search = []
1281 1285 fetch = {}
1282 1286 seen = {}
1283 1287 seenbranch = {}
1284 1288 if base == None:
1285 1289 base = {}
1286 1290
1287 1291 if not heads:
1288 1292 heads = remote.heads()
1289 1293
1290 1294 if self.changelog.tip() == nullid:
1291 1295 base[nullid] = 1
1292 1296 if heads != [nullid]:
1293 1297 return [nullid]
1294 1298 return []
1295 1299
1296 1300 # assume we're closer to the tip than the root
1297 1301 # and start by examining the heads
1298 1302 self.ui.status(_("searching for changes\n"))
1299 1303
1300 1304 unknown = []
1301 1305 for h in heads:
1302 1306 if h not in m:
1303 1307 unknown.append(h)
1304 1308 else:
1305 1309 base[h] = 1
1306 1310
1307 1311 if not unknown:
1308 1312 return []
1309 1313
1310 1314 req = dict.fromkeys(unknown)
1311 1315 reqcnt = 0
1312 1316
1313 1317 # search through remote branches
1314 1318 # a 'branch' here is a linear segment of history, with four parts:
1315 1319 # head, root, first parent, second parent
1316 1320 # (a branch always has two parents (or none) by definition)
1317 1321 unknown = remote.branches(unknown)
1318 1322 while unknown:
1319 1323 r = []
1320 1324 while unknown:
1321 1325 n = unknown.pop(0)
1322 1326 if n[0] in seen:
1323 1327 continue
1324 1328
1325 1329 self.ui.debug(_("examining %s:%s\n")
1326 1330 % (short(n[0]), short(n[1])))
1327 1331 if n[0] == nullid: # found the end of the branch
1328 1332 pass
1329 1333 elif n in seenbranch:
1330 1334 self.ui.debug(_("branch already found\n"))
1331 1335 continue
1332 1336 elif n[1] and n[1] in m: # do we know the base?
1333 1337 self.ui.debug(_("found incomplete branch %s:%s\n")
1334 1338 % (short(n[0]), short(n[1])))
1335 1339 search.append(n) # schedule branch range for scanning
1336 1340 seenbranch[n] = 1
1337 1341 else:
1338 1342 if n[1] not in seen and n[1] not in fetch:
1339 1343 if n[2] in m and n[3] in m:
1340 1344 self.ui.debug(_("found new changeset %s\n") %
1341 1345 short(n[1]))
1342 1346 fetch[n[1]] = 1 # earliest unknown
1343 1347 for p in n[2:4]:
1344 1348 if p in m:
1345 1349 base[p] = 1 # latest known
1346 1350
1347 1351 for p in n[2:4]:
1348 1352 if p not in req and p not in m:
1349 1353 r.append(p)
1350 1354 req[p] = 1
1351 1355 seen[n[0]] = 1
1352 1356
1353 1357 if r:
1354 1358 reqcnt += 1
1355 1359 self.ui.debug(_("request %d: %s\n") %
1356 1360 (reqcnt, " ".join(map(short, r))))
1357 1361 for p in xrange(0, len(r), 10):
1358 1362 for b in remote.branches(r[p:p+10]):
1359 1363 self.ui.debug(_("received %s:%s\n") %
1360 1364 (short(b[0]), short(b[1])))
1361 1365 unknown.append(b)
1362 1366
1363 1367 # do binary search on the branches we found
1364 1368 while search:
1365 1369 n = search.pop(0)
1366 1370 reqcnt += 1
1367 1371 l = remote.between([(n[0], n[1])])[0]
1368 1372 l.append(n[1])
1369 1373 p = n[0]
1370 1374 f = 1
1371 1375 for i in l:
1372 1376 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1373 1377 if i in m:
1374 1378 if f <= 2:
1375 1379 self.ui.debug(_("found new branch changeset %s\n") %
1376 1380 short(p))
1377 1381 fetch[p] = 1
1378 1382 base[i] = 1
1379 1383 else:
1380 1384 self.ui.debug(_("narrowed branch search to %s:%s\n")
1381 1385 % (short(p), short(i)))
1382 1386 search.append((p, i))
1383 1387 break
1384 1388 p, f = i, f * 2
1385 1389
1386 1390 # sanity check our fetch list
1387 1391 for f in fetch.keys():
1388 1392 if f in m:
1389 1393 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1390 1394
1391 1395 if base.keys() == [nullid]:
1392 1396 if force:
1393 1397 self.ui.warn(_("warning: repository is unrelated\n"))
1394 1398 else:
1395 1399 raise util.Abort(_("repository is unrelated"))
1396 1400
1397 1401 self.ui.debug(_("found new changesets starting at ") +
1398 1402 " ".join([short(f) for f in fetch]) + "\n")
1399 1403
1400 1404 self.ui.debug(_("%d total queries\n") % reqcnt)
1401 1405
1402 1406 return fetch.keys()
1403 1407
1404 1408 def findoutgoing(self, remote, base=None, heads=None, force=False):
1405 1409 """Return list of nodes that are roots of subsets not in remote
1406 1410
1407 1411 If base dict is specified, assume that these nodes and their parents
1408 1412 exist on the remote side.
1409 1413 If a list of heads is specified, return only nodes which are heads
1410 1414 or ancestors of these heads, and return a second element which
1411 1415 contains all remote heads which get new children.
1412 1416 """
1413 1417 if base == None:
1414 1418 base = {}
1415 1419 self.findincoming(remote, base, heads, force=force)
1416 1420
1417 1421 self.ui.debug(_("common changesets up to ")
1418 1422 + " ".join(map(short, base.keys())) + "\n")
1419 1423
1420 1424 remain = dict.fromkeys(self.changelog.nodemap)
1421 1425
1422 1426 # prune everything remote has from the tree
1423 1427 del remain[nullid]
1424 1428 remove = base.keys()
1425 1429 while remove:
1426 1430 n = remove.pop(0)
1427 1431 if n in remain:
1428 1432 del remain[n]
1429 1433 for p in self.changelog.parents(n):
1430 1434 remove.append(p)
1431 1435
1432 1436 # find every node whose parents have been pruned
1433 1437 subset = []
1434 1438 # find every remote head that will get new children
1435 1439 updated_heads = {}
1436 1440 for n in remain:
1437 1441 p1, p2 = self.changelog.parents(n)
1438 1442 if p1 not in remain and p2 not in remain:
1439 1443 subset.append(n)
1440 1444 if heads:
1441 1445 if p1 in heads:
1442 1446 updated_heads[p1] = True
1443 1447 if p2 in heads:
1444 1448 updated_heads[p2] = True
1445 1449
1446 1450 # this is the set of all roots we have to push
1447 1451 if heads:
1448 1452 return subset, updated_heads.keys()
1449 1453 else:
1450 1454 return subset
1451 1455
1452 1456 def pull(self, remote, heads=None, force=False):
1453 1457 lock = self.lock()
1454 1458 try:
1455 1459 fetch = self.findincoming(remote, heads=heads, force=force)
1456 1460 if fetch == [nullid]:
1457 1461 self.ui.status(_("requesting all changes\n"))
1458 1462
1459 1463 if not fetch:
1460 1464 self.ui.status(_("no changes found\n"))
1461 1465 return 0
1462 1466
1463 1467 if heads is None:
1464 1468 cg = remote.changegroup(fetch, 'pull')
1465 1469 else:
1466 1470 if 'changegroupsubset' not in remote.capabilities:
1467 1471 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1468 1472 cg = remote.changegroupsubset(fetch, heads, 'pull')
1469 1473 return self.addchangegroup(cg, 'pull', remote.url())
1470 1474 finally:
1471 1475 del lock
1472 1476
1473 1477 def push(self, remote, force=False, revs=None):
1474 1478 # there are two ways to push to remote repo:
1475 1479 #
1476 1480 # addchangegroup assumes local user can lock remote
1477 1481 # repo (local filesystem, old ssh servers).
1478 1482 #
1479 1483 # unbundle assumes local user cannot lock remote repo (new ssh
1480 1484 # servers, http servers).
1481 1485
1482 1486 if remote.capable('unbundle'):
1483 1487 return self.push_unbundle(remote, force, revs)
1484 1488 return self.push_addchangegroup(remote, force, revs)
1485 1489
1486 1490 def prepush(self, remote, force, revs):
1487 1491 base = {}
1488 1492 remote_heads = remote.heads()
1489 1493 inc = self.findincoming(remote, base, remote_heads, force=force)
1490 1494
1491 1495 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1492 1496 if revs is not None:
1493 1497 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1494 1498 else:
1495 1499 bases, heads = update, self.changelog.heads()
1496 1500
1497 1501 if not bases:
1498 1502 self.ui.status(_("no changes found\n"))
1499 1503 return None, 1
1500 1504 elif not force:
1501 1505 # check if we're creating new remote heads
1502 1506 # to be a remote head after push, node must be either
1503 1507 # - unknown locally
1504 1508 # - a local outgoing head descended from update
1505 1509 # - a remote head that's known locally and not
1506 1510 # ancestral to an outgoing head
1507 1511
1508 1512 warn = 0
1509 1513
1510 1514 if remote_heads == [nullid]:
1511 1515 warn = 0
1512 1516 elif not revs and len(heads) > len(remote_heads):
1513 1517 warn = 1
1514 1518 else:
1515 1519 newheads = list(heads)
1516 1520 for r in remote_heads:
1517 1521 if r in self.changelog.nodemap:
1518 1522 desc = self.changelog.heads(r, heads)
1519 1523 l = [h for h in heads if h in desc]
1520 1524 if not l:
1521 1525 newheads.append(r)
1522 1526 else:
1523 1527 newheads.append(r)
1524 1528 if len(newheads) > len(remote_heads):
1525 1529 warn = 1
1526 1530
1527 1531 if warn:
1528 1532 self.ui.warn(_("abort: push creates new remote heads!\n"))
1529 1533 self.ui.status(_("(did you forget to merge?"
1530 1534 " use push -f to force)\n"))
1531 1535 return None, 0
1532 1536 elif inc:
1533 1537 self.ui.warn(_("note: unsynced remote changes!\n"))
1534 1538
1535 1539
1536 1540 if revs is None:
1537 1541 cg = self.changegroup(update, 'push')
1538 1542 else:
1539 1543 cg = self.changegroupsubset(update, revs, 'push')
1540 1544 return cg, remote_heads
1541 1545
1542 1546 def push_addchangegroup(self, remote, force, revs):
1543 1547 lock = remote.lock()
1544 1548 try:
1545 1549 ret = self.prepush(remote, force, revs)
1546 1550 if ret[0] is not None:
1547 1551 cg, remote_heads = ret
1548 1552 return remote.addchangegroup(cg, 'push', self.url())
1549 1553 return ret[1]
1550 1554 finally:
1551 1555 del lock
1552 1556
1553 1557 def push_unbundle(self, remote, force, revs):
1554 1558 # local repo finds heads on server, finds out what revs it
1555 1559 # must push. once revs transferred, if server finds it has
1556 1560 # different heads (someone else won commit/push race), server
1557 1561 # aborts.
1558 1562
1559 1563 ret = self.prepush(remote, force, revs)
1560 1564 if ret[0] is not None:
1561 1565 cg, remote_heads = ret
1562 1566 if force: remote_heads = ['force']
1563 1567 return remote.unbundle(cg, remote_heads, 'push')
1564 1568 return ret[1]
1565 1569
1566 1570 def changegroupinfo(self, nodes, source):
1567 1571 if self.ui.verbose or source == 'bundle':
1568 1572 self.ui.status(_("%d changesets found\n") % len(nodes))
1569 1573 if self.ui.debugflag:
1570 1574 self.ui.debug(_("List of changesets:\n"))
1571 1575 for node in nodes:
1572 1576 self.ui.debug("%s\n" % hex(node))
1573 1577
1574 1578 def changegroupsubset(self, bases, heads, source, extranodes=None):
1575 1579 """This function generates a changegroup consisting of all the nodes
1576 1580 that are descendents of any of the bases, and ancestors of any of
1577 1581 the heads.
1578 1582
1579 1583 It is fairly complex as determining which filenodes and which
1580 1584 manifest nodes need to be included for the changeset to be complete
1581 1585 is non-trivial.
1582 1586
1583 1587 Another wrinkle is doing the reverse, figuring out which changeset in
1584 1588 the changegroup a particular filenode or manifestnode belongs to.
1585 1589
1586 1590 The caller can specify some nodes that must be included in the
1587 1591 changegroup using the extranodes argument. It should be a dict
1588 1592 where the keys are the filenames (or 1 for the manifest), and the
1589 1593 values are lists of (node, linknode) tuples, where node is a wanted
1590 1594 node and linknode is the changelog node that should be transmitted as
1591 1595 the linkrev.
1592 1596 """
1593 1597
1594 1598 self.hook('preoutgoing', throw=True, source=source)
1595 1599
1596 1600 # Set up some initial variables
1597 1601 # Make it easy to refer to self.changelog
1598 1602 cl = self.changelog
1599 1603 # msng is short for missing - compute the list of changesets in this
1600 1604 # changegroup.
1601 1605 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1602 1606 self.changegroupinfo(msng_cl_lst, source)
1603 1607 # Some bases may turn out to be superfluous, and some heads may be
1604 1608 # too. nodesbetween will return the minimal set of bases and heads
1605 1609 # necessary to re-create the changegroup.
1606 1610
1607 1611 # Known heads are the list of heads that it is assumed the recipient
1608 1612 # of this changegroup will know about.
1609 1613 knownheads = {}
1610 1614 # We assume that all parents of bases are known heads.
1611 1615 for n in bases:
1612 1616 for p in cl.parents(n):
1613 1617 if p != nullid:
1614 1618 knownheads[p] = 1
1615 1619 knownheads = knownheads.keys()
1616 1620 if knownheads:
1617 1621 # Now that we know what heads are known, we can compute which
1618 1622 # changesets are known. The recipient must know about all
1619 1623 # changesets required to reach the known heads from the null
1620 1624 # changeset.
1621 1625 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1622 1626 junk = None
1623 1627 # Transform the list into an ersatz set.
1624 1628 has_cl_set = dict.fromkeys(has_cl_set)
1625 1629 else:
1626 1630 # If there were no known heads, the recipient cannot be assumed to
1627 1631 # know about any changesets.
1628 1632 has_cl_set = {}
1629 1633
1630 1634 # Make it easy to refer to self.manifest
1631 1635 mnfst = self.manifest
1632 1636 # We don't know which manifests are missing yet
1633 1637 msng_mnfst_set = {}
1634 1638 # Nor do we know which filenodes are missing.
1635 1639 msng_filenode_set = {}
1636 1640
1637 1641 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1638 1642 junk = None
1639 1643
1640 1644 # A changeset always belongs to itself, so the changenode lookup
1641 1645 # function for a changenode is identity.
1642 1646 def identity(x):
1643 1647 return x
1644 1648
1645 1649 # A function generating function. Sets up an environment for the
1646 1650 # inner function.
1647 1651 def cmp_by_rev_func(revlog):
1648 1652 # Compare two nodes by their revision number in the environment's
1649 1653 # revision history. Since the revision number both represents the
1650 1654 # most efficient order to read the nodes in, and represents a
1651 1655 # topological sorting of the nodes, this function is often useful.
1652 1656 def cmp_by_rev(a, b):
1653 1657 return cmp(revlog.rev(a), revlog.rev(b))
1654 1658 return cmp_by_rev
1655 1659
1656 1660 # If we determine that a particular file or manifest node must be a
1657 1661 # node that the recipient of the changegroup will already have, we can
1658 1662 # also assume the recipient will have all the parents. This function
1659 1663 # prunes them from the set of missing nodes.
1660 1664 def prune_parents(revlog, hasset, msngset):
1661 1665 haslst = hasset.keys()
1662 1666 haslst.sort(cmp_by_rev_func(revlog))
1663 1667 for node in haslst:
1664 1668 parentlst = [p for p in revlog.parents(node) if p != nullid]
1665 1669 while parentlst:
1666 1670 n = parentlst.pop()
1667 1671 if n not in hasset:
1668 1672 hasset[n] = 1
1669 1673 p = [p for p in revlog.parents(n) if p != nullid]
1670 1674 parentlst.extend(p)
1671 1675 for n in hasset:
1672 1676 msngset.pop(n, None)
1673 1677
1674 1678 # This is a function generating function used to set up an environment
1675 1679 # for the inner function to execute in.
1676 1680 def manifest_and_file_collector(changedfileset):
1677 1681 # This is an information gathering function that gathers
1678 1682 # information from each changeset node that goes out as part of
1679 1683 # the changegroup. The information gathered is a list of which
1680 1684 # manifest nodes are potentially required (the recipient may
1681 1685 # already have them) and total list of all files which were
1682 1686 # changed in any changeset in the changegroup.
1683 1687 #
1684 1688 # We also remember the first changenode we saw any manifest
1685 1689 # referenced by so we can later determine which changenode 'owns'
1686 1690 # the manifest.
1687 1691 def collect_manifests_and_files(clnode):
1688 1692 c = cl.read(clnode)
1689 1693 for f in c[3]:
1690 1694 # This is to make sure we only have one instance of each
1691 1695 # filename string for each filename.
1692 1696 changedfileset.setdefault(f, f)
1693 1697 msng_mnfst_set.setdefault(c[0], clnode)
1694 1698 return collect_manifests_and_files
1695 1699
1696 1700 # Figure out which manifest nodes (of the ones we think might be part
1697 1701 # of the changegroup) the recipient must know about and remove them
1698 1702 # from the changegroup.
1699 1703 def prune_manifests():
1700 1704 has_mnfst_set = {}
1701 1705 for n in msng_mnfst_set:
1702 1706 # If a 'missing' manifest thinks it belongs to a changenode
1703 1707 # the recipient is assumed to have, obviously the recipient
1704 1708 # must have that manifest.
1705 1709 linknode = cl.node(mnfst.linkrev(n))
1706 1710 if linknode in has_cl_set:
1707 1711 has_mnfst_set[n] = 1
1708 1712 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1709 1713
1710 1714 # Use the information collected in collect_manifests_and_files to say
1711 1715 # which changenode any manifestnode belongs to.
1712 1716 def lookup_manifest_link(mnfstnode):
1713 1717 return msng_mnfst_set[mnfstnode]
1714 1718
1715 1719 # A function generating function that sets up the initial environment
1716 1720 # the inner function.
1717 1721 def filenode_collector(changedfiles):
1718 1722 next_rev = [0]
1719 1723 # This gathers information from each manifestnode included in the
1720 1724 # changegroup about which filenodes the manifest node references
1721 1725 # so we can include those in the changegroup too.
1722 1726 #
1723 1727 # It also remembers which changenode each filenode belongs to. It
1724 1728 # does this by assuming the a filenode belongs to the changenode
1725 1729 # the first manifest that references it belongs to.
1726 1730 def collect_msng_filenodes(mnfstnode):
1727 1731 r = mnfst.rev(mnfstnode)
1728 1732 if r == next_rev[0]:
1729 1733 # If the last rev we looked at was the one just previous,
1730 1734 # we only need to see a diff.
1731 1735 deltamf = mnfst.readdelta(mnfstnode)
1732 1736 # For each line in the delta
1733 1737 for f, fnode in deltamf.items():
1734 1738 f = changedfiles.get(f, None)
1735 1739 # And if the file is in the list of files we care
1736 1740 # about.
1737 1741 if f is not None:
1738 1742 # Get the changenode this manifest belongs to
1739 1743 clnode = msng_mnfst_set[mnfstnode]
1740 1744 # Create the set of filenodes for the file if
1741 1745 # there isn't one already.
1742 1746 ndset = msng_filenode_set.setdefault(f, {})
1743 1747 # And set the filenode's changelog node to the
1744 1748 # manifest's if it hasn't been set already.
1745 1749 ndset.setdefault(fnode, clnode)
1746 1750 else:
1747 1751 # Otherwise we need a full manifest.
1748 1752 m = mnfst.read(mnfstnode)
1749 1753 # For every file in we care about.
1750 1754 for f in changedfiles:
1751 1755 fnode = m.get(f, None)
1752 1756 # If it's in the manifest
1753 1757 if fnode is not None:
1754 1758 # See comments above.
1755 1759 clnode = msng_mnfst_set[mnfstnode]
1756 1760 ndset = msng_filenode_set.setdefault(f, {})
1757 1761 ndset.setdefault(fnode, clnode)
1758 1762 # Remember the revision we hope to see next.
1759 1763 next_rev[0] = r + 1
1760 1764 return collect_msng_filenodes
1761 1765
1762 1766 # We have a list of filenodes we think we need for a file, lets remove
1763 1767 # all those we now the recipient must have.
1764 1768 def prune_filenodes(f, filerevlog):
1765 1769 msngset = msng_filenode_set[f]
1766 1770 hasset = {}
1767 1771 # If a 'missing' filenode thinks it belongs to a changenode we
1768 1772 # assume the recipient must have, then the recipient must have
1769 1773 # that filenode.
1770 1774 for n in msngset:
1771 1775 clnode = cl.node(filerevlog.linkrev(n))
1772 1776 if clnode in has_cl_set:
1773 1777 hasset[n] = 1
1774 1778 prune_parents(filerevlog, hasset, msngset)
1775 1779
1776 1780 # A function generator function that sets up the a context for the
1777 1781 # inner function.
1778 1782 def lookup_filenode_link_func(fname):
1779 1783 msngset = msng_filenode_set[fname]
1780 1784 # Lookup the changenode the filenode belongs to.
1781 1785 def lookup_filenode_link(fnode):
1782 1786 return msngset[fnode]
1783 1787 return lookup_filenode_link
1784 1788
1785 1789 # Add the nodes that were explicitly requested.
1786 1790 def add_extra_nodes(name, nodes):
1787 1791 if not extranodes or name not in extranodes:
1788 1792 return
1789 1793
1790 1794 for node, linknode in extranodes[name]:
1791 1795 if node not in nodes:
1792 1796 nodes[node] = linknode
1793 1797
1794 1798 # Now that we have all theses utility functions to help out and
1795 1799 # logically divide up the task, generate the group.
1796 1800 def gengroup():
1797 1801 # The set of changed files starts empty.
1798 1802 changedfiles = {}
1799 1803 # Create a changenode group generator that will call our functions
1800 1804 # back to lookup the owning changenode and collect information.
1801 1805 group = cl.group(msng_cl_lst, identity,
1802 1806 manifest_and_file_collector(changedfiles))
1803 1807 for chnk in group:
1804 1808 yield chnk
1805 1809
1806 1810 # The list of manifests has been collected by the generator
1807 1811 # calling our functions back.
1808 1812 prune_manifests()
1809 1813 add_extra_nodes(1, msng_mnfst_set)
1810 1814 msng_mnfst_lst = msng_mnfst_set.keys()
1811 1815 # Sort the manifestnodes by revision number.
1812 1816 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1813 1817 # Create a generator for the manifestnodes that calls our lookup
1814 1818 # and data collection functions back.
1815 1819 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1816 1820 filenode_collector(changedfiles))
1817 1821 for chnk in group:
1818 1822 yield chnk
1819 1823
1820 1824 # These are no longer needed, dereference and toss the memory for
1821 1825 # them.
1822 1826 msng_mnfst_lst = None
1823 1827 msng_mnfst_set.clear()
1824 1828
1825 1829 if extranodes:
1826 1830 for fname in extranodes:
1827 1831 if isinstance(fname, int):
1828 1832 continue
1829 1833 add_extra_nodes(fname,
1830 1834 msng_filenode_set.setdefault(fname, {}))
1831 1835 changedfiles[fname] = 1
1832 1836 changedfiles = changedfiles.keys()
1833 1837 changedfiles.sort()
1834 1838 # Go through all our files in order sorted by name.
1835 1839 for fname in changedfiles:
1836 1840 filerevlog = self.file(fname)
1837 1841 if filerevlog.count() == 0:
1838 1842 raise util.Abort(_("empty or missing revlog for %s") % fname)
1839 1843 # Toss out the filenodes that the recipient isn't really
1840 1844 # missing.
1841 1845 if fname in msng_filenode_set:
1842 1846 prune_filenodes(fname, filerevlog)
1843 1847 msng_filenode_lst = msng_filenode_set[fname].keys()
1844 1848 else:
1845 1849 msng_filenode_lst = []
1846 1850 # If any filenodes are left, generate the group for them,
1847 1851 # otherwise don't bother.
1848 1852 if len(msng_filenode_lst) > 0:
1849 1853 yield changegroup.chunkheader(len(fname))
1850 1854 yield fname
1851 1855 # Sort the filenodes by their revision #
1852 1856 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1853 1857 # Create a group generator and only pass in a changenode
1854 1858 # lookup function as we need to collect no information
1855 1859 # from filenodes.
1856 1860 group = filerevlog.group(msng_filenode_lst,
1857 1861 lookup_filenode_link_func(fname))
1858 1862 for chnk in group:
1859 1863 yield chnk
1860 1864 if fname in msng_filenode_set:
1861 1865 # Don't need this anymore, toss it to free memory.
1862 1866 del msng_filenode_set[fname]
1863 1867 # Signal that no more groups are left.
1864 1868 yield changegroup.closechunk()
1865 1869
1866 1870 if msng_cl_lst:
1867 1871 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1868 1872
1869 1873 return util.chunkbuffer(gengroup())
1870 1874
1871 1875 def changegroup(self, basenodes, source):
1872 1876 """Generate a changegroup of all nodes that we have that a recipient
1873 1877 doesn't.
1874 1878
1875 1879 This is much easier than the previous function as we can assume that
1876 1880 the recipient has any changenode we aren't sending them."""
1877 1881
1878 1882 self.hook('preoutgoing', throw=True, source=source)
1879 1883
1880 1884 cl = self.changelog
1881 1885 nodes = cl.nodesbetween(basenodes, None)[0]
1882 1886 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1883 1887 self.changegroupinfo(nodes, source)
1884 1888
1885 1889 def identity(x):
1886 1890 return x
1887 1891
1888 1892 def gennodelst(revlog):
1889 1893 for r in xrange(0, revlog.count()):
1890 1894 n = revlog.node(r)
1891 1895 if revlog.linkrev(n) in revset:
1892 1896 yield n
1893 1897
1894 1898 def changed_file_collector(changedfileset):
1895 1899 def collect_changed_files(clnode):
1896 1900 c = cl.read(clnode)
1897 1901 for fname in c[3]:
1898 1902 changedfileset[fname] = 1
1899 1903 return collect_changed_files
1900 1904
1901 1905 def lookuprevlink_func(revlog):
1902 1906 def lookuprevlink(n):
1903 1907 return cl.node(revlog.linkrev(n))
1904 1908 return lookuprevlink
1905 1909
1906 1910 def gengroup():
1907 1911 # construct a list of all changed files
1908 1912 changedfiles = {}
1909 1913
1910 1914 for chnk in cl.group(nodes, identity,
1911 1915 changed_file_collector(changedfiles)):
1912 1916 yield chnk
1913 1917 changedfiles = changedfiles.keys()
1914 1918 changedfiles.sort()
1915 1919
1916 1920 mnfst = self.manifest
1917 1921 nodeiter = gennodelst(mnfst)
1918 1922 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1919 1923 yield chnk
1920 1924
1921 1925 for fname in changedfiles:
1922 1926 filerevlog = self.file(fname)
1923 1927 if filerevlog.count() == 0:
1924 1928 raise util.Abort(_("empty or missing revlog for %s") % fname)
1925 1929 nodeiter = gennodelst(filerevlog)
1926 1930 nodeiter = list(nodeiter)
1927 1931 if nodeiter:
1928 1932 yield changegroup.chunkheader(len(fname))
1929 1933 yield fname
1930 1934 lookup = lookuprevlink_func(filerevlog)
1931 1935 for chnk in filerevlog.group(nodeiter, lookup):
1932 1936 yield chnk
1933 1937
1934 1938 yield changegroup.closechunk()
1935 1939
1936 1940 if nodes:
1937 1941 self.hook('outgoing', node=hex(nodes[0]), source=source)
1938 1942
1939 1943 return util.chunkbuffer(gengroup())
1940 1944
1941 1945 def addchangegroup(self, source, srctype, url, emptyok=False):
1942 1946 """add changegroup to repo.
1943 1947
1944 1948 return values:
1945 1949 - nothing changed or no source: 0
1946 1950 - more heads than before: 1+added heads (2..n)
1947 1951 - less heads than before: -1-removed heads (-2..-n)
1948 1952 - number of heads stays the same: 1
1949 1953 """
1950 1954 def csmap(x):
1951 1955 self.ui.debug(_("add changeset %s\n") % short(x))
1952 1956 return cl.count()
1953 1957
1954 1958 def revmap(x):
1955 1959 return cl.rev(x)
1956 1960
1957 1961 if not source:
1958 1962 return 0
1959 1963
1960 1964 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1961 1965
1962 1966 changesets = files = revisions = 0
1963 1967
1964 1968 # write changelog data to temp files so concurrent readers will not see
1965 1969 # inconsistent view
1966 1970 cl = self.changelog
1967 1971 cl.delayupdate()
1968 1972 oldheads = len(cl.heads())
1969 1973
1970 1974 tr = self.transaction()
1971 1975 try:
1972 1976 trp = weakref.proxy(tr)
1973 1977 # pull off the changeset group
1974 1978 self.ui.status(_("adding changesets\n"))
1975 1979 cor = cl.count() - 1
1976 1980 chunkiter = changegroup.chunkiter(source)
1977 1981 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1978 1982 raise util.Abort(_("received changelog group is empty"))
1979 1983 cnr = cl.count() - 1
1980 1984 changesets = cnr - cor
1981 1985
1982 1986 # pull off the manifest group
1983 1987 self.ui.status(_("adding manifests\n"))
1984 1988 chunkiter = changegroup.chunkiter(source)
1985 1989 # no need to check for empty manifest group here:
1986 1990 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1987 1991 # no new manifest will be created and the manifest group will
1988 1992 # be empty during the pull
1989 1993 self.manifest.addgroup(chunkiter, revmap, trp)
1990 1994
1991 1995 # process the files
1992 1996 self.ui.status(_("adding file changes\n"))
1993 1997 while 1:
1994 1998 f = changegroup.getchunk(source)
1995 1999 if not f:
1996 2000 break
1997 2001 self.ui.debug(_("adding %s revisions\n") % f)
1998 2002 fl = self.file(f)
1999 2003 o = fl.count()
2000 2004 chunkiter = changegroup.chunkiter(source)
2001 2005 if fl.addgroup(chunkiter, revmap, trp) is None:
2002 2006 raise util.Abort(_("received file revlog group is empty"))
2003 2007 revisions += fl.count() - o
2004 2008 files += 1
2005 2009
2006 2010 # make changelog see real files again
2007 2011 cl.finalize(trp)
2008 2012
2009 2013 newheads = len(self.changelog.heads())
2010 2014 heads = ""
2011 2015 if oldheads and newheads != oldheads:
2012 2016 heads = _(" (%+d heads)") % (newheads - oldheads)
2013 2017
2014 2018 self.ui.status(_("added %d changesets"
2015 2019 " with %d changes to %d files%s\n")
2016 2020 % (changesets, revisions, files, heads))
2017 2021
2018 2022 if changesets > 0:
2019 2023 self.hook('pretxnchangegroup', throw=True,
2020 2024 node=hex(self.changelog.node(cor+1)), source=srctype,
2021 2025 url=url)
2022 2026
2023 2027 tr.close()
2024 2028 finally:
2025 2029 del tr
2026 2030
2027 2031 if changesets > 0:
2028 2032 # forcefully update the on-disk branch cache
2029 2033 self.ui.debug(_("updating the branch cache\n"))
2030 2034 self.branchtags()
2031 2035 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2032 2036 source=srctype, url=url)
2033 2037
2034 2038 for i in xrange(cor + 1, cnr + 1):
2035 2039 self.hook("incoming", node=hex(self.changelog.node(i)),
2036 2040 source=srctype, url=url)
2037 2041
2038 2042 # never return 0 here:
2039 2043 if newheads < oldheads:
2040 2044 return newheads - oldheads - 1
2041 2045 else:
2042 2046 return newheads - oldheads + 1
2043 2047
2044 2048
2045 2049 def stream_in(self, remote):
2046 2050 fp = remote.stream_out()
2047 2051 l = fp.readline()
2048 2052 try:
2049 2053 resp = int(l)
2050 2054 except ValueError:
2051 2055 raise util.UnexpectedOutput(
2052 2056 _('Unexpected response from remote server:'), l)
2053 2057 if resp == 1:
2054 2058 raise util.Abort(_('operation forbidden by server'))
2055 2059 elif resp == 2:
2056 2060 raise util.Abort(_('locking the remote repository failed'))
2057 2061 elif resp != 0:
2058 2062 raise util.Abort(_('the server sent an unknown error code'))
2059 2063 self.ui.status(_('streaming all changes\n'))
2060 2064 l = fp.readline()
2061 2065 try:
2062 2066 total_files, total_bytes = map(int, l.split(' ', 1))
2063 2067 except (ValueError, TypeError):
2064 2068 raise util.UnexpectedOutput(
2065 2069 _('Unexpected response from remote server:'), l)
2066 2070 self.ui.status(_('%d files to transfer, %s of data\n') %
2067 2071 (total_files, util.bytecount(total_bytes)))
2068 2072 start = time.time()
2069 2073 for i in xrange(total_files):
2070 2074 # XXX doesn't support '\n' or '\r' in filenames
2071 2075 l = fp.readline()
2072 2076 try:
2073 2077 name, size = l.split('\0', 1)
2074 2078 size = int(size)
2075 2079 except ValueError, TypeError:
2076 2080 raise util.UnexpectedOutput(
2077 2081 _('Unexpected response from remote server:'), l)
2078 2082 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2079 2083 ofp = self.sopener(name, 'w')
2080 2084 for chunk in util.filechunkiter(fp, limit=size):
2081 2085 ofp.write(chunk)
2082 2086 ofp.close()
2083 2087 elapsed = time.time() - start
2084 2088 if elapsed <= 0:
2085 2089 elapsed = 0.001
2086 2090 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2087 2091 (util.bytecount(total_bytes), elapsed,
2088 2092 util.bytecount(total_bytes / elapsed)))
2089 2093 self.invalidate()
2090 2094 return len(self.heads()) + 1
2091 2095
2092 2096 def clone(self, remote, heads=[], stream=False):
2093 2097 '''clone remote repository.
2094 2098
2095 2099 keyword arguments:
2096 2100 heads: list of revs to clone (forces use of pull)
2097 2101 stream: use streaming clone if possible'''
2098 2102
2099 2103 # now, all clients that can request uncompressed clones can
2100 2104 # read repo formats supported by all servers that can serve
2101 2105 # them.
2102 2106
2103 2107 # if revlog format changes, client will have to check version
2104 2108 # and format flags on "stream" capability, and use
2105 2109 # uncompressed only if compatible.
2106 2110
2107 2111 if stream and not heads and remote.capable('stream'):
2108 2112 return self.stream_in(remote)
2109 2113 return self.pull(remote, heads)
2110 2114
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames every (src, dest) pair in files.

    The pairs are copied into a plain list up front so the returned
    closure holds no reference to the caller's structures.
    """
    renamefiles = [tuple(pair) for pair in files]
    def a():
        for src, dest in renamefiles:
            util.rename(src, dest)
    return a
2118 2122
def instance(ui, path, create):
    """Open (or create) the local repository at path, stripping any
    leading 'file' scheme from the location first."""
    local_path = util.drop_scheme('file', path)
    return localrepository(ui, local_path, create)
2121 2125
def islocal(path):
    """Repositories handled by this module are always local."""
    return True
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now