Expand import * to allow Pyflakes to find problems
Joel Rosdahl
r6211:f89fd07f default

The changeset is too large to display in full; the diff below is truncated.

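The change pattern is the same in every file below: a wildcard import of mercurial.node is replaced with an explicit list of the names the module actually uses. As a minimal sketch of why that helps Pyflakes (illustrative only, not part of the changeset; the file names before.py/after.py, the function node_label, and the typo shrot are made up, while bin and short are real mercurial.node helpers):

    # before.py -- with a wildcard import, Pyflakes can only warn that
    # undefined names cannot be detected, so the typo below passes silently.
    from mercurial.node import *

    def node_label(node):
        return shrot(node)      # typo for short(); not reported

    # after.py -- with explicit imports, as introduced by this changeset,
    # Pyflakes reports shrot() as an undefined name and also flags any
    # imported name (e.g. bin) that is never used.
    from mercurial.node import bin, short

    def node_label(node):
        return short(node)

Running pyflakes over the two snippets (pyflakes before.py, pyflakes after.py) shows the difference; the runtime behaviour of the modules is unchanged.
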
@@ -1,124 +1,124 b''
1 1 # acl.py - changeset access control for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # this hook allows to allow or deny access to parts of a repo when
9 9 # taking incoming changesets.
10 10 #
11 11 # authorization is against local user name on system where hook is
12 12 # run, not committer of original changeset (since that is easy to
13 13 # spoof).
14 14 #
15 15 # acl hook is best to use if you use hgsh to set up restricted shells
16 16 # for authenticated users to only push to / pull from. not safe if
17 17 # user has interactive shell access, because they can disable hook.
18 18 # also not safe if remote users share one local account, because then
19 19 # no way to tell remote users apart.
20 20 #
21 21 # to use, configure acl extension in hgrc like this:
22 22 #
23 23 # [extensions]
24 24 # hgext.acl =
25 25 #
26 26 # [hooks]
27 27 # pretxnchangegroup.acl = python:hgext.acl.hook
28 28 #
29 29 # [acl]
30 30 # sources = serve # check if source of incoming changes in this list
31 31 # # ("serve" == ssh or http, "push", "pull", "bundle")
32 32 #
33 33 # allow and deny lists have subtree pattern (default syntax is glob)
34 34 # on left, user names on right. deny list checked before allow list.
35 35 #
36 36 # [acl.allow]
37 37 # # if acl.allow not present, all users allowed by default
38 38 # # empty acl.allow = no users allowed
39 39 # docs/** = doc_writer
40 40 # .hgtags = release_engineer
41 41 #
42 42 # [acl.deny]
43 43 # # if acl.deny not present, no users denied by default
44 44 # # empty acl.deny = all users allowed
45 45 # glob pattern = user4, user5
46 46 # ** = user6
47 47
48 48 from mercurial.i18n import _
49 from mercurial.node import *
49 from mercurial.node import bin, short
50 50 from mercurial import util
51 51 import getpass
52 52
53 53 class checker(object):
54 54 '''acl checker.'''
55 55
56 56 def buildmatch(self, key):
57 57 '''return tuple of (match function, list enabled).'''
58 58 if not self.ui.has_section(key):
59 59 self.ui.debug(_('acl: %s not enabled\n') % key)
60 60 return None, False
61 61
62 62 thisuser = self.getuser()
63 63 pats = [pat for pat, users in self.ui.configitems(key)
64 64 if thisuser in users.replace(',', ' ').split()]
65 65 self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
66 66 (key, len(pats), thisuser))
67 67 if pats:
68 68 match = util.matcher(self.repo.root, names=pats)[1]
69 69 else:
70 70 match = util.never
71 71 return match, True
72 72
73 73 def getuser(self):
74 74 '''return name of authenticated user.'''
75 75 return self.user
76 76
77 77 def __init__(self, ui, repo):
78 78 self.ui = ui
79 79 self.repo = repo
80 80 self.user = getpass.getuser()
81 81 cfg = self.ui.config('acl', 'config')
82 82 if cfg:
83 83 self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
84 84 self.allow, self.allowable = self.buildmatch('acl.allow')
85 85 self.deny, self.deniable = self.buildmatch('acl.deny')
86 86
87 87 def skipsource(self, source):
88 88 '''true if incoming changes from this source should be skipped.'''
89 89 ok_sources = self.ui.config('acl', 'sources', 'serve').split()
90 90 return source not in ok_sources
91 91
92 92 def check(self, node):
93 93 '''return if access allowed, raise exception if not.'''
94 94 files = self.repo.changectx(node).files()
95 95 if self.deniable:
96 96 for f in files:
97 97 if self.deny(f):
98 98 self.ui.debug(_('acl: user %s denied on %s\n') %
99 99 (self.getuser(), f))
100 100 raise util.Abort(_('acl: access denied for changeset %s') %
101 101 short(node))
102 102 if self.allowable:
103 103 for f in files:
104 104 if not self.allow(f):
105 105 self.ui.debug(_('acl: user %s not allowed on %s\n') %
106 106 (self.getuser(), f))
107 107 raise util.Abort(_('acl: access denied for changeset %s') %
108 108 short(node))
109 109 self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
110 110
111 111 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
112 112 if hooktype != 'pretxnchangegroup':
113 113 raise util.Abort(_('config error - hook type "%s" cannot stop '
114 114 'incoming changesets') % hooktype)
115 115
116 116 c = checker(ui, repo)
117 117 if c.skipsource(source):
118 118 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
119 119 return
120 120
121 121 start = repo.changelog.rev(bin(node))
122 122 end = repo.changelog.count()
123 123 for rev in xrange(start, end):
124 124 c.check(repo.changelog.node(rev))
@@ -1,311 +1,311 b''
1 1 # bugzilla.py - bugzilla integration for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to update comments of bugzilla bugs when changesets
9 9 # that refer to bugs by id are seen. this hook does not change bug
10 10 # status, only comments.
11 11 #
12 12 # to configure, add items to '[bugzilla]' section of hgrc.
13 13 #
14 14 # to use, configure bugzilla extension and enable like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.bugzilla =
18 18 #
19 19 # [hooks]
20 20 # # run bugzilla hook on every change pulled or pushed in here
21 21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 22 #
23 23 # config items:
24 24 #
25 25 # section name is 'bugzilla'.
26 26 # [bugzilla]
27 27 #
28 28 # REQUIRED:
29 29 # host = bugzilla # mysql server where bugzilla database lives
30 30 # password = ** # user's password
31 31 # version = 2.16 # version of bugzilla installed
32 32 #
33 33 # OPTIONAL:
34 34 # bzuser = ... # fallback bugzilla user name to record comments with
35 35 # db = bugs # database to connect to
36 36 # notify = ... # command to run to get bugzilla to send mail
37 37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 38 # strip = 0 # number of slashes to strip for url paths
39 39 # style = ... # style file to use when formatting comments
40 40 # template = ... # template to use when formatting comments
41 41 # timeout = 5 # database connection timeout (seconds)
42 42 # user = bugs # user to connect to database as
43 43 # [web]
44 44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 45 #
46 46 # if hg committer names are not same as bugzilla user names, use
47 47 # "usermap" feature to map from committer email to bugzilla user name.
48 48 # usermap can be in hgrc or separate config file.
49 49 #
50 50 # [bugzilla]
51 51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 52 # [usermap]
53 53 # committer_email = bugzilla_user_name
54 54
55 55 from mercurial.i18n import _
56 from mercurial.node import *
56 from mercurial.node import short
57 57 from mercurial import cmdutil, templater, util
58 58 import os, re, time
59 59
60 60 MySQLdb = None
61 61
62 62 def buglist(ids):
63 63 return '(' + ','.join(map(str, ids)) + ')'
64 64
65 65 class bugzilla_2_16(object):
66 66 '''support for bugzilla version 2.16.'''
67 67
68 68 def __init__(self, ui):
69 69 self.ui = ui
70 70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 72 passwd = self.ui.config('bugzilla', 'password')
73 73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 75 usermap = self.ui.config('bugzilla', 'usermap')
76 76 if usermap:
77 77 self.ui.readsections(usermap, 'usermap')
78 78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 79 (host, db, user, '*' * len(passwd)))
80 80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 81 db=db, connect_timeout=timeout)
82 82 self.cursor = self.conn.cursor()
83 83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 84 ids = self.cursor.fetchall()
85 85 if len(ids) != 1:
86 86 raise util.Abort(_('unknown database schema'))
87 87 self.longdesc_id = ids[0][0]
88 88 self.user_ids = {}
89 89
90 90 def run(self, *args, **kwargs):
91 91 '''run a query.'''
92 92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 93 try:
94 94 self.cursor.execute(*args, **kwargs)
95 95 except MySQLdb.MySQLError, err:
96 96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 97 raise
98 98
99 99 def filter_real_bug_ids(self, ids):
100 100 '''filter not-existing bug ids from list.'''
101 101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 102 ids = [c[0] for c in self.cursor.fetchall()]
103 103 ids.sort()
104 104 return ids
105 105
106 106 def filter_unknown_bug_ids(self, node, ids):
107 107 '''filter bug ids from list that already refer to this changeset.'''
108 108
109 109 self.run('''select bug_id from longdescs where
110 110 bug_id in %s and thetext like "%%%s%%"''' %
111 111 (buglist(ids), short(node)))
112 112 unknown = dict.fromkeys(ids)
113 113 for (id,) in self.cursor.fetchall():
114 114 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 115 (id, short(node)))
116 116 unknown.pop(id, None)
117 117 ids = unknown.keys()
118 118 ids.sort()
119 119 return ids
120 120
121 121 def notify(self, ids):
122 122 '''tell bugzilla to send mail.'''
123 123
124 124 self.ui.status(_('telling bugzilla to send mail:\n'))
125 125 for id in ids:
126 126 self.ui.status(_(' bug %s\n') % id)
127 127 cmd = self.ui.config('bugzilla', 'notify',
128 128 'cd /var/www/html/bugzilla && '
129 129 './processmail %s nobody@nowhere.com') % id
130 130 fp = os.popen('(%s) 2>&1' % cmd)
131 131 out = fp.read()
132 132 ret = fp.close()
133 133 if ret:
134 134 self.ui.warn(out)
135 135 raise util.Abort(_('bugzilla notify command %s') %
136 136 util.explain_exit(ret)[0])
137 137 self.ui.status(_('done\n'))
138 138
139 139 def get_user_id(self, user):
140 140 '''look up numeric bugzilla user id.'''
141 141 try:
142 142 return self.user_ids[user]
143 143 except KeyError:
144 144 try:
145 145 userid = int(user)
146 146 except ValueError:
147 147 self.ui.note(_('looking up user %s\n') % user)
148 148 self.run('''select userid from profiles
149 149 where login_name like %s''', user)
150 150 all = self.cursor.fetchall()
151 151 if len(all) != 1:
152 152 raise KeyError(user)
153 153 userid = int(all[0][0])
154 154 self.user_ids[user] = userid
155 155 return userid
156 156
157 157 def map_committer(self, user):
158 158 '''map name of committer to bugzilla user name.'''
159 159 for committer, bzuser in self.ui.configitems('usermap'):
160 160 if committer.lower() == user.lower():
161 161 return bzuser
162 162 return user
163 163
164 164 def add_comment(self, bugid, text, committer):
165 165 '''add comment to bug. try adding comment as committer of
166 166 changeset, otherwise as default bugzilla user.'''
167 167 user = self.map_committer(committer)
168 168 try:
169 169 userid = self.get_user_id(user)
170 170 except KeyError:
171 171 try:
172 172 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 173 if not defaultuser:
174 174 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 175 user)
176 176 userid = self.get_user_id(defaultuser)
177 177 except KeyError:
178 178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 179 (user, defaultuser))
180 180 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 181 self.run('''insert into longdescs
182 182 (bug_id, who, bug_when, thetext)
183 183 values (%s, %s, %s, %s)''',
184 184 (bugid, userid, now, text))
185 185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 186 values (%s, %s, %s, %s)''',
187 187 (bugid, userid, now, self.longdesc_id))
188 188
189 189 class bugzilla(object):
190 190 # supported versions of bugzilla. different versions have
191 191 # different schemas.
192 192 _versions = {
193 193 '2.16': bugzilla_2_16,
194 194 }
195 195
196 196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198 198
199 199 _bz = None
200 200
201 201 def __init__(self, ui, repo):
202 202 self.ui = ui
203 203 self.repo = repo
204 204
205 205 def bz(self):
206 206 '''return object that knows how to talk to bugzilla version in
207 207 use.'''
208 208
209 209 if bugzilla._bz is None:
210 210 bzversion = self.ui.config('bugzilla', 'version')
211 211 try:
212 212 bzclass = bugzilla._versions[bzversion]
213 213 except KeyError:
214 214 raise util.Abort(_('bugzilla version %s not supported') %
215 215 bzversion)
216 216 bugzilla._bz = bzclass(self.ui)
217 217 return bugzilla._bz
218 218
219 219 def __getattr__(self, key):
220 220 return getattr(self.bz(), key)
221 221
222 222 _bug_re = None
223 223 _split_re = None
224 224
225 225 def find_bug_ids(self, ctx):
226 226 '''find valid bug ids that are referred to in changeset
227 227 comments and that do not already have references to this
228 228 changeset.'''
229 229
230 230 if bugzilla._bug_re is None:
231 231 bugzilla._bug_re = re.compile(
232 232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 233 re.IGNORECASE)
234 234 bugzilla._split_re = re.compile(r'\D+')
235 235 start = 0
236 236 ids = {}
237 237 while True:
238 238 m = bugzilla._bug_re.search(ctx.description(), start)
239 239 if not m:
240 240 break
241 241 start = m.end()
242 242 for id in bugzilla._split_re.split(m.group(1)):
243 243 if not id: continue
244 244 ids[int(id)] = 1
245 245 ids = ids.keys()
246 246 if ids:
247 247 ids = self.filter_real_bug_ids(ids)
248 248 if ids:
249 249 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
250 250 return ids
251 251
252 252 def update(self, bugid, ctx):
253 253 '''update bugzilla bug with reference to changeset.'''
254 254
255 255 def webroot(root):
256 256 '''strip leading prefix of repo root and turn into
257 257 url-safe path.'''
258 258 count = int(self.ui.config('bugzilla', 'strip', 0))
259 259 root = util.pconvert(root)
260 260 while count > 0:
261 261 c = root.find('/')
262 262 if c == -1:
263 263 break
264 264 root = root[c+1:]
265 265 count -= 1
266 266 return root
267 267
268 268 mapfile = self.ui.config('bugzilla', 'style')
269 269 tmpl = self.ui.config('bugzilla', 'template')
270 270 t = cmdutil.changeset_templater(self.ui, self.repo,
271 271 False, mapfile, False)
272 272 if not mapfile and not tmpl:
273 273 tmpl = _('changeset {node|short} in repo {root} refers '
274 274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 275 if tmpl:
276 276 tmpl = templater.parsestring(tmpl, quoted=False)
277 277 t.use_template(tmpl)
278 278 self.ui.pushbuffer()
279 279 t.show(changenode=ctx.node(), changes=ctx.changeset(),
280 280 bug=str(bugid),
281 281 hgweb=self.ui.config('web', 'baseurl'),
282 282 root=self.repo.root,
283 283 webroot=webroot(self.repo.root))
284 284 data = self.ui.popbuffer()
285 285 self.add_comment(bugid, data, util.email(ctx.user()))
286 286
287 287 def hook(ui, repo, hooktype, node=None, **kwargs):
288 288 '''add comment to bugzilla for each changeset that refers to a
289 289 bugzilla bug id. only add a comment once per bug, so same change
290 290 seen multiple times does not fill bug with duplicate data.'''
291 291 try:
292 292 import MySQLdb as mysql
293 293 global MySQLdb
294 294 MySQLdb = mysql
295 295 except ImportError, err:
296 296 raise util.Abort(_('python mysql support not available: %s') % err)
297 297
298 298 if node is None:
299 299 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 300 hooktype)
301 301 try:
302 302 bz = bugzilla(ui, repo)
303 303 ctx = repo.changectx(node)
304 304 ids = bz.find_bug_ids(ctx)
305 305 if ids:
306 306 for id in ids:
307 307 bz.update(id, ctx)
308 308 bz.notify(ids)
309 309 except MySQLdb.MySQLError, err:
310 310 raise util.Abort(_('database error: %s') % err[1])
311 311
@@ -1,301 +1,301 b''
1 1 # hg backend for convert extension
2 2
3 3 # Notes for hg->hg conversion:
4 4 #
5 5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 6 # of commit messages, but new versions do. Changesets created by
7 7 # those older versions, then converted, may thus have different
8 8 # hashes for changesets that are otherwise identical.
9 9 #
10 10 # * By default, the source revision is stored in the converted
11 11 # revision. This will cause the converted revision to have a
12 12 # different identity than the source. To avoid this, use the
13 13 # following option: "--config convert.hg.saverev=false"
14 14
15 15
16 16 import os, time
17 17 from mercurial.i18n import _
18 from mercurial.node import *
18 from mercurial.node import bin, hex, nullid
19 19 from mercurial import hg, lock, revlog, util
20 20
21 21 from common import NoRepo, commit, converter_source, converter_sink
22 22
23 23 class mercurial_sink(converter_sink):
24 24 def __init__(self, ui, path):
25 25 converter_sink.__init__(self, ui, path)
26 26 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
27 27 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
28 28 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
29 29 self.lastbranch = None
30 30 if os.path.isdir(path) and len(os.listdir(path)) > 0:
31 31 try:
32 32 self.repo = hg.repository(self.ui, path)
33 33 if not self.repo.local():
34 34 raise NoRepo(_('%s is not a local Mercurial repo') % path)
35 35 except hg.RepoError, err:
36 36 ui.print_exc()
37 37 raise NoRepo(err.args[0])
38 38 else:
39 39 try:
40 40 ui.status(_('initializing destination %s repository\n') % path)
41 41 self.repo = hg.repository(self.ui, path, create=True)
42 42 if not self.repo.local():
43 43 raise NoRepo(_('%s is not a local Mercurial repo') % path)
44 44 self.created.append(path)
45 45 except hg.RepoError, err:
46 46 ui.print_exc()
47 47 raise NoRepo("could not create hg repo %s as sink" % path)
48 48 self.lock = None
49 49 self.wlock = None
50 50 self.filemapmode = False
51 51
52 52 def before(self):
53 53 self.ui.debug(_('run hg sink pre-conversion action\n'))
54 54 self.wlock = self.repo.wlock()
55 55 self.lock = self.repo.lock()
56 56 self.repo.dirstate.clear()
57 57
58 58 def after(self):
59 59 self.ui.debug(_('run hg sink post-conversion action\n'))
60 60 self.repo.dirstate.invalidate()
61 61 self.lock = None
62 62 self.wlock = None
63 63
64 64 def revmapfile(self):
65 65 return os.path.join(self.path, ".hg", "shamap")
66 66
67 67 def authorfile(self):
68 68 return os.path.join(self.path, ".hg", "authormap")
69 69
70 70 def getheads(self):
71 71 h = self.repo.changelog.heads()
72 72 return [ hex(x) for x in h ]
73 73
74 74 def putfile(self, f, e, data):
75 75 self.repo.wwrite(f, data, e)
76 76 if f not in self.repo.dirstate:
77 77 self.repo.dirstate.normallookup(f)
78 78
79 79 def copyfile(self, source, dest):
80 80 self.repo.copy(source, dest)
81 81
82 82 def delfile(self, f):
83 83 try:
84 84 util.unlink(self.repo.wjoin(f))
85 85 #self.repo.remove([f])
86 86 except OSError:
87 87 pass
88 88
89 89 def setbranch(self, branch, pbranches):
90 90 if not self.clonebranches:
91 91 return
92 92
93 93 setbranch = (branch != self.lastbranch)
94 94 self.lastbranch = branch
95 95 if not branch:
96 96 branch = 'default'
97 97 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
98 98 pbranch = pbranches and pbranches[0][1] or 'default'
99 99
100 100 branchpath = os.path.join(self.path, branch)
101 101 if setbranch:
102 102 self.after()
103 103 try:
104 104 self.repo = hg.repository(self.ui, branchpath)
105 105 except:
106 106 self.repo = hg.repository(self.ui, branchpath, create=True)
107 107 self.before()
108 108
109 109 # pbranches may bring revisions from other branches (merge parents)
110 110 # Make sure we have them, or pull them.
111 111 missings = {}
112 112 for b in pbranches:
113 113 try:
114 114 self.repo.lookup(b[0])
115 115 except:
116 116 missings.setdefault(b[1], []).append(b[0])
117 117
118 118 if missings:
119 119 self.after()
120 120 for pbranch, heads in missings.iteritems():
121 121 pbranchpath = os.path.join(self.path, pbranch)
122 122 prepo = hg.repository(self.ui, pbranchpath)
123 123 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
124 124 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
125 125 self.before()
126 126
127 127 def putcommit(self, files, parents, commit):
128 128 seen = {}
129 129 pl = []
130 130 for p in parents:
131 131 if p not in seen:
132 132 pl.append(p)
133 133 seen[p] = 1
134 134 parents = pl
135 135 nparents = len(parents)
136 136 if self.filemapmode and nparents == 1:
137 137 m1node = self.repo.changelog.read(bin(parents[0]))[0]
138 138 parent = parents[0]
139 139
140 140 if len(parents) < 2: parents.append("0" * 40)
141 141 if len(parents) < 2: parents.append("0" * 40)
142 142 p2 = parents.pop(0)
143 143
144 144 text = commit.desc
145 145 extra = commit.extra.copy()
146 146 if self.branchnames and commit.branch:
147 147 extra['branch'] = commit.branch
148 148 if commit.rev:
149 149 extra['convert_revision'] = commit.rev
150 150
151 151 while parents:
152 152 p1 = p2
153 153 p2 = parents.pop(0)
154 154 a = self.repo.rawcommit(files, text, commit.author, commit.date,
155 155 bin(p1), bin(p2), extra=extra)
156 156 self.repo.dirstate.clear()
157 157 text = "(octopus merge fixup)\n"
158 158 p2 = hg.hex(self.repo.changelog.tip())
159 159
160 160 if self.filemapmode and nparents == 1:
161 161 man = self.repo.manifest
162 162 mnode = self.repo.changelog.read(bin(p2))[0]
163 163 if not man.cmp(m1node, man.revision(mnode)):
164 164 self.repo.rollback()
165 165 self.repo.dirstate.clear()
166 166 return parent
167 167 return p2
168 168
169 169 def puttags(self, tags):
170 170 try:
171 171 old = self.repo.wfile(".hgtags").read()
172 172 oldlines = old.splitlines(1)
173 173 oldlines.sort()
174 174 except:
175 175 oldlines = []
176 176
177 177 k = tags.keys()
178 178 k.sort()
179 179 newlines = []
180 180 for tag in k:
181 181 newlines.append("%s %s\n" % (tags[tag], tag))
182 182
183 183 newlines.sort()
184 184
185 185 if newlines != oldlines:
186 186 self.ui.status("updating tags\n")
187 187 f = self.repo.wfile(".hgtags", "w")
188 188 f.write("".join(newlines))
189 189 f.close()
190 190 if not oldlines: self.repo.add([".hgtags"])
191 191 date = "%s 0" % int(time.mktime(time.gmtime()))
192 192 extra = {}
193 193 if self.tagsbranch != 'default':
194 194 extra['branch'] = self.tagsbranch
195 195 try:
196 196 tagparent = self.repo.changectx(self.tagsbranch).node()
197 197 except hg.RepoError, inst:
198 198 tagparent = nullid
199 199 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
200 200 date, tagparent, nullid, extra=extra)
201 201 return hex(self.repo.changelog.tip())
202 202
203 203 def setfilemapmode(self, active):
204 204 self.filemapmode = active
205 205
206 206 class mercurial_source(converter_source):
207 207 def __init__(self, ui, path, rev=None):
208 208 converter_source.__init__(self, ui, path, rev)
209 209 self.saverev = ui.configbool('convert', 'hg.saverev', True)
210 210 try:
211 211 self.repo = hg.repository(self.ui, path)
212 212 # try to provoke an exception if this isn't really a hg
213 213 # repo, but some other bogus compatible-looking url
214 214 if not self.repo.local():
215 215 raise hg.RepoError()
216 216 except hg.RepoError:
217 217 ui.print_exc()
218 218 raise NoRepo("%s is not a local Mercurial repo" % path)
219 219 self.lastrev = None
220 220 self.lastctx = None
221 221 self._changescache = None
222 222 self.convertfp = None
223 223
224 224 def changectx(self, rev):
225 225 if self.lastrev != rev:
226 226 self.lastctx = self.repo.changectx(rev)
227 227 self.lastrev = rev
228 228 return self.lastctx
229 229
230 230 def getheads(self):
231 231 if self.rev:
232 232 return [hex(self.repo.changectx(self.rev).node())]
233 233 else:
234 234 return [hex(node) for node in self.repo.heads()]
235 235
236 236 def getfile(self, name, rev):
237 237 try:
238 238 return self.changectx(rev).filectx(name).data()
239 239 except revlog.LookupError, err:
240 240 raise IOError(err)
241 241
242 242 def getmode(self, name, rev):
243 243 m = self.changectx(rev).manifest()
244 244 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
245 245
246 246 def getchanges(self, rev):
247 247 ctx = self.changectx(rev)
248 248 if self._changescache and self._changescache[0] == rev:
249 249 m, a, r = self._changescache[1]
250 250 else:
251 251 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
252 252 changes = [(name, rev) for name in m + a + r]
253 253 changes.sort()
254 254 return (changes, self.getcopies(ctx, m + a))
255 255
256 256 def getcopies(self, ctx, files):
257 257 copies = {}
258 258 for name in files:
259 259 try:
260 260 copies[name] = ctx.filectx(name).renamed()[0]
261 261 except TypeError:
262 262 pass
263 263 return copies
264 264
265 265 def getcommit(self, rev):
266 266 ctx = self.changectx(rev)
267 267 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
268 268 if self.saverev:
269 269 crev = rev
270 270 else:
271 271 crev = None
272 272 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
273 273 desc=ctx.description(), rev=crev, parents=parents,
274 274 branch=ctx.branch(), extra=ctx.extra())
275 275
276 276 def gettags(self):
277 277 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
278 278 return dict([(name, hex(node)) for name, node in tags])
279 279
280 280 def getchangedfiles(self, rev, i):
281 281 ctx = self.changectx(rev)
282 282 i = i or 0
283 283 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
284 284
285 285 if i == 0:
286 286 self._changescache = (rev, changes)
287 287
288 288 return changes[0] + changes[1] + changes[2]
289 289
290 290 def converted(self, rev, destrev):
291 291 if self.convertfp is None:
292 292 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
293 293 'a')
294 294 self.convertfp.write('%s %s\n' % (destrev, rev))
295 295 self.convertfp.flush()
296 296
297 297 def before(self):
298 298 self.ui.debug(_('run hg source pre-conversion action\n'))
299 299
300 300 def after(self):
301 301 self.ui.debug(_('run hg source post-conversion action\n'))
@@ -1,251 +1,251 b''
1 1 # extdiff.py - external diff program support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''
9 9 The `extdiff' Mercurial extension allows you to use external programs
10 10 to compare revisions, or revision with working dir. The external diff
11 11 programs are called with a configurable set of options and two
12 12 non-option arguments: paths to directories containing snapshots of
13 13 files to compare.
14 14
15 15 To enable this extension:
16 16
17 17 [extensions]
18 18 hgext.extdiff =
19 19
20 20 The `extdiff' extension also allows to configure new diff commands, so
21 21 you do not need to type "hg extdiff -p kdiff3" always.
22 22
23 23 [extdiff]
24 24 # add new command that runs GNU diff(1) in 'context diff' mode
25 25 cdiff = gdiff -Nprc5
26 26 ## or the old way:
27 27 #cmd.cdiff = gdiff
28 28 #opts.cdiff = -Nprc5
29 29
30 30 # add new command called vdiff, runs kdiff3
31 31 vdiff = kdiff3
32 32
33 33 # add new command called meld, runs meld (no need to name twice)
34 34 meld =
35 35
36 36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
37 37 #(see http://www.vim.org/scripts/script.php?script_id=102)
38 38 # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
39 39 # your .vimrc
40 40 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
41 41
42 42 You can use -I/-X and list of file or directory names like normal
43 43 "hg diff" command. The `extdiff' extension makes snapshots of only
44 44 needed files, so running the external diff program will actually be
45 45 pretty fast (at least faster than having to compare the entire tree).
46 46 '''
47 47
48 48 from mercurial.i18n import _
49 from mercurial.node import *
49 from mercurial.node import short
50 50 from mercurial import cmdutil, util, commands
51 51 import os, shlex, shutil, tempfile
52 52
53 53 def snapshot_node(ui, repo, files, node, tmproot):
54 54 '''snapshot files as of some revision'''
55 55 mf = repo.changectx(node).manifest()
56 56 dirname = os.path.basename(repo.root)
57 57 if dirname == "":
58 58 dirname = "root"
59 59 dirname = '%s.%s' % (dirname, short(node))
60 60 base = os.path.join(tmproot, dirname)
61 61 os.mkdir(base)
62 62 ui.note(_('making snapshot of %d files from rev %s\n') %
63 63 (len(files), short(node)))
64 64 for fn in files:
65 65 if not fn in mf:
66 66 # skipping new file after a merge ?
67 67 continue
68 68 wfn = util.pconvert(fn)
69 69 ui.note(' %s\n' % wfn)
70 70 dest = os.path.join(base, wfn)
71 71 destdir = os.path.dirname(dest)
72 72 if not os.path.isdir(destdir):
73 73 os.makedirs(destdir)
74 74 data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
75 75 open(dest, 'wb').write(data)
76 76 return dirname
77 77
78 78
79 79 def snapshot_wdir(ui, repo, files, tmproot):
80 80 '''snapshot files from working directory.
81 81 if not using snapshot, -I/-X does not work and recursive diff
82 82 in tools like kdiff3 and meld displays too many files.'''
83 83 repo_root = repo.root
84 84
85 85 dirname = os.path.basename(repo_root)
86 86 if dirname == "":
87 87 dirname = "root"
88 88 base = os.path.join(tmproot, dirname)
89 89 os.mkdir(base)
90 90 ui.note(_('making snapshot of %d files from working dir\n') %
91 91 (len(files)))
92 92
93 93 fns_and_mtime = []
94 94
95 95 for fn in files:
96 96 wfn = util.pconvert(fn)
97 97 ui.note(' %s\n' % wfn)
98 98 dest = os.path.join(base, wfn)
99 99 destdir = os.path.dirname(dest)
100 100 if not os.path.isdir(destdir):
101 101 os.makedirs(destdir)
102 102
103 103 fp = open(dest, 'wb')
104 104 for chunk in util.filechunkiter(repo.wopener(wfn)):
105 105 fp.write(chunk)
106 106 fp.close()
107 107
108 108 fns_and_mtime.append((dest, os.path.join(repo_root, fn),
109 109 os.path.getmtime(dest)))
110 110
111 111
112 112 return dirname, fns_and_mtime
113 113
114 114
115 115 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
116 116 '''Do the actuall diff:
117 117
118 118 - copy to a temp structure if diffing 2 internal revisions
119 119 - copy to a temp structure if diffing working revision with
120 120 another one and more than 1 file is changed
121 121 - just invoke the diff for a single file in the working dir
122 122 '''
123 123 node1, node2 = cmdutil.revpair(repo, opts['rev'])
124 124 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
125 125 modified, added, removed, deleted, unknown = repo.status(
126 126 node1, node2, files, match=matchfn)[:5]
127 127 if not (modified or added or removed):
128 128 return 0
129 129
130 130 tmproot = tempfile.mkdtemp(prefix='extdiff.')
131 131 dir2root = ''
132 132 try:
133 133 # Always make a copy of node1
134 134 dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
135 135 changes = len(modified) + len(removed) + len(added)
136 136
137 137 fns_and_mtime = []
138 138
139 139 # If node2 in not the wc or there is >1 change, copy it
140 140 if node2:
141 141 dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
142 142 elif changes > 1:
143 143 #we only actually need to get the files to copy back to the working
144 144 #dir in this case (because the other cases are: diffing 2 revisions
145 145 #or single file -- in which case the file is already directly passed
146 146 #to the diff tool).
147 147 dir2, fns_and_mtime = snapshot_wdir(ui, repo, modified + added, tmproot)
148 148 else:
149 149 # This lets the diff tool open the changed file directly
150 150 dir2 = ''
151 151 dir2root = repo.root
152 152
153 153 # If only one change, diff the files instead of the directories
154 154 if changes == 1 :
155 155 if len(modified):
156 156 dir1 = os.path.join(dir1, util.localpath(modified[0]))
157 157 dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
158 158 elif len(removed) :
159 159 dir1 = os.path.join(dir1, util.localpath(removed[0]))
160 160 dir2 = os.devnull
161 161 else:
162 162 dir1 = os.devnull
163 163 dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
164 164
165 165 cmdline = ('%s %s %s %s' %
166 166 (util.shellquote(diffcmd), ' '.join(diffopts),
167 167 util.shellquote(dir1), util.shellquote(dir2)))
168 168 ui.debug('running %r in %s\n' % (cmdline, tmproot))
169 169 util.system(cmdline, cwd=tmproot)
170 170
171 171 for copy_fn, working_fn, mtime in fns_and_mtime:
172 172 if os.path.getmtime(copy_fn) != mtime:
173 173 ui.debug('File changed while diffing. '
174 174 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
175 175 util.copyfile(copy_fn, working_fn)
176 176
177 177 return 1
178 178 finally:
179 179 ui.note(_('cleaning up temp directory\n'))
180 180 shutil.rmtree(tmproot)
181 181
182 182 def extdiff(ui, repo, *pats, **opts):
183 183 '''use external program to diff repository (or selected files)
184 184
185 185 Show differences between revisions for the specified files, using
186 186 an external program. The default program used is diff, with
187 187 default options "-Npru".
188 188
189 189 To select a different program, use the -p option. The program
190 190 will be passed the names of two directories to compare. To pass
191 191 additional options to the program, use the -o option. These will
192 192 be passed before the names of the directories to compare.
193 193
194 194 When two revision arguments are given, then changes are
195 195 shown between those revisions. If only one revision is
196 196 specified then that revision is compared to the working
197 197 directory, and, when no revisions are specified, the
198 198 working directory files are compared to its parent.'''
199 199 program = opts['program'] or 'diff'
200 200 if opts['program']:
201 201 option = opts['option']
202 202 else:
203 203 option = opts['option'] or ['-Npru']
204 204 return dodiff(ui, repo, program, option, pats, opts)
205 205
206 206 cmdtable = {
207 207 "extdiff":
208 208 (extdiff,
209 209 [('p', 'program', '', _('comparison program to run')),
210 210 ('o', 'option', [], _('pass option to comparison program')),
211 211 ('r', 'rev', [], _('revision')),
212 212 ] + commands.walkopts,
213 213 _('hg extdiff [OPT]... [FILE]...')),
214 214 }
215 215
216 216 def uisetup(ui):
217 217 for cmd, path in ui.configitems('extdiff'):
218 218 if cmd.startswith('cmd.'):
219 219 cmd = cmd[4:]
220 220 if not path: path = cmd
221 221 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
222 222 diffopts = diffopts and [diffopts] or []
223 223 elif cmd.startswith('opts.'):
224 224 continue
225 225 else:
226 226 # command = path opts
227 227 if path:
228 228 diffopts = shlex.split(path)
229 229 path = diffopts.pop(0)
230 230 else:
231 231 path, diffopts = cmd, []
232 232 def save(cmd, path, diffopts):
233 233 '''use closure to save diff command to use'''
234 234 def mydiff(ui, repo, *pats, **opts):
235 235 return dodiff(ui, repo, path, diffopts, pats, opts)
236 236 mydiff.__doc__ = '''use %(path)s to diff repository (or selected files)
237 237
238 238 Show differences between revisions for the specified
239 239 files, using the %(path)s program.
240 240
241 241 When two revision arguments are given, then changes are
242 242 shown between those revisions. If only one revision is
243 243 specified then that revision is compared to the working
244 244 directory, and, when no revisions are specified, the
245 245 working directory files are compared to its parent.''' % {
246 246 'path': util.uirepr(path),
247 247 }
248 248 return mydiff
249 249 cmdtable[cmd] = (save(cmd, path, diffopts),
250 250 cmdtable['extdiff'][1][1:],
251 251 _('hg %s [OPTION]... [FILE]...') % cmd)
@@ -1,123 +1,123 b''
1 1 # fetch.py - pull and merge remote changes
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 from mercurial.node import *
9 from mercurial.node import nullid, short
10 10 from mercurial import commands, cmdutil, hg, node, util
11 11
12 12 def fetch(ui, repo, source='default', **opts):
13 13 '''Pull changes from a remote repository, merge new changes if needed.
14 14
15 15 This finds all changes from the repository at the specified path
16 16 or URL and adds them to the local repository.
17 17
18 18 If the pulled changes add a new head, the head is automatically
19 19 merged, and the result of the merge is committed. Otherwise, the
20 20 working directory is updated to include the new changes.
21 21
22 22 When a merge occurs, the newly pulled changes are assumed to be
23 23 "authoritative". The head of the new changes is used as the first
24 24 parent, with local changes as the second. To switch the merge
25 25 order, use --switch-parent.
26 26
27 27 See 'hg help dates' for a list of formats valid for -d/--date.
28 28 '''
29 29
30 30 def postincoming(other, modheads):
31 31 if modheads == 0:
32 32 return 0
33 33 if modheads == 1:
34 34 return hg.clean(repo, repo.changelog.tip())
35 35 newheads = repo.heads(parent)
36 36 newchildren = [n for n in repo.heads(parent) if n != parent]
37 37 newparent = parent
38 38 if newchildren:
39 39 newparent = newchildren[0]
40 40 hg.clean(repo, newparent)
41 41 newheads = [n for n in repo.heads() if n != newparent]
42 42 if len(newheads) > 1:
43 43 ui.status(_('not merging with %d other new heads '
44 44 '(use "hg heads" and "hg merge" to merge them)') %
45 45 (len(newheads) - 1))
46 46 return
47 47 err = False
48 48 if newheads:
49 49 # By default, we consider the repository we're pulling
50 50 # *from* as authoritative, so we merge our changes into
51 51 # theirs.
52 52 if opts['switch_parent']:
53 53 firstparent, secondparent = newparent, newheads[0]
54 54 else:
55 55 firstparent, secondparent = newheads[0], newparent
56 56 ui.status(_('updating to %d:%s\n') %
57 57 (repo.changelog.rev(firstparent),
58 58 short(firstparent)))
59 59 hg.clean(repo, firstparent)
60 60 ui.status(_('merging with %d:%s\n') %
61 61 (repo.changelog.rev(secondparent), short(secondparent)))
62 62 err = hg.merge(repo, secondparent, remind=False)
63 63 if not err:
64 64 mod, add, rem = repo.status()[:3]
65 65 message = (cmdutil.logmessage(opts) or
66 66 (_('Automated merge with %s') %
67 67 util.removeauth(other.url())))
68 68 n = repo.commit(mod + add + rem, message,
69 69 opts['user'], opts['date'],
70 70 force_editor=opts.get('force_editor'))
71 71 ui.status(_('new changeset %d:%s merges remote changes '
72 72 'with local\n') % (repo.changelog.rev(n),
73 73 short(n)))
74 74
75 75 def pull():
76 76 cmdutil.setremoteconfig(ui, opts)
77 77
78 78 other = hg.repository(ui, ui.expandpath(source))
79 79 ui.status(_('pulling from %s\n') %
80 80 util.hidepassword(ui.expandpath(source)))
81 81 revs = None
82 82 if opts['rev']:
83 83 if not other.local():
84 84 raise util.Abort(_("fetch -r doesn't work for remote "
85 85 "repositories yet"))
86 86 else:
87 87 revs = [other.lookup(rev) for rev in opts['rev']]
88 88 modheads = repo.pull(other, heads=revs)
89 89 return postincoming(other, modheads)
90 90
91 91 date = opts.get('date')
92 92 if date:
93 93 opts['date'] = util.parsedate(date)
94 94
95 95 parent, p2 = repo.dirstate.parents()
96 96 if parent != repo.changelog.tip():
97 97 raise util.Abort(_('working dir not at tip '
98 98 '(use "hg update" to check out tip)'))
99 99 if p2 != nullid:
100 100 raise util.Abort(_('outstanding uncommitted merge'))
101 101 wlock = lock = None
102 102 try:
103 103 wlock = repo.wlock()
104 104 lock = repo.lock()
105 105 mod, add, rem = repo.status()[:3]
106 106 if mod or add or rem:
107 107 raise util.Abort(_('outstanding uncommitted changes'))
108 108 if len(repo.heads()) > 1:
109 109 raise util.Abort(_('multiple heads in this repository '
110 110 '(use "hg heads" and "hg merge" to merge)'))
111 111 return pull()
112 112 finally:
113 113 del lock, wlock
114 114
115 115 cmdtable = {
116 116 'fetch':
117 117 (fetch,
118 118 [('r', 'rev', [], _('a specific revision you would like to pull')),
119 119 ('f', 'force-editor', None, _('edit commit message')),
120 120 ('', 'switch-parent', None, _('switch parents when merging')),
121 121 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
122 122 _('hg fetch [SOURCE]')),
123 123 }
@@ -1,406 +1,406 b''
1 1 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
2 2 # Published under the GNU GPL
3 3
4 4 '''
5 5 imerge - interactive merge
6 6 '''
7 7
8 8 from mercurial.i18n import _
9 from mercurial.node import *
9 from mercurial.node import hex, short
10 10 from mercurial import commands, cmdutil, dispatch, fancyopts
11 11 from mercurial import hg, filemerge, util
12 12 import os, tarfile
13 13
14 14 class InvalidStateFileException(Exception): pass
15 15
16 16 class ImergeStateFile(object):
17 17 def __init__(self, im):
18 18 self.im = im
19 19
20 20 def save(self, dest):
21 21 tf = tarfile.open(dest, 'w:gz')
22 22
23 23 st = os.path.join(self.im.path, 'status')
24 24 tf.add(st, os.path.join('.hg', 'imerge', 'status'))
25 25
26 26 for f in self.im.resolved:
27 27 (fd, fo) = self.im.conflicts[f]
28 28 abssrc = self.im.repo.wjoin(fd)
29 29 tf.add(abssrc, fd)
30 30
31 31 tf.close()
32 32
33 33 def load(self, source):
34 34 wlock = self.im.repo.wlock()
35 35 lock = self.im.repo.lock()
36 36
37 37 tf = tarfile.open(source, 'r')
38 38 contents = tf.getnames()
39 39 # tarfile normalizes path separators to '/'
40 40 statusfile = '.hg/imerge/status'
41 41 if statusfile not in contents:
42 42 raise InvalidStateFileException('no status file')
43 43
44 44 tf.extract(statusfile, self.im.repo.root)
45 45 p1, p2 = self.im.load()
46 46 if self.im.repo.dirstate.parents()[0] != p1.node():
47 47 hg.clean(self.im.repo, p1.node())
48 48 self.im.start(p2.node())
49 49 for tarinfo in tf:
50 50 tf.extract(tarinfo, self.im.repo.root)
51 51 self.im.load()
52 52
53 53 class Imerge(object):
54 54 def __init__(self, ui, repo):
55 55 self.ui = ui
56 56 self.repo = repo
57 57
58 58 self.path = repo.join('imerge')
59 59 self.opener = util.opener(self.path)
60 60
61 61 self.wctx = self.repo.workingctx()
62 62 self.conflicts = {}
63 63 self.resolved = []
64 64
65 65 def merging(self):
66 66 return len(self.wctx.parents()) > 1
67 67
68 68 def load(self):
69 69 # status format. \0-delimited file, fields are
70 70 # p1, p2, conflict count, conflict filenames, resolved filenames
71 71 # conflict filenames are tuples of localname, remoteorig, remotenew
72 72
73 73 statusfile = self.opener('status')
74 74
75 75 status = statusfile.read().split('\0')
76 76 if len(status) < 3:
77 77 raise util.Abort('invalid imerge status file')
78 78
79 79 try:
80 80 parents = [self.repo.changectx(n) for n in status[:2]]
81 81 except LookupError:
82 82 raise util.Abort('merge parent %s not in repository' % short(p))
83 83
84 84 status = status[2:]
85 85 conflicts = int(status.pop(0)) * 3
86 86 self.resolved = status[conflicts:]
87 87 for i in xrange(0, conflicts, 3):
88 88 self.conflicts[status[i]] = (status[i+1], status[i+2])
89 89
90 90 return parents
91 91
92 92 def save(self):
93 93 lock = self.repo.lock()
94 94
95 95 if not os.path.isdir(self.path):
96 96 os.mkdir(self.path)
97 97 statusfile = self.opener('status', 'wb')
98 98
99 99 out = [hex(n.node()) for n in self.wctx.parents()]
100 100 out.append(str(len(self.conflicts)))
101 101 conflicts = self.conflicts.items()
102 102 conflicts.sort()
103 103 for fw, fd_fo in conflicts:
104 104 out.append(fw)
105 105 out.extend(fd_fo)
106 106 out.extend(self.resolved)
107 107
108 108 statusfile.write('\0'.join(out))
109 109
110 110 def remaining(self):
111 111 return [f for f in self.conflicts if f not in self.resolved]
112 112
113 113 def filemerge(self, fn, interactive=True):
114 114 wlock = self.repo.wlock()
115 115
116 116 (fd, fo) = self.conflicts[fn]
117 117 p1, p2 = self.wctx.parents()
118 118
119 119 # this could be greatly improved
120 120 realmerge = os.environ.get('HGMERGE')
121 121 if not interactive:
122 122 os.environ['HGMERGE'] = 'merge'
123 123
124 124 # The filemerge ancestor algorithm does not work if self.wctx
125 125 # already has two parents (in normal merge it doesn't yet). But
126 126 # this is very dirty.
127 127 self.wctx._parents.pop()
128 128 try:
129 129 # TODO: we should probably revert the file if merge fails
130 130 return filemerge.filemerge(self.repo, fn, fd, fo, self.wctx, p2)
131 131 finally:
132 132 self.wctx._parents.append(p2)
133 133 if realmerge:
134 134 os.environ['HGMERGE'] = realmerge
135 135 elif not interactive:
136 136 del os.environ['HGMERGE']
137 137
138 138 def start(self, rev=None):
139 139 _filemerge = filemerge.filemerge
140 140 def filemerge_(repo, fw, fd, fo, wctx, mctx):
141 141 self.conflicts[fw] = (fd, fo)
142 142
143 143 filemerge.filemerge = filemerge_
144 144 commands.merge(self.ui, self.repo, rev=rev)
145 145 filemerge.filemerge = _filemerge
146 146
147 147 self.wctx = self.repo.workingctx()
148 148 self.save()
149 149
150 150 def resume(self):
151 151 self.load()
152 152
153 153 dp = self.repo.dirstate.parents()
154 154 p1, p2 = self.wctx.parents()
155 155 if p1.node() != dp[0] or p2.node() != dp[1]:
156 156 raise util.Abort('imerge state does not match working directory')
157 157
158 158 def next(self):
159 159 remaining = self.remaining()
160 160 return remaining and remaining[0]
161 161
162 162 def resolve(self, files):
163 163 resolved = dict.fromkeys(self.resolved)
164 164 for fn in files:
165 165 if fn not in self.conflicts:
166 166 raise util.Abort('%s is not in the merge set' % fn)
167 167 resolved[fn] = True
168 168 self.resolved = resolved.keys()
169 169 self.resolved.sort()
170 170 self.save()
171 171 return 0
172 172
173 173 def unresolve(self, files):
174 174 resolved = dict.fromkeys(self.resolved)
175 175 for fn in files:
176 176 if fn not in resolved:
177 177 raise util.Abort('%s is not resolved' % fn)
178 178 del resolved[fn]
179 179 self.resolved = resolved.keys()
180 180 self.resolved.sort()
181 181 self.save()
182 182 return 0
183 183
184 184 def pickle(self, dest):
185 185 '''write current merge state to file to be resumed elsewhere'''
186 186 state = ImergeStateFile(self)
187 187 return state.save(dest)
188 188
189 189 def unpickle(self, source):
190 190 '''read merge state from file'''
191 191 state = ImergeStateFile(self)
192 192 return state.load(source)
193 193
194 194 def load(im, source):
195 195 if im.merging():
196 196 raise util.Abort('there is already a merge in progress '
197 197 '(update -C <rev> to abort it)' )
198 198 m, a, r, d = im.repo.status()[:4]
199 199 if m or a or r or d:
200 200 raise util.Abort('working directory has uncommitted changes')
201 201
202 202 rc = im.unpickle(source)
203 203 if not rc:
204 204 status(im)
205 205 return rc
206 206
207 207 def merge_(im, filename=None, auto=False):
208 208 success = True
209 209 if auto and not filename:
210 210 for fn in im.remaining():
211 211 rc = im.filemerge(fn, interactive=False)
212 212 if rc:
213 213 success = False
214 214 else:
215 215 im.resolve([fn])
216 216 if success:
217 217 im.ui.write('all conflicts resolved\n')
218 218 else:
219 219 status(im)
220 220 return 0
221 221
222 222 if not filename:
223 223 filename = im.next()
224 224 if not filename:
225 225 im.ui.write('all conflicts resolved\n')
226 226 return 0
227 227
228 228 rc = im.filemerge(filename, interactive=not auto)
229 229 if not rc:
230 230 im.resolve([filename])
231 231 if not im.next():
232 232 im.ui.write('all conflicts resolved\n')
233 233 return rc
234 234
235 235 def next(im):
236 236 n = im.next()
237 237 if n:
238 238 im.ui.write('%s\n' % n)
239 239 else:
240 240 im.ui.write('all conflicts resolved\n')
241 241 return 0
242 242
243 243 def resolve(im, *files):
244 244 if not files:
245 245 raise util.Abort('resolve requires at least one filename')
246 246 return im.resolve(files)
247 247
248 248 def save(im, dest):
249 249 return im.pickle(dest)
250 250
251 251 def status(im, **opts):
252 252 if not opts.get('resolved') and not opts.get('unresolved'):
253 253 opts['resolved'] = True
254 254 opts['unresolved'] = True
255 255
256 256 if im.ui.verbose:
257 257 p1, p2 = [short(p.node()) for p in im.wctx.parents()]
258 258 im.ui.note(_('merging %s and %s\n') % (p1, p2))
259 259
260 260 conflicts = im.conflicts.keys()
261 261 conflicts.sort()
262 262 remaining = dict.fromkeys(im.remaining())
263 263 st = []
264 264 for fn in conflicts:
265 265 if opts.get('no_status'):
266 266 mode = ''
267 267 elif fn in remaining:
268 268 mode = 'U '
269 269 else:
270 270 mode = 'R '
271 271 if ((opts.get('resolved') and fn not in remaining)
272 272 or (opts.get('unresolved') and fn in remaining)):
273 273 st.append((mode, fn))
274 274 st.sort()
275 275 for (mode, fn) in st:
276 276 if im.ui.verbose:
277 277 fo, fd = im.conflicts[fn]
278 278 if fd != fn:
279 279 fn = '%s (%s)' % (fn, fd)
280 280 im.ui.write('%s%s\n' % (mode, fn))
281 281 if opts.get('unresolved') and not remaining:
282 282 im.ui.write(_('all conflicts resolved\n'))
283 283
284 284 return 0
285 285
286 286 def unresolve(im, *files):
287 287 if not files:
288 288 raise util.Abort('unresolve requires at least one filename')
289 289 return im.unresolve(files)
290 290
291 291 subcmdtable = {
292 292 'load': (load, []),
293 293 'merge':
294 294 (merge_,
295 295 [('a', 'auto', None, _('automatically resolve if possible'))]),
296 296 'next': (next, []),
297 297 'resolve': (resolve, []),
298 298 'save': (save, []),
299 299 'status':
300 300 (status,
301 301 [('n', 'no-status', None, _('hide status prefix')),
302 302 ('', 'resolved', None, _('only show resolved conflicts')),
303 303 ('', 'unresolved', None, _('only show unresolved conflicts'))]),
304 304 'unresolve': (unresolve, [])
305 305 }
306 306
307 307 def dispatch_(im, args, opts):
308 308 def complete(s, choices):
309 309 candidates = []
310 310 for choice in choices:
311 311 if choice.startswith(s):
312 312 candidates.append(choice)
313 313 return candidates
314 314
315 315 c, args = args[0], list(args[1:])
316 316 cmd = complete(c, subcmdtable.keys())
317 317 if not cmd:
318 318 raise cmdutil.UnknownCommand('imerge ' + c)
319 319 if len(cmd) > 1:
320 320 cmd.sort()
321 321 raise cmdutil.AmbiguousCommand('imerge ' + c, cmd)
322 322 cmd = cmd[0]
323 323
324 324 func, optlist = subcmdtable[cmd]
325 325 opts = {}
326 326 try:
327 327 args = fancyopts.fancyopts(args, optlist, opts)
328 328 return func(im, *args, **opts)
329 329 except fancyopts.getopt.GetoptError, inst:
330 330 raise dispatch.ParseError('imerge', '%s: %s' % (cmd, inst))
331 331 except TypeError:
332 332 raise dispatch.ParseError('imerge', _('%s: invalid arguments') % cmd)
333 333
334 334 def imerge(ui, repo, *args, **opts):
335 335 '''interactive merge
336 336
337 337 imerge lets you split a merge into pieces. When you start a merge
338 338 with imerge, the names of all files with conflicts are recorded.
339 339 You can then merge any of these files, and if the merge is
340 340 successful, they will be marked as resolved. When all files are
341 341 resolved, the merge is complete.
342 342
343 343 If no merge is in progress, hg imerge [rev] will merge the working
344 344 directory with rev (defaulting to the other head if the repository
345 345 only has two heads). You may also resume a saved merge with
346 346 hg imerge load <file>.
347 347
348 348 If a merge is in progress, hg imerge will default to merging the
349 349 next unresolved file.
350 350
351 351 The following subcommands are available:
352 352
353 353 status:
354 354 show the current state of the merge
355 355 options:
356 356 -n --no-status: do not print the status prefix
357 357 --resolved: only print resolved conflicts
358 358 --unresolved: only print unresolved conflicts
359 359 next:
360 360 show the next unresolved file merge
361 361 merge [<file>]:
362 362 merge <file>. If the file merge is successful, the file will be
363 363 recorded as resolved. If no file is given, the next unresolved
364 364 file will be merged.
365 365 resolve <file>...:
366 366 mark files as successfully merged
367 367 unresolve <file>...:
368 368 mark files as requiring merging.
369 369 save <file>:
370 370 save the state of the merge to a file to be resumed elsewhere
371 371 load <file>:
372 372 load the state of the merge from a file created by save
373 373 '''
374 374
375 375 im = Imerge(ui, repo)
376 376
377 377 if im.merging():
378 378 im.resume()
379 379 else:
380 380 rev = opts.get('rev')
381 381 if rev and args:
382 382 raise util.Abort('please specify just one revision')
383 383
384 384 if len(args) == 2 and args[0] == 'load':
385 385 pass
386 386 else:
387 387 if args:
388 388 rev = args[0]
389 389 im.start(rev=rev)
390 390 if opts.get('auto'):
391 391 args = ['merge', '--auto']
392 392 else:
393 393 args = ['status']
394 394
395 395 if not args:
396 396 args = ['merge']
397 397
398 398 return dispatch_(im, args, opts)
399 399
400 400 cmdtable = {
401 401 '^imerge':
402 402 (imerge,
403 403 [('r', 'rev', '', _('revision to merge')),
404 404 ('a', 'auto', None, _('automatically merge where possible'))],
405 405 'hg imerge [command]')
406 406 }
@@ -1,556 +1,556 b''
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a DSCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an audience
15 15 # not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Setup in hgrc:
25 25 #
26 26 # [extensions]
27 27 # # enable extension
28 28 # hgext.keyword =
29 29 #
30 30 # Files to act upon/ignore are specified in the [keyword] section.
31 31 # Customized keyword template mappings in the [keywordmaps] section.
32 32 #
33 33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34 34
35 35 '''keyword expansion in local repositories
36 36
37 37 This extension expands RCS/CVS-like or self-customized $Keywords$
38 38 in tracked text files selected by your configuration.
39 39
40 40 Keywords are only expanded in local repositories and not stored in
41 41 the change history. The mechanism can be regarded as a convenience
42 42 for the current user or for archive distribution.
43 43
44 44 Configuration is done in the [keyword] and [keywordmaps] sections
45 45 of hgrc files.
46 46
47 47 Example:
48 48
49 49 [keyword]
50 50 # expand keywords in every python file except those matching "x*"
51 51 **.py =
52 52 x* = ignore
53 53
54 54 Note: the more specific you are in your filename patterns
55 55 the less you lose speed in huge repos.
56 56
57 57 For [keywordmaps] template mapping and expansion demonstration and
58 58 control run "hg kwdemo".
59 59
60 60 An additional date template filter {date|utcdate} is provided.
61 61
62 62 The default template mappings (view with "hg kwdemo -d") can be replaced
63 63 with customized keywords and templates.
64 64 Again, run "hg kwdemo" to control the results of your config changes.
65 65
66 66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 67 the risk of inadvertedly storing expanded keywords in the change history.
68 68
69 69 To force expansion after enabling it, or a configuration change, run
70 70 "hg kwexpand".
71 71
72 72 Also, when committing with the record extension or using mq's qrecord, be aware
73 73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
74 74 question to update keyword expansions after all changes have been checked in.
75 75
76 76 Expansions spanning more than one line and incremental expansions,
77 77 like CVS' $Log$, are not supported. A keyword template map
78 78 "Log = {desc}" expands to the first line of the changeset description.
79 79 '''
80 80
81 81 from mercurial import commands, cmdutil, context, dispatch, filelog, revlog
82 82 from mercurial import patch, localrepo, templater, templatefilters, util
83 83 from mercurial.hgweb import webcommands
84 from mercurial.node import *
84 from mercurial.node import nullid, hex
85 85 from mercurial.i18n import _
86 86 import re, shutil, tempfile, time
87 87
88 88 commands.optionalrepo += ' kwdemo'
89 89
90 90 # hg commands that do not act on keywords
91 91 nokwcommands = ('add addremove bundle copy export grep incoming init'
92 92 ' log outgoing push rename rollback tip'
93 93 ' convert email glog')
94 94
95 95 # hg commands that trigger expansion only when writing to working dir,
96 96 # not when reading filelog, and unexpand when reading from working dir
97 97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
98 98
99 99 def utcdate(date):
100 100 '''Returns hgdate in cvs-like UTC format.'''
101 101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
102 102
103 103
104 104 # make keyword tools accessible
105 105 kwtools = {'templater': None, 'hgcmd': None}
106 106
107 107 # store originals of monkeypatches
108 108 _patchfile_init = patch.patchfile.__init__
109 109 _patch_diff = patch.diff
110 110 _dispatch_parse = dispatch._parse
111 111
112 112 def _kwpatchfile_init(self, ui, fname, missing=False):
113 113 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
114 114 rejects or conflicts due to expanded keywords in working dir.'''
115 115 _patchfile_init(self, ui, fname, missing=missing)
116 116 # shrink keywords read from working dir
117 117 kwt = kwtools['templater']
118 118 self.lines = kwt.shrinklines(self.fname, self.lines)
119 119
120 120 def _kw_diff(repo, node1=None, node2=None, files=None, match=util.always,
121 121 fp=None, changes=None, opts=None):
122 122 '''Monkeypatch patch.diff to avoid expansion except when
123 123 comparing against working dir.'''
124 124 if node2 is not None:
125 125 kwtools['templater'].matcher = util.never
126 126 elif node1 is not None and node1 != repo.changectx().node():
127 127 kwtools['templater'].restrict = True
128 128 _patch_diff(repo, node1=node1, node2=node2, files=files, match=match,
129 129 fp=fp, changes=changes, opts=opts)
130 130
131 131 def _kwweb_changeset(web, req, tmpl):
132 132 '''Wraps webcommands.changeset turning off keyword expansion.'''
133 133 kwtools['templater'].matcher = util.never
134 134 return web.changeset(tmpl, web.changectx(req))
135 135
136 136 def _kwweb_filediff(web, req, tmpl):
137 137 '''Wraps webcommands.filediff turning off keyword expansion.'''
138 138 kwtools['templater'].matcher = util.never
139 139 return web.filediff(tmpl, web.filectx(req))
140 140
141 141 def _kwdispatch_parse(ui, args):
142 142 '''Monkeypatch dispatch._parse to obtain running hg command.'''
143 143 cmd, func, args, options, cmdoptions = _dispatch_parse(ui, args)
144 144 kwtools['hgcmd'] = cmd
145 145 return cmd, func, args, options, cmdoptions
146 146
147 147 # dispatch._parse is run before reposetup, so wrap it here
148 148 dispatch._parse = _kwdispatch_parse
149 149
150 150
151 151 class kwtemplater(object):
152 152 '''
153 153 Sets up keyword templates, corresponding keyword regex, and
154 154 provides keyword substitution functions.
155 155 '''
156 156 templates = {
157 157 'Revision': '{node|short}',
158 158 'Author': '{author|user}',
159 159 'Date': '{date|utcdate}',
160 160 'RCSFile': '{file|basename},v',
161 161 'Source': '{root}/{file},v',
162 162 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
163 163 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
164 164 }
165 165
166 166 def __init__(self, ui, repo, inc, exc):
167 167 self.ui = ui
168 168 self.repo = repo
169 169 self.matcher = util.matcher(repo.root, inc=inc, exc=exc)[1]
170 170 self.restrict = kwtools['hgcmd'] in restricted.split()
171 171
172 172 kwmaps = self.ui.configitems('keywordmaps')
173 173 if kwmaps: # override default templates
174 174 kwmaps = [(k, templater.parsestring(v, quoted=False))
175 175 for (k, v) in kwmaps]
176 176 self.templates = dict(kwmaps)
177 177 escaped = map(re.escape, self.templates.keys())
178 178 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
179 179 self.re_kw = re.compile(kwpat)
180 180
181 181 templatefilters.filters['utcdate'] = utcdate
182 182 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
183 183 False, '', False)
184 184
185 185 def getnode(self, path, fnode):
186 186 '''Derives changenode from file path and filenode.'''
187 187 # used by kwfilelog.read and kwexpand
188 188 c = context.filectx(self.repo, path, fileid=fnode)
189 189 return c.node()
190 190
191 191 def substitute(self, data, path, node, subfunc):
192 192 '''Replaces keywords in data with expanded template.'''
193 193 def kwsub(mobj):
194 194 kw = mobj.group(1)
195 195 self.ct.use_template(self.templates[kw])
196 196 self.ui.pushbuffer()
197 197 self.ct.show(changenode=node, root=self.repo.root, file=path)
198 198 ekw = templatefilters.firstline(self.ui.popbuffer())
199 199 return '$%s: %s $' % (kw, ekw)
200 200 return subfunc(kwsub, data)
201 201
202 202 def expand(self, path, node, data):
203 203 '''Returns data with keywords expanded.'''
204 204 if not self.restrict and self.matcher(path) and not util.binary(data):
205 205 changenode = self.getnode(path, node)
206 206 return self.substitute(data, path, changenode, self.re_kw.sub)
207 207 return data
208 208
209 209 def iskwfile(self, path, islink):
210 210 '''Returns true if path matches [keyword] pattern
211 211 and is not a symbolic link.
212 212 Caveat: localrepository._link fails on Windows.'''
213 213 return self.matcher(path) and not islink(path)
214 214
215 215 def overwrite(self, node=None, expand=True, files=None):
216 216 '''Overwrites selected files expanding/shrinking keywords.'''
217 217 ctx = self.repo.changectx(node)
218 218 mf = ctx.manifest()
219 219 if node is not None: # commit
220 220 files = [f for f in ctx.files() if f in mf]
221 221 notify = self.ui.debug
222 222 else: # kwexpand/kwshrink
223 223 notify = self.ui.note
224 224 candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
225 225 if candidates:
226 226 self.restrict = True # do not expand when reading
227 227 candidates.sort()
228 228 action = expand and 'expanding' or 'shrinking'
229 229 for f in candidates:
230 230 fp = self.repo.file(f)
231 231 data = fp.read(mf[f])
232 232 if util.binary(data):
233 233 continue
234 234 if expand:
235 235 changenode = node or self.getnode(f, mf[f])
236 236 data, found = self.substitute(data, f, changenode,
237 237 self.re_kw.subn)
238 238 else:
239 239 found = self.re_kw.search(data)
240 240 if found:
241 241 notify(_('overwriting %s %s keywords\n') % (f, action))
242 242 self.repo.wwrite(f, data, mf.flags(f))
243 243 self.repo.dirstate.normal(f)
244 244 self.restrict = False
245 245
246 246 def shrinktext(self, text):
247 247 '''Unconditionally removes all keyword substitutions from text.'''
248 248 return self.re_kw.sub(r'$\1$', text)
249 249
250 250 def shrink(self, fname, text):
251 251 '''Returns text with all keyword substitutions removed.'''
252 252 if self.matcher(fname) and not util.binary(text):
253 253 return self.shrinktext(text)
254 254 return text
255 255
256 256 def shrinklines(self, fname, lines):
257 257 '''Returns lines with keyword substitutions removed.'''
258 258 if self.matcher(fname):
259 259 text = ''.join(lines)
260 260 if not util.binary(text):
261 261 return self.shrinktext(text).splitlines(True)
262 262 return lines
263 263
264 264 def wread(self, fname, data):
265 265 '''If in restricted mode returns data read from wdir with
266 266 keyword substitutions removed.'''
267 267 return self.restrict and self.shrink(fname, data) or data
268 268
269 269 class kwfilelog(filelog.filelog):
270 270 '''
271 271 Subclass of filelog to hook into its read, add, cmp methods.
272 272 Keywords are "stored" unexpanded, and processed on reading.
273 273 '''
274 274 def __init__(self, opener, path):
275 275 super(kwfilelog, self).__init__(opener, path)
276 276 self.kwt = kwtools['templater']
277 277 self.path = path
278 278
279 279 def read(self, node):
280 280 '''Expands keywords when reading filelog.'''
281 281 data = super(kwfilelog, self).read(node)
282 282 return self.kwt.expand(self.path, node, data)
283 283
284 284 def add(self, text, meta, tr, link, p1=None, p2=None):
285 285 '''Removes keyword substitutions when adding to filelog.'''
286 286 text = self.kwt.shrink(self.path, text)
287 287 return super(kwfilelog, self).add(text, meta, tr, link, p1=p1, p2=p2)
288 288
289 289 def cmp(self, node, text):
290 290 '''Removes keyword substitutions for comparison.'''
291 291 text = self.kwt.shrink(self.path, text)
292 292 if self.renamed(node):
293 293 t2 = super(kwfilelog, self).read(node)
294 294 return t2 != text
295 295 return revlog.revlog.cmp(self, node, text)
296 296
297 297 def _status(ui, repo, kwt, *pats, **opts):
298 298 '''Bails out if [keyword] configuration is not active.
299 299 Returns status of working directory.'''
300 300 if kwt:
301 301 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
302 302 return repo.status(files=files, match=match, list_clean=True)
303 303 if ui.configitems('keyword'):
304 304 raise util.Abort(_('[keyword] patterns cannot match'))
305 305 raise util.Abort(_('no [keyword] patterns configured'))
306 306
307 307 def _kwfwrite(ui, repo, expand, *pats, **opts):
308 308 '''Selects files and passes them to kwtemplater.overwrite.'''
309 309 kwt = kwtools['templater']
310 310 status = _status(ui, repo, kwt, *pats, **opts)
311 311 modified, added, removed, deleted, unknown, ignored, clean = status
312 312 if modified or added or removed or deleted:
313 313 raise util.Abort(_('outstanding uncommitted changes in given files'))
314 314 wlock = lock = None
315 315 try:
316 316 wlock = repo.wlock()
317 317 lock = repo.lock()
318 318 kwt.overwrite(expand=expand, files=clean)
319 319 finally:
320 320 del wlock, lock
321 321
322 322
323 323 def demo(ui, repo, *args, **opts):
324 324 '''print [keywordmaps] configuration and an expansion example
325 325
326 326 Show current, custom, or default keyword template maps
327 327 and their expansion.
328 328
329 329 Extend current configuration by specifying maps as arguments
330 330 and optionally by reading from an additional hgrc file.
331 331
332 332 Override current keyword template maps with "default" option.
333 333 '''
334 334 def demostatus(stat):
335 335 ui.status(_('\n\t%s\n') % stat)
336 336
337 337 def demoitems(section, items):
338 338 ui.write('[%s]\n' % section)
339 339 for k, v in items:
340 340 ui.write('%s = %s\n' % (k, v))
341 341
342 342 msg = 'hg keyword config and expansion example'
343 343 kwstatus = 'current'
344 344 fn = 'demo.txt'
345 345 branchname = 'demobranch'
346 346 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
347 347 ui.note(_('creating temporary repo at %s\n') % tmpdir)
348 348 repo = localrepo.localrepository(ui, path=tmpdir, create=True)
349 349 ui.setconfig('keyword', fn, '')
350 350 if args or opts.get('rcfile'):
351 351 kwstatus = 'custom'
352 352 if opts.get('rcfile'):
353 353 ui.readconfig(opts.get('rcfile'))
354 354 if opts.get('default'):
355 355 kwstatus = 'default'
356 356 kwmaps = kwtemplater.templates
357 357 if ui.configitems('keywordmaps'):
358 358 # override maps from optional rcfile
359 359 for k, v in kwmaps.iteritems():
360 360 ui.setconfig('keywordmaps', k, v)
361 361 elif args:
362 362 # simulate hgrc parsing
363 363 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
364 364 fp = repo.opener('hgrc', 'w')
365 365 fp.writelines(rcmaps)
366 366 fp.close()
367 367 ui.readconfig(repo.join('hgrc'))
368 368 if not opts.get('default'):
369 369 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
370 370 reposetup(ui, repo)
371 371 for k, v in ui.configitems('extensions'):
372 372 if k.endswith('keyword'):
373 373 extension = '%s = %s' % (k, v)
374 374 break
375 375 demostatus('config using %s keyword template maps' % kwstatus)
376 376 ui.write('[extensions]\n%s\n' % extension)
377 377 demoitems('keyword', ui.configitems('keyword'))
378 378 demoitems('keywordmaps', kwmaps.iteritems())
379 379 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
380 380 repo.wopener(fn, 'w').write(keywords)
381 381 repo.add([fn])
382 382 path = repo.wjoin(fn)
383 383 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
384 384 ui.note(keywords)
385 385 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
386 386 # silence branch command if not verbose
387 387 quiet = ui.quiet
388 388 ui.quiet = not ui.verbose
389 389 commands.branch(ui, repo, branchname)
390 390 ui.quiet = quiet
391 391 for name, cmd in ui.configitems('hooks'):
392 392 if name.split('.', 1)[0].find('commit') > -1:
393 393 repo.ui.setconfig('hooks', name, '')
394 394 ui.note(_('unhooked all commit hooks\n'))
395 395 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
396 396 repo.commit(text=msg)
397 397 format = ui.verbose and ' in %s' % path or ''
398 398 demostatus('%s keywords expanded%s' % (kwstatus, format))
399 399 ui.write(repo.wread(fn))
400 400 ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
401 401 shutil.rmtree(tmpdir, ignore_errors=True)
402 402
403 403 def expand(ui, repo, *pats, **opts):
404 404 '''expand keywords in working directory
405 405
406 406 Run after (re)enabling keyword expansion.
407 407
408 408 kwexpand refuses to run if given files contain local changes.
409 409 '''
410 410 # 3rd argument sets expansion to True
411 411 _kwfwrite(ui, repo, True, *pats, **opts)
412 412
413 413 def files(ui, repo, *pats, **opts):
414 414 '''print files currently configured for keyword expansion
415 415
416 416 Crosscheck which files in working directory are potential targets for
417 417 keyword expansion.
418 418 That is, files matched by [keyword] config patterns but not symlinks.
419 419 '''
420 420 kwt = kwtools['templater']
421 421 status = _status(ui, repo, kwt, *pats, **opts)
422 422 modified, added, removed, deleted, unknown, ignored, clean = status
423 423 files = modified + added + clean
424 424 if opts.get('untracked'):
425 425 files += unknown
426 426 files.sort()
427 427 wctx = repo.workingctx()
428 428 islink = lambda p: 'l' in wctx.fileflags(p)
429 429 kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
430 430 cwd = pats and repo.getcwd() or ''
431 431 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
432 432 if opts.get('all') or opts.get('ignore'):
433 433 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
434 434 for char, filenames in kwfstats:
435 435 format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
436 436 for f in filenames:
437 437 ui.write(format % repo.pathto(f, cwd))
438 438
439 439 def shrink(ui, repo, *pats, **opts):
440 440 '''revert expanded keywords in working directory
441 441
442 442 Run before changing/disabling active keywords
443 443 or if you experience problems with "hg import" or "hg merge".
444 444
445 445 kwshrink refuses to run if given files contain local changes.
446 446 '''
447 447 # 3rd argument sets expansion to False
448 448 _kwfwrite(ui, repo, False, *pats, **opts)
449 449
450 450
451 451 def reposetup(ui, repo):
452 452 '''Sets up repo as kwrepo for keyword substitution.
453 453 Overrides file method to return kwfilelog instead of filelog
454 454 if file matches user configuration.
455 455 Wraps commit to overwrite configured files with updated
456 456 keyword substitutions.
457 457 This is done for local repos only, and only if there are
458 458 files configured at all for keyword substitution.'''
459 459
460 460 try:
461 461 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
462 462 or '.hg' in util.splitpath(repo.root)
463 463 or repo._url.startswith('bundle:')):
464 464 return
465 465 except AttributeError:
466 466 pass
467 467
468 468 inc, exc = [], ['.hg*']
469 469 for pat, opt in ui.configitems('keyword'):
470 470 if opt != 'ignore':
471 471 inc.append(pat)
472 472 else:
473 473 exc.append(pat)
474 474 if not inc:
475 475 return
476 476
477 477 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
478 478
479 479 class kwrepo(repo.__class__):
480 480 def file(self, f):
481 481 if f[0] == '/':
482 482 f = f[1:]
483 483 return kwfilelog(self.sopener, f)
484 484
485 485 def wread(self, filename):
486 486 data = super(kwrepo, self).wread(filename)
487 487 return kwt.wread(filename, data)
488 488
489 489 def commit(self, files=None, text='', user=None, date=None,
490 490 match=util.always, force=False, force_editor=False,
491 491 p1=None, p2=None, extra={}, empty_ok=False):
492 492 wlock = lock = None
493 493 _p1 = _p2 = None
494 494 try:
495 495 wlock = self.wlock()
496 496 lock = self.lock()
497 497 # store and postpone commit hooks
498 498 commithooks = {}
499 499 for name, cmd in ui.configitems('hooks'):
500 500 if name.split('.', 1)[0] == 'commit':
501 501 commithooks[name] = cmd
502 502 ui.setconfig('hooks', name, None)
503 503 if commithooks:
504 504 # store parents for commit hook environment
505 505 if p1 is None:
506 506 _p1, _p2 = repo.dirstate.parents()
507 507 else:
508 508 _p1, _p2 = p1, p2 or nullid
509 509 _p1 = hex(_p1)
510 510 if _p2 == nullid:
511 511 _p2 = ''
512 512 else:
513 513 _p2 = hex(_p2)
514 514
515 515 node = super(kwrepo,
516 516 self).commit(files=files, text=text, user=user,
517 517 date=date, match=match, force=force,
518 518 force_editor=force_editor,
519 519 p1=p1, p2=p2, extra=extra,
520 520 empty_ok=empty_ok)
521 521
522 522 # restore commit hooks
523 523 for name, cmd in commithooks.iteritems():
524 524 ui.setconfig('hooks', name, cmd)
525 525 if node is not None:
526 526 kwt.overwrite(node=node)
527 527 repo.hook('commit', node=node, parent1=_p1, parent2=_p2)
528 528 return node
529 529 finally:
530 530 del wlock, lock
531 531
532 532 repo.__class__ = kwrepo
533 533 patch.patchfile.__init__ = _kwpatchfile_init
534 534 patch.diff = _kw_diff
535 535 webcommands.changeset = webcommands.rev = _kwweb_changeset
536 536 webcommands.filediff = webcommands.diff = _kwweb_filediff
537 537
538 538
539 539 cmdtable = {
540 540 'kwdemo':
541 541 (demo,
542 542 [('d', 'default', None, _('show default keyword template maps')),
543 543 ('f', 'rcfile', [], _('read maps from rcfile'))],
544 544 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
545 545 'kwexpand': (expand, commands.walkopts,
546 546 _('hg kwexpand [OPTION]... [FILE]...')),
547 547 'kwfiles':
548 548 (files,
549 549 [('a', 'all', None, _('show keyword status flags of all files')),
550 550 ('i', 'ignore', None, _('show files excluded from expansion')),
551 551 ('u', 'untracked', None, _('additionally show untracked files')),
552 552 ] + commands.walkopts,
553 553 _('hg kwfiles [OPTION]... [FILE]...')),
554 554 'kwshrink': (shrink, commands.walkopts,
555 555 _('hg kwshrink [OPTION]... [FILE]...')),
556 556 }
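
The kwtemplater class above does all of its matching with a single regular expression built from the configured keyword names. Below is a minimal standalone sketch of that expand/shrink round trip, using the same pattern shape; the template value, file name and sample text are invented for illustration and no Mercurial APIs are involved.

import re

# one keyword with a canned expansion value (kwtemplater renders these
# from changeset template mappings instead)
templates = {'Id': 'demo.txt,v deadbeefcafe 2008/02/04 10:00:00 alice'}
escaped = map(re.escape, templates.keys())
re_kw = re.compile(r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped))

def expand(text):
    # replace "$Id$" (or an already expanded "$Id: ... $") with fresh data
    kwsub = lambda m: '$%s: %s $' % (m.group(1), templates[m.group(1)])
    return re_kw.sub(kwsub, text)

def shrink(text):
    # collapse any expansion back to the bare "$Id$" form, like kwshrink
    return re_kw.sub(r'$\1$', text)

sample = 'released under GPL\n$Id$\n'
print expand(sample)           # ... $Id: demo.txt,v deadbeefcafe ... $
print shrink(expand(sample))   # back to the unexpanded $Id$
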
@@ -1,285 +1,285 b''
1 1 # notify.py - email notifications for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to email notifications to people when changesets are
9 9 # committed to a repo they subscribe to.
10 10 #
11 11 # default mode is to print messages to stdout, for testing and
12 12 # configuring.
13 13 #
14 14 # to use, configure notify extension and enable in hgrc like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.notify =
18 18 #
19 19 # [hooks]
20 20 # # one email for each incoming changeset
21 21 # incoming.notify = python:hgext.notify.hook
22 22 # # batch emails when many changesets incoming at one time
23 23 # changegroup.notify = python:hgext.notify.hook
24 24 #
25 25 # [notify]
26 26 # # config items go in here
27 27 #
28 28 # config items:
29 29 #
30 30 # REQUIRED:
31 31 # config = /path/to/file # file containing subscriptions
32 32 #
33 33 # OPTIONAL:
34 34 # test = True # print messages to stdout for testing
35 35 # strip = 3 # number of slashes to strip for url paths
36 36 # domain = example.com # domain to use if committer missing domain
37 37 # style = ... # style file to use when formatting email
38 38 # template = ... # template to use when formatting email
39 39 # incoming = ... # template to use when run as incoming hook
40 40 # changegroup = ... # template when run as changegroup hook
41 41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 42 # maxsubject = 67 # truncate subject line longer than this
43 43 # diffstat = True # add a diffstat before the diff content
44 44 # sources = serve # notify if source of incoming changes in this list
45 45 # # (serve == ssh or http, push, pull, bundle)
46 46 # [email]
47 47 # from = user@host.com # email address to send as if none given
48 48 # [web]
49 49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
50 50 #
51 51 # notify config file has same format as regular hgrc. it has two
52 52 # sections so you can express subscriptions in whatever way is handier
53 53 # for you.
54 54 #
55 55 # [usersubs]
56 56 # # key is subscriber email, value is ","-separated list of glob patterns
57 57 # user@host = pattern
58 58 #
59 59 # [reposubs]
60 60 # # key is glob pattern, value is ","-separated list of subscriber emails
61 61 # pattern = user@host
62 62 #
63 63 # glob patterns are matched against path to repo root.
64 64 #
65 65 # if you like, you can put the notify config file in a repo that users can
66 66 # push changes to, so they can manage their own subscriptions.
67 67
68 68 from mercurial.i18n import _
69 from mercurial.node import *
69 from mercurial.node import bin, short
70 70 from mercurial import patch, cmdutil, templater, util, mail
71 71 import email.Parser, fnmatch, socket, time
72 72
73 73 # template for single changeset can include email headers.
74 74 single_template = '''
75 75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 76 From: {author}
77 77
78 78 changeset {node|short} in {root}
79 79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 80 description:
81 81 \t{desc|tabindent|strip}
82 82 '''.lstrip()
83 83
84 84 # template for multiple changesets should not contain email headers,
85 85 # because only first set of headers will be used and result will look
86 86 # strange.
87 87 multiple_template = '''
88 88 changeset {node|short} in {root}
89 89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 90 summary: {desc|firstline}
91 91 '''
92 92
93 93 deftemplates = {
94 94 'changegroup': multiple_template,
95 95 }
96 96
97 97 class notifier(object):
98 98 '''email notification class.'''
99 99
100 100 def __init__(self, ui, repo, hooktype):
101 101 self.ui = ui
102 102 cfg = self.ui.config('notify', 'config')
103 103 if cfg:
104 104 self.ui.readsections(cfg, 'usersubs', 'reposubs')
105 105 self.repo = repo
106 106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 107 self.root = self.strip(self.repo.root)
108 108 self.domain = self.ui.config('notify', 'domain')
109 109 self.subs = self.subscribers()
110 110
111 111 mapfile = self.ui.config('notify', 'style')
112 112 template = (self.ui.config('notify', hooktype) or
113 113 self.ui.config('notify', 'template'))
114 114 self.t = cmdutil.changeset_templater(self.ui, self.repo,
115 115 False, mapfile, False)
116 116 if not mapfile and not template:
117 117 template = deftemplates.get(hooktype) or single_template
118 118 if template:
119 119 template = templater.parsestring(template, quoted=False)
120 120 self.t.use_template(template)
121 121
122 122 def strip(self, path):
123 123 '''strip leading slashes from local path, turn into web-safe path.'''
124 124
125 125 path = util.pconvert(path)
126 126 count = self.stripcount
127 127 while count > 0:
128 128 c = path.find('/')
129 129 if c == -1:
130 130 break
131 131 path = path[c+1:]
132 132 count -= 1
133 133 return path
134 134
135 135 def fixmail(self, addr):
136 136 '''try to clean up email addresses.'''
137 137
138 138 addr = util.email(addr.strip())
139 139 if self.domain:
140 140 a = addr.find('@localhost')
141 141 if a != -1:
142 142 addr = addr[:a]
143 143 if '@' not in addr:
144 144 return addr + '@' + self.domain
145 145 return addr
146 146
147 147 def subscribers(self):
148 148 '''return list of email addresses of subscribers to this repo.'''
149 149
150 150 subs = {}
151 151 for user, pats in self.ui.configitems('usersubs'):
152 152 for pat in pats.split(','):
153 153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 154 subs[self.fixmail(user)] = 1
155 155 for pat, users in self.ui.configitems('reposubs'):
156 156 if fnmatch.fnmatch(self.repo.root, pat):
157 157 for user in users.split(','):
158 158 subs[self.fixmail(user)] = 1
159 159 subs = subs.keys()
160 160 subs.sort()
161 161 return subs
162 162
163 163 def url(self, path=None):
164 164 return self.ui.config('web', 'baseurl') + (path or self.root)
165 165
166 166 def node(self, node):
167 167 '''format one changeset.'''
168 168
169 169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 170 baseurl=self.ui.config('web', 'baseurl'),
171 171 root=self.repo.root,
172 172 webroot=self.root)
173 173
174 174 def skipsource(self, source):
175 175 '''true if incoming changes from this source should be skipped.'''
176 176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 177 return source not in ok_sources
178 178
179 179 def send(self, node, count, data):
180 180 '''send message.'''
181 181
182 182 p = email.Parser.Parser()
183 183 msg = p.parsestr(data)
184 184
185 185 def fix_subject():
186 186 '''try to make subject line exist and be useful.'''
187 187
188 188 subject = msg['Subject']
189 189 if not subject:
190 190 if count > 1:
191 191 subject = _('%s: %d new changesets') % (self.root, count)
192 192 else:
193 193 changes = self.repo.changelog.read(node)
194 194 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 195 subject = '%s: %s' % (self.root, s)
196 196 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 197 if maxsubject and len(subject) > maxsubject:
198 198 subject = subject[:maxsubject-3] + '...'
199 199 del msg['Subject']
200 200 msg['Subject'] = subject
201 201
202 202 def fix_sender():
203 203 '''try to make message have proper sender.'''
204 204
205 205 sender = msg['From']
206 206 if not sender:
207 207 sender = self.ui.config('email', 'from') or self.ui.username()
208 208 if '@' not in sender or '@localhost' in sender:
209 209 sender = self.fixmail(sender)
210 210 del msg['From']
211 211 msg['From'] = sender
212 212
213 213 msg['Date'] = util.datestr(date=util.makedate(),
214 214 format="%a, %d %b %Y %H:%M:%S",
215 215 timezone=True)
216 216 fix_subject()
217 217 fix_sender()
218 218
219 219 msg['X-Hg-Notification'] = 'changeset ' + short(node)
220 220 if not msg['Message-Id']:
221 221 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
222 222 (short(node), int(time.time()),
223 223 hash(self.repo.root), socket.getfqdn()))
224 224 msg['To'] = ', '.join(self.subs)
225 225
226 226 msgtext = msg.as_string(0)
227 227 if self.ui.configbool('notify', 'test', True):
228 228 self.ui.write(msgtext)
229 229 if not msgtext.endswith('\n'):
230 230 self.ui.write('\n')
231 231 else:
232 232 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
233 233 (len(self.subs), count))
234 234 mail.sendmail(self.ui, util.email(msg['From']),
235 235 self.subs, msgtext)
236 236
237 237 def diff(self, node, ref):
238 238 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
239 239 if maxdiff == 0:
240 240 return
241 241 prev = self.repo.changelog.parents(node)[0]
242 242 self.ui.pushbuffer()
243 243 patch.diff(self.repo, prev, ref)
244 244 difflines = self.ui.popbuffer().splitlines(1)
245 245 if self.ui.configbool('notify', 'diffstat', True):
246 246 s = patch.diffstat(difflines)
247 247 # s may be None; don't include the header if it is
248 248 if s:
249 249 self.ui.write('\ndiffstat:\n\n%s' % s)
250 250 if maxdiff > 0 and len(difflines) > maxdiff:
251 251 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
252 252 (len(difflines), maxdiff))
253 253 difflines = difflines[:maxdiff]
254 254 elif difflines:
255 255 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
256 256 self.ui.write(*difflines)
257 257
258 258 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
259 259 '''send email notifications to interested subscribers.
260 260
261 261 if used as changegroup hook, send one email for all changesets in
262 262 changegroup. else send one email per changeset.'''
263 263 n = notifier(ui, repo, hooktype)
264 264 if not n.subs:
265 265 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
266 266 return
267 267 if n.skipsource(source):
268 268 ui.debug(_('notify: changes have source "%s" - skipping\n') %
269 269 source)
270 270 return
271 271 node = bin(node)
272 272 ui.pushbuffer()
273 273 if hooktype == 'changegroup':
274 274 start = repo.changelog.rev(node)
275 275 end = repo.changelog.count()
276 276 count = end - start
277 277 for rev in xrange(start, end):
278 278 n.node(repo.changelog.node(rev))
279 279 n.diff(node, repo.changelog.tip())
280 280 else:
281 281 count = 1
282 282 n.node(node)
283 283 n.diff(node, node)
284 284 data = ui.popbuffer()
285 285 n.send(node, count, data)
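
The subscribers() method above is just two passes of fnmatch over the [usersubs] and [reposubs] maps. A self-contained sketch of that lookup follows; the configuration values and repository path are made up, and the fixmail()/domain cleanup is left out for brevity.

import fnmatch

usersubs = {'alice@example.com': '*/widgets,*/gadgets'}          # subscriber -> patterns
reposubs = {'*/widgets': 'bob@example.com, carol@example.com'}   # pattern -> subscribers
repo_root = '/srv/hg/widgets'

subs = {}
for user, pats in usersubs.items():
    for pat in pats.split(','):
        if fnmatch.fnmatch(repo_root, pat.strip()):
            subs[user] = 1
for pat, users in reposubs.items():
    if fnmatch.fnmatch(repo_root, pat):
        for user in users.split(','):
            subs[user.strip()] = 1
subscribers = subs.keys()
subscribers.sort()
print subscribers   # ['alice@example.com', 'bob@example.com', 'carol@example.com']
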
@@ -1,466 +1,466 b''
1 1 # Command for sending a collection of Mercurial changesets as a series
2 2 # of patch emails.
3 3 #
4 4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 5 # which describes the series as a whole.
6 6 #
7 7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 8 # the first line of the changeset description as the subject text.
9 9 # The message contains two or three body parts:
10 10 #
11 11 # The remainder of the changeset description.
12 12 #
13 13 # [Optional] If the diffstat program is installed, the result of
14 14 # running diffstat on the patch.
15 15 #
16 16 # The patch itself, as generated by "hg export".
17 17 #
18 18 # Each message refers to all of its predecessors using the In-Reply-To
19 19 # and References headers, so they will show up as a sequence in
20 20 # threaded mail and news readers, and in mail archives.
21 21 #
22 22 # For each changeset, you will be prompted with a diffstat summary and
23 23 # the changeset summary, so you can be sure you are sending the right
24 24 # changes.
25 25 #
26 26 # To enable this extension:
27 27 #
28 28 # [extensions]
29 29 # hgext.patchbomb =
30 30 #
31 31 # To configure other defaults, add a section like this to your hgrc
32 32 # file:
33 33 #
34 34 # [email]
35 35 # from = My Name <my@email>
36 36 # to = recipient1, recipient2, ...
37 37 # cc = cc1, cc2, ...
38 38 # bcc = bcc1, bcc2, ...
39 39 #
40 40 # Then you can use the "hg email" command to mail a series of changesets
41 41 # as a patchbomb.
42 42 #
43 43 # To avoid sending patches prematurely, it is a good idea to first run
44 44 # the "email" command with the "-n" option (test only). You will be
45 45 # prompted for an email recipient address, a subject and an introductory
46 46 # message describing the patches of your patchbomb. Then when all is
47 47 # done, patchbomb messages are displayed. If PAGER environment variable
48 48 # is set, your pager will be fired up once for each patchbomb message, so
49 49 # you can verify everything is alright.
50 50 #
51 51 # The "-m" (mbox) option is also very useful. Instead of previewing
52 52 # each patchbomb message in a pager or sending the messages directly,
53 53 # it will create a UNIX mailbox file with the patch emails. This
54 54 # mailbox file can be previewed with any mail user agent which supports
55 55 # UNIX mbox files, e.g. with mutt:
56 56 #
57 57 # % mutt -R -f mbox
58 58 #
59 59 # When you are previewing the patchbomb messages, you can use `formail'
60 60 # (a utility that is commonly installed as part of the procmail package)
61 61 # to send each message out:
62 62 #
63 63 # % formail -s sendmail -bm -t < mbox
64 64 #
65 65 # That should be all. Now your patchbomb is on its way out.
66 66
67 67 import os, errno, socket, tempfile
68 68 import email.MIMEMultipart, email.MIMEText, email.MIMEBase
69 69 import email.Utils, email.Encoders
70 70 from mercurial import cmdutil, commands, hg, mail, ui, patch, util
71 71 from mercurial.i18n import _
72 from mercurial.node import *
72 from mercurial.node import bin
73 73
74 74 def patchbomb(ui, repo, *revs, **opts):
75 75 '''send changesets by email
76 76
77 77 By default, diffs are sent in the format generated by hg export,
78 78 one per message. The series starts with a "[PATCH 0 of N]"
79 79 introduction, which describes the series as a whole.
80 80
81 81 Each patch email has a Subject line of "[PATCH M of N] ...", using
82 82 the first line of the changeset description as the subject text.
83 83 The message contains two or three body parts. First, the rest of
84 84 the changeset description. Next, (optionally) if the diffstat
85 85 program is installed, the result of running diffstat on the patch.
86 86 Finally, the patch itself, as generated by "hg export".
87 87
88 88 With --outgoing, emails will be generated for patches not
89 89 found in the destination repository (or only those which are
90 90 ancestors of the specified revisions if any are provided)
91 91
92 92 With --bundle, changesets are selected as for --outgoing,
93 93 but a single email containing a binary Mercurial bundle as an
94 94 attachment will be sent.
95 95
96 96 Examples:
97 97
98 98 hg email -r 3000 # send patch 3000 only
99 99 hg email -r 3000 -r 3001 # send patches 3000 and 3001
100 100 hg email -r 3000:3005 # send patches 3000 through 3005
101 101 hg email 3000 # send patch 3000 (deprecated)
102 102
103 103 hg email -o # send all patches not in default
104 104 hg email -o DEST # send all patches not in DEST
105 105 hg email -o -r 3000 # send all ancestors of 3000 not in default
106 106 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
107 107
108 108 hg email -b # send bundle of all patches not in default
109 109 hg email -b DEST # send bundle of all patches not in DEST
110 110 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
111 111 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
112 112
113 113 Before using this command, you will need to enable email in your hgrc.
114 114 See the [email] section in hgrc(5) for details.
115 115 '''
116 116
117 117 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
118 118 if not ui.interactive:
119 119 return default
120 120 if default:
121 121 prompt += ' [%s]' % default
122 122 prompt += rest
123 123 while True:
124 124 r = ui.prompt(prompt, default=default)
125 125 if r:
126 126 return r
127 127 if default is not None:
128 128 return default
129 129 if empty_ok:
130 130 return r
131 131 ui.warn(_('Please enter a valid value.\n'))
132 132
133 133 def confirm(s, denial):
134 134 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
135 135 raise util.Abort(denial)
136 136
137 137 def cdiffstat(summary, patchlines):
138 138 s = patch.diffstat(patchlines)
139 139 if s:
140 140 if summary:
141 141 ui.write(summary, '\n')
142 142 ui.write(s, '\n')
143 143 confirm(_('Does the diffstat above look okay'),
144 144 _('diffstat rejected'))
145 145 elif s is None:
146 146 ui.warn(_('No diffstat information available.\n'))
147 147 s = ''
148 148 return s
149 149
150 150 def makepatch(patch, idx, total):
151 151 desc = []
152 152 node = None
153 153 body = ''
154 154 for line in patch:
155 155 if line.startswith('#'):
156 156 if line.startswith('# Node ID'):
157 157 node = line.split()[-1]
158 158 continue
159 159 if line.startswith('diff -r') or line.startswith('diff --git'):
160 160 break
161 161 desc.append(line)
162 162 if not node:
163 163 raise ValueError
164 164
165 165 if opts['attach']:
166 166 body = ('\n'.join(desc[1:]).strip() or
167 167 'Patch subject is complete summary.')
168 168 body += '\n\n\n'
169 169
170 170 if opts.get('plain'):
171 171 while patch and patch[0].startswith('# '):
172 172 patch.pop(0)
173 173 if patch:
174 174 patch.pop(0)
175 175 while patch and not patch[0].strip():
176 176 patch.pop(0)
177 177 if opts.get('diffstat'):
178 178 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
179 179 if opts.get('attach') or opts.get('inline'):
180 180 msg = email.MIMEMultipart.MIMEMultipart()
181 181 if body:
182 182 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
183 183 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
184 184 binnode = bin(node)
185 185 # if node is mq patch, it will have patch file name as tag
186 186 patchname = [t for t in repo.nodetags(binnode)
187 187 if t.endswith('.patch') or t.endswith('.diff')]
188 188 if patchname:
189 189 patchname = patchname[0]
190 190 elif total > 1:
191 191 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
192 192 binnode, idx, total)
193 193 else:
194 194 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
195 195 disposition = 'inline'
196 196 if opts['attach']:
197 197 disposition = 'attachment'
198 198 p['Content-Disposition'] = disposition + '; filename=' + patchname
199 199 msg.attach(p)
200 200 else:
201 201 body += '\n'.join(patch)
202 202 msg = email.MIMEText.MIMEText(body)
203 203
204 204 subj = desc[0].strip().rstrip('. ')
205 205 if total == 1:
206 206 subj = '[PATCH] ' + (opts.get('subject') or subj)
207 207 else:
208 208 tlen = len(str(total))
209 209 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
210 210 msg['Subject'] = subj
211 211 msg['X-Mercurial-Node'] = node
212 212 return msg
213 213
214 214 def outgoing(dest, revs):
215 215 '''Return the revisions present locally but not in dest'''
216 216 dest = ui.expandpath(dest or 'default-push', dest or 'default')
217 217 revs = [repo.lookup(rev) for rev in revs]
218 218 other = hg.repository(ui, dest)
219 219 ui.status(_('comparing with %s\n') % dest)
220 220 o = repo.findoutgoing(other)
221 221 if not o:
222 222 ui.status(_("no changes found\n"))
223 223 return []
224 224 o = repo.changelog.nodesbetween(o, revs or None)[0]
225 225 return [str(repo.changelog.rev(r)) for r in o]
226 226
227 227 def getbundle(dest):
228 228 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
229 229 tmpfn = os.path.join(tmpdir, 'bundle')
230 230 try:
231 231 commands.bundle(ui, repo, tmpfn, dest, **opts)
232 232 return open(tmpfn, 'rb').read()
233 233 finally:
234 234 try:
235 235 os.unlink(tmpfn)
236 236 except:
237 237 pass
238 238 os.rmdir(tmpdir)
239 239
240 240 if not (opts.get('test') or opts.get('mbox')):
241 241 # really sending
242 242 mail.validateconfig(ui)
243 243
244 244 if not (revs or opts.get('rev')
245 245 or opts.get('outgoing') or opts.get('bundle')):
246 246 raise util.Abort(_('specify at least one changeset with -r or -o'))
247 247
248 248 cmdutil.setremoteconfig(ui, opts)
249 249 if opts.get('outgoing') and opts.get('bundle'):
250 250 raise util.Abort(_("--outgoing mode always on with --bundle;"
251 251 " do not re-specify --outgoing"))
252 252
253 253 if opts.get('outgoing') or opts.get('bundle'):
254 254 if len(revs) > 1:
255 255 raise util.Abort(_("too many destinations"))
256 256 dest = revs and revs[0] or None
257 257 revs = []
258 258
259 259 if opts.get('rev'):
260 260 if revs:
261 261 raise util.Abort(_('use only one form to specify the revision'))
262 262 revs = opts.get('rev')
263 263
264 264 if opts.get('outgoing'):
265 265 revs = outgoing(dest, opts.get('rev'))
266 266 if opts.get('bundle'):
267 267 opts['revs'] = revs
268 268
269 269 # start
270 270 if opts.get('date'):
271 271 start_time = util.parsedate(opts.get('date'))
272 272 else:
273 273 start_time = util.makedate()
274 274
275 275 def genmsgid(id):
276 276 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
277 277
278 278 def getdescription(body, sender):
279 279 if opts.get('desc'):
280 280 body = open(opts.get('desc')).read()
281 281 else:
282 282 ui.write(_('\nWrite the introductory message for the '
283 283 'patch series.\n\n'))
284 284 body = ui.edit(body, sender)
285 285 return body
286 286
287 287 def getexportmsgs():
288 288 patches = []
289 289
290 290 class exportee:
291 291 def __init__(self, container):
292 292 self.lines = []
293 293 self.container = container
294 294 self.name = 'email'
295 295
296 296 def write(self, data):
297 297 self.lines.append(data)
298 298
299 299 def close(self):
300 300 self.container.append(''.join(self.lines).split('\n'))
301 301 self.lines = []
302 302
303 303 commands.export(ui, repo, *revs, **{'output': exportee(patches),
304 304 'switch_parent': False,
305 305 'text': None,
306 306 'git': opts.get('git')})
307 307
308 308 jumbo = []
309 309 msgs = []
310 310
311 311 ui.write(_('This patch series consists of %d patches.\n\n')
312 312 % len(patches))
313 313
314 314 for p, i in zip(patches, xrange(len(patches))):
315 315 jumbo.extend(p)
316 316 msgs.append(makepatch(p, i + 1, len(patches)))
317 317
318 318 if len(patches) > 1:
319 319 tlen = len(str(len(patches)))
320 320
321 321 subj = '[PATCH %0*d of %d] %s' % (
322 322 tlen, 0, len(patches),
323 323 opts.get('subject') or
324 324 prompt('Subject:',
325 325 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
326 326
327 327 body = ''
328 328 if opts.get('diffstat'):
329 329 d = cdiffstat(_('Final summary:\n'), jumbo)
330 330 if d:
331 331 body = '\n' + d
332 332
333 333 body = getdescription(body, sender)
334 334 msg = email.MIMEText.MIMEText(body)
335 335 msg['Subject'] = subj
336 336
337 337 msgs.insert(0, msg)
338 338 return msgs
339 339
340 340 def getbundlemsgs(bundle):
341 341 subj = (opts.get('subject')
342 342 or prompt('Subject:', default='A bundle for your repository'))
343 343
344 344 body = getdescription('', sender)
345 345 msg = email.MIMEMultipart.MIMEMultipart()
346 346 if body:
347 347 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
348 348 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
349 349 datapart.set_payload(bundle)
350 350 datapart.add_header('Content-Disposition', 'attachment',
351 351 filename='bundle.hg')
352 352 email.Encoders.encode_base64(datapart)
353 353 msg.attach(datapart)
354 354 msg['Subject'] = subj
355 355 return [msg]
356 356
357 357 sender = (opts.get('from') or ui.config('email', 'from') or
358 358 ui.config('patchbomb', 'from') or
359 359 prompt('From', ui.username()))
360 360
361 361 if opts.get('bundle'):
362 362 msgs = getbundlemsgs(getbundle(dest))
363 363 else:
364 364 msgs = getexportmsgs()
365 365
366 366 def getaddrs(opt, prpt, default = None):
367 367 addrs = opts.get(opt) or (ui.config('email', opt) or
368 368 ui.config('patchbomb', opt) or
369 369 prompt(prpt, default = default)).split(',')
370 370 return [a.strip() for a in addrs if a.strip()]
371 371
372 372 to = getaddrs('to', 'To')
373 373 cc = getaddrs('cc', 'Cc', '')
374 374
375 375 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
376 376 ui.config('patchbomb', 'bcc') or '').split(',')
377 377 bcc = [a.strip() for a in bcc if a.strip()]
378 378
379 379 ui.write('\n')
380 380
381 381 parent = None
382 382
383 383 sender_addr = email.Utils.parseaddr(sender)[1]
384 384 sendmail = None
385 385 for m in msgs:
386 386 try:
387 387 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
388 388 except TypeError:
389 389 m['Message-Id'] = genmsgid('patchbomb')
390 390 if parent:
391 391 m['In-Reply-To'] = parent
392 392 else:
393 393 parent = m['Message-Id']
394 394 m['Date'] = util.datestr(date=start_time,
395 395 format="%a, %d %b %Y %H:%M:%S", timezone=True)
396 396
397 397 start_time = (start_time[0] + 1, start_time[1])
398 398 m['From'] = sender
399 399 m['To'] = ', '.join(to)
400 400 if cc:
401 401 m['Cc'] = ', '.join(cc)
402 402 if bcc:
403 403 m['Bcc'] = ', '.join(bcc)
404 404 if opts.get('test'):
405 405 ui.status('Displaying ', m['Subject'], ' ...\n')
406 406 ui.flush()
407 407 if 'PAGER' in os.environ:
408 408 fp = os.popen(os.environ['PAGER'], 'w')
409 409 else:
410 410 fp = ui
411 411 try:
412 412 fp.write(m.as_string(0))
413 413 fp.write('\n')
414 414 except IOError, inst:
415 415 if inst.errno != errno.EPIPE:
416 416 raise
417 417 if fp is not ui:
418 418 fp.close()
419 419 elif opts.get('mbox'):
420 420 ui.status('Writing ', m['Subject'], ' ...\n')
421 421 fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
422 422 date = util.datestr(date=start_time,
423 423 format='%a %b %d %H:%M:%S %Y', timezone=False)
424 424 fp.write('From %s %s\n' % (sender_addr, date))
425 425 fp.write(m.as_string(0))
426 426 fp.write('\n\n')
427 427 fp.close()
428 428 else:
429 429 if not sendmail:
430 430 sendmail = mail.connect(ui)
431 431 ui.status('Sending ', m['Subject'], ' ...\n')
432 432 # Exim does not remove the Bcc field
433 433 del m['Bcc']
434 434 sendmail(sender, to + bcc + cc, m.as_string(0))
435 435
436 436 cmdtable = {
437 437 "email":
438 438 (patchbomb,
439 439 [('a', 'attach', None, _('send patches as attachments')),
440 440 ('i', 'inline', None, _('send patches as inline attachments')),
441 441 ('', 'bcc', [], _('email addresses of blind copy recipients')),
442 442 ('c', 'cc', [], _('email addresses of copy recipients')),
443 443 ('d', 'diffstat', None, _('add diffstat output to messages')),
444 444 ('', 'date', '', _('use the given date as the sending date')),
445 445 ('', 'desc', '', _('use the given file as the series description')),
446 446 ('g', 'git', None, _('use git extended diff format')),
447 447 ('f', 'from', '', _('email address of sender')),
448 448 ('', 'plain', None, _('omit hg patch header')),
449 449 ('n', 'test', None, _('print messages that would be sent')),
450 450 ('m', 'mbox', '',
451 451 _('write messages to mbox file instead of sending them')),
452 452 ('o', 'outgoing', None,
453 453 _('send changes not found in the target repository')),
454 454 ('b', 'bundle', None,
455 455 _('send changes not in target as a binary bundle')),
456 456 ('r', 'rev', [], _('a revision to send')),
457 457 ('s', 'subject', '',
458 458 _('subject of first message (intro or single patch)')),
459 459 ('t', 'to', [], _('email addresses of recipients')),
460 460 ('', 'force', None,
461 461 _('run even when remote repository is unrelated (with -b)')),
462 462 ('', 'base', [],
463 463 _('a base changeset to specify instead of a destination (with -b)')),
464 464 ] + commands.remoteopts,
465 465 _('hg email [OPTION]... [DEST]...'))
466 466 }
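
Near the end of patchbomb() every message is given a zero-padded "[PATCH M of N]" subject, a Message-Id, and an In-Reply-To header pointing at the first message so mail readers thread the series. The snippet below imitates that loop on dummy messages; the node ids and subject text are placeholders, not real changesets.

import email.MIMEText, socket, time

start_time = int(time.time())

def genmsgid(id):
    # same shape as patchbomb's genmsgid: <node-prefix.timestamp@host>
    return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())

msgs = [email.MIMEText.MIMEText('patch body %d' % i) for i in range(3)]
total = len(msgs)
tlen = len(str(total))               # counter width, so a 10+ patch series gets 01, 02, ...
parent = None
for idx, m in enumerate(msgs):
    m['Subject'] = '[PATCH %0*d of %d] example subject' % (tlen, idx + 1, total)
    m['Message-Id'] = genmsgid('%020d' % idx)   # dummy stand-in for the changeset node
    if parent:
        m['In-Reply-To'] = parent    # later patches reply to the first message
    else:
        parent = m['Message-Id']

for m in msgs:
    print '%s -> %s' % (m['Subject'], m.get('In-Reply-To'))
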
@@ -1,107 +1,107 b''
1 1 # win32text.py - LF <-> CRLF translation utilities for Windows users
2 2 #
3 3 # This software may be used and distributed according to the terms
4 4 # of the GNU General Public License, incorporated herein by reference.
5 5 #
6 6 # To perform automatic newline conversion, use:
7 7 #
8 8 # [extensions]
9 9 # hgext.win32text =
10 10 # [encode]
11 11 # ** = cleverencode:
12 12 # [decode]
13 13 # ** = cleverdecode:
14 14 #
15 15 # If not doing conversion, to make sure you do not commit CRLF by accident:
16 16 #
17 17 # [hooks]
18 18 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
19 19 #
20 20 # To do the same check on a server to prevent CRLF from being pushed or pulled:
21 21 #
22 22 # [hooks]
23 23 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
24 24
25 25 from mercurial import util, ui
26 26 from mercurial.i18n import gettext as _
27 from mercurial.node import *
27 from mercurial.node import bin, short
28 28 import re
29 29
30 30 # regexp for single LF without CR preceding.
31 31 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
32 32
33 33 def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs):
34 34 # warn if already has CRLF in repository.
35 35 # it might cause unexpected eol conversion.
36 36 # see issue 302:
37 37 # http://www.selenic.com/mercurial/bts/issue302
38 38 if '\r\n' in s and ui and filename and repo:
39 39 ui.warn(_('WARNING: %s already has CRLF line endings\n'
40 40 'and does not need EOL conversion by the win32text plugin.\n'
41 41 'Before your next commit, please reconsider your '
42 42 'encode/decode settings in \nMercurial.ini or %s.\n') %
43 43 (filename, repo.join('hgrc')))
44 44 # replace single LF to CRLF
45 45 return re_single_lf.sub('\\1\r\n', s)
46 46
47 47 def dumbencode(s, cmd):
48 48 return s.replace('\r\n', '\n')
49 49
50 50 def clevertest(s, cmd):
51 51 if '\0' in s: return False
52 52 return True
53 53
54 54 def cleverdecode(s, cmd, **kwargs):
55 55 if clevertest(s, cmd):
56 56 return dumbdecode(s, cmd, **kwargs)
57 57 return s
58 58
59 59 def cleverencode(s, cmd):
60 60 if clevertest(s, cmd):
61 61 return dumbencode(s, cmd)
62 62 return s
63 63
64 64 _filters = {
65 65 'dumbdecode:': dumbdecode,
66 66 'dumbencode:': dumbencode,
67 67 'cleverdecode:': cleverdecode,
68 68 'cleverencode:': cleverencode,
69 69 }
70 70
71 71 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
72 72 halt = False
73 73 for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
74 74 c = repo.changectx(rev)
75 75 for f in c.files():
76 76 if f not in c:
77 77 continue
78 78 data = c[f].data()
79 79 if '\0' not in data and '\r\n' in data:
80 80 if not halt:
81 81 ui.warn(_('Attempt to commit or push text file(s) '
82 82 'using CRLF line endings\n'))
83 83 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
84 84 halt = True
85 85 if halt and hooktype == 'pretxnchangegroup':
86 86 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
87 87 'add to Mercurial.ini or .hg/hgrc:\n'
88 88 '\n'
89 89 '[hooks]\n'
90 90 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n'
91 91 '\n'
92 92 'and also consider adding:\n'
93 93 '\n'
94 94 '[extensions]\n'
95 95 'hgext.win32text =\n'
96 96 '[encode]\n'
97 97 '** = cleverencode:\n'
98 98 '[decode]\n'
99 99 '** = cleverdecode:\n'))
100 100 return halt
101 101
102 102 def reposetup(ui, repo):
103 103 if not repo.local():
104 104 return
105 105 for name, fn in _filters.iteritems():
106 106 repo.adddatafilter(name, fn)
107 107
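
The dumbdecode/dumbencode pair above is the whole conversion: a regular expression that finds bare LFs (not already preceded by CR) on the way out of the repository, and a plain replace on the way back in. A tiny round-trip check, on a made-up string:

import re

re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

def dumbdecode(s):
    # LF -> CRLF, leaving lines that already end in CRLF alone
    return re_single_lf.sub('\\1\r\n', s)

def dumbencode(s):
    # CRLF -> LF before the data is stored
    return s.replace('\r\n', '\n')

stored = 'line one\nline two\n'
working = dumbdecode(stored)
assert working == 'line one\r\nline two\r\n'
assert dumbencode(working) == stored
assert dumbdecode(working) == working   # already-converted text is untouched
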
@@ -1,224 +1,224 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 from node import *
9 from node import hex
10 10 import cStringIO, os, stat, tarfile, time, util, zipfile
11 11 import zlib, gzip
12 12
13 13 def tidyprefix(dest, prefix, suffixes):
14 14 '''choose prefix to use for names in archive. make sure prefix is
15 15 safe for consumers.'''
16 16
17 17 if prefix:
18 18 prefix = util.normpath(prefix)
19 19 else:
20 20 if not isinstance(dest, str):
21 21 raise ValueError('dest must be string if no prefix')
22 22 prefix = os.path.basename(dest)
23 23 lower = prefix.lower()
24 24 for sfx in suffixes:
25 25 if lower.endswith(sfx):
26 26 prefix = prefix[:-len(sfx)]
27 27 break
28 28 lpfx = os.path.normpath(util.localpath(prefix))
29 29 prefix = util.pconvert(lpfx)
30 30 if not prefix.endswith('/'):
31 31 prefix += '/'
32 32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 33 raise util.Abort(_('archive prefix contains illegal components'))
34 34 return prefix
35 35
36 36 class tarit:
37 37 '''write archive to tar file or stream. can write uncompressed,
38 38 or compress with gzip or bzip2.'''
39 39
40 40 class GzipFileWithTime(gzip.GzipFile):
41 41
42 42 def __init__(self, *args, **kw):
43 43 timestamp = None
44 44 if 'timestamp' in kw:
45 45 timestamp = kw.pop('timestamp')
46 46 if timestamp == None:
47 47 self.timestamp = time.time()
48 48 else:
49 49 self.timestamp = timestamp
50 50 gzip.GzipFile.__init__(self, *args, **kw)
51 51
52 52 def _write_gzip_header(self):
53 53 self.fileobj.write('\037\213') # magic header
54 54 self.fileobj.write('\010') # compression method
55 55 fname = self.filename[:-3]
56 56 flags = 0
57 57 if fname:
58 58 flags = gzip.FNAME
59 59 self.fileobj.write(chr(flags))
60 60 gzip.write32u(self.fileobj, long(self.timestamp))
61 61 self.fileobj.write('\002')
62 62 self.fileobj.write('\377')
63 63 if fname:
64 64 self.fileobj.write(fname + '\000')
65 65
66 66 def __init__(self, dest, prefix, mtime, kind=''):
67 67 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
68 68 '.tgz', '.tbz2'])
69 69 self.mtime = mtime
70 70
71 71 def taropen(name, mode, fileobj=None):
72 72 if kind == 'gz':
73 73 mode = mode[0]
74 74 if not fileobj:
75 75 fileobj = open(name, mode + 'b')
76 76 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
77 77 zlib.Z_BEST_COMPRESSION,
78 78 fileobj, timestamp=mtime)
79 79 return tarfile.TarFile.taropen(name, mode, gzfileobj)
80 80 else:
81 81 return tarfile.open(name, mode + kind, fileobj)
82 82
83 83 if isinstance(dest, str):
84 84 self.z = taropen(dest, mode='w:')
85 85 else:
86 86 # Python 2.5-2.5.1 have a regression that requires a name arg
87 87 self.z = taropen(name='', mode='w|', fileobj=dest)
88 88
89 89 def addfile(self, name, mode, islink, data):
90 90 i = tarfile.TarInfo(self.prefix + name)
91 91 i.mtime = self.mtime
92 92 i.size = len(data)
93 93 if islink:
94 94 i.type = tarfile.SYMTYPE
95 95 i.mode = 0777
96 96 i.linkname = data
97 97 data = None
98 98 else:
99 99 i.mode = mode
100 100 data = cStringIO.StringIO(data)
101 101 self.z.addfile(i, data)
102 102
103 103 def done(self):
104 104 self.z.close()
105 105
106 106 class tellable:
107 107 '''provide tell method for zipfile.ZipFile when writing to http
108 108 response file object.'''
109 109
110 110 def __init__(self, fp):
111 111 self.fp = fp
112 112 self.offset = 0
113 113
114 114 def __getattr__(self, key):
115 115 return getattr(self.fp, key)
116 116
117 117 def write(self, s):
118 118 self.fp.write(s)
119 119 self.offset += len(s)
120 120
121 121 def tell(self):
122 122 return self.offset
123 123
124 124 class zipit:
125 125 '''write archive to zip file or stream. can write uncompressed,
126 126 or compressed with deflate.'''
127 127
128 128 def __init__(self, dest, prefix, mtime, compress=True):
129 129 self.prefix = tidyprefix(dest, prefix, ('.zip',))
130 130 if not isinstance(dest, str):
131 131 try:
132 132 dest.tell()
133 133 except (AttributeError, IOError):
134 134 dest = tellable(dest)
135 135 self.z = zipfile.ZipFile(dest, 'w',
136 136 compress and zipfile.ZIP_DEFLATED or
137 137 zipfile.ZIP_STORED)
138 138 self.date_time = time.gmtime(mtime)[:6]
139 139
140 140 def addfile(self, name, mode, islink, data):
141 141 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
142 142 i.compress_type = self.z.compression
143 143 # unzip will not honor unix file modes unless file creator is
144 144 # set to unix (id 3).
145 145 i.create_system = 3
146 146 ftype = stat.S_IFREG
147 147 if islink:
148 148 mode = 0777
149 149 ftype = stat.S_IFLNK
150 150 i.external_attr = (mode | ftype) << 16L
151 151 self.z.writestr(i, data)
152 152
153 153 def done(self):
154 154 self.z.close()
155 155
156 156 class fileit:
157 157 '''write archive as files in directory.'''
158 158
159 159 def __init__(self, name, prefix, mtime):
160 160 if prefix:
161 161 raise util.Abort(_('cannot give prefix when archiving to files'))
162 162 self.basedir = name
163 163 self.opener = util.opener(self.basedir)
164 164
165 165 def addfile(self, name, mode, islink, data):
166 166 if islink:
167 167 self.opener.symlink(data, name)
168 168 return
169 169 f = self.opener(name, "w", atomictemp=True)
170 170 f.write(data)
171 171 f.rename()
172 172 destfile = os.path.join(self.basedir, name)
173 173 os.chmod(destfile, mode)
174 174
175 175 def done(self):
176 176 pass
177 177
178 178 archivers = {
179 179 'files': fileit,
180 180 'tar': tarit,
181 181 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
182 182 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
183 183 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
184 184 'zip': zipit,
185 185 }
186 186
187 187 def archive(repo, dest, node, kind, decode=True, matchfn=None,
188 188 prefix=None, mtime=None):
189 189 '''create archive of repo as it was at node.
190 190
191 191 dest can be name of directory, name of archive file, or file
192 192 object to write archive to.
193 193
194 194 kind is type of archive to create.
195 195
196 196 decode tells whether to put files through decode filters from
197 197 hgrc.
198 198
199 199 matchfn is function to filter names of files to write to archive.
200 200
201 201 prefix is name of path to put before every archive member.'''
202 202
203 203 def write(name, mode, islink, getdata):
204 204 if matchfn and not matchfn(name): return
205 205 data = getdata()
206 206 if decode:
207 207 data = repo.wwritedata(name, data)
208 208 archiver.addfile(name, mode, islink, data)
209 209
210 210 ctx = repo.changectx(node)
211 211 if kind not in archivers:
212 212         raise util.Abort(_("unknown archive type '%s'") % kind)
213 213 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
214 214 m = ctx.manifest()
215 215 items = m.items()
216 216 items.sort()
217 217 if repo.ui.configbool("ui", "archivemeta", True):
218 218 write('.hg_archival.txt', 0644, False,
219 219 lambda: 'repo: %s\nnode: %s\n' % (
220 220 hex(repo.changelog.node(0)), hex(node)))
221 221 for filename, filenode in items:
222 222 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
223 223 lambda: repo.file(filename).read(filenode))
224 224 archiver.done()
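
The archive() function above is the module's public entry point: it picks a writer class from the archivers table, optionally emits the .hg_archival.txt metadata file, and streams every manifest entry through the chosen writer. A minimal sketch of how a caller might drive it follows; the repository path, output file name, and module import spellings are assumptions that may differ between Mercurial versions.

    # Hypothetical caller: archive the tip changeset as a gzipped tarball,
    # placing every member under the "myproject-tip/" prefix.
    from mercurial import ui as uimod, hg, archival

    u = uimod.ui()
    repo = hg.repository(u, '/path/to/repo')      # assumed local repository
    node = repo.changectx('tip').node()
    dest = open('myproject-tip.tar.gz', 'wb')
    try:
        archival.archive(repo, dest, node, 'tgz', prefix='myproject-tip/')
    finally:
        dest.close()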
@@ -1,282 +1,282 b''
1 1 """
2 2 bundlerepo.py - repository class for viewing uncompressed bundles
3 3
4 4 This provides a read-only repository interface to bundles as if
5 5 they were part of the actual repository.
6 6
7 7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 from node import *
13 from node import hex, nullid, short
14 14 from i18n import _
15 15 import changegroup, util, os, struct, bz2, tempfile, mdiff
16 16 import localrepo, changelog, manifest, filelog, revlog
17 17
18 18 class bundlerevlog(revlog.revlog):
19 19 def __init__(self, opener, indexfile, bundlefile,
20 20 linkmapper=None):
21 21 # How it works:
22 22 # to retrieve a revision, we need to know the offset of
23 23 # the revision in the bundlefile (an opened file).
24 24 #
25 25         # We store this offset in the index (start). To differentiate a
26 26         # rev in the bundle from a rev in the revlog, we check
27 27 # len(index[r]). If the tuple is bigger than 7, it is a bundle
28 28         # (it is bigger since we also store the node the delta is against)
29 29 #
30 30 revlog.revlog.__init__(self, opener, indexfile)
31 31 self.bundlefile = bundlefile
32 32 self.basemap = {}
33 33 def chunkpositer():
34 34 for chunk in changegroup.chunkiter(bundlefile):
35 35 pos = bundlefile.tell()
36 36 yield chunk, pos - len(chunk)
37 37 n = self.count()
38 38 prev = None
39 39 for chunk, start in chunkpositer():
40 40 size = len(chunk)
41 41 if size < 80:
42 42 raise util.Abort("invalid changegroup")
43 43 start += 80
44 44 size -= 80
45 45 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
46 46 if node in self.nodemap:
47 47 prev = node
48 48 continue
49 49 for p in (p1, p2):
50 50 if not p in self.nodemap:
51 51 raise revlog.LookupError(hex(p1), _("unknown parent %s") % short(p1))
52 52 if linkmapper is None:
53 53 link = n
54 54 else:
55 55 link = linkmapper(cs)
56 56
57 57 if not prev:
58 58 prev = p1
59 59 # start, size, full unc. size, base (unused), link, p1, p2, node
60 60 e = (revlog.offset_type(start, 0), size, -1, -1, link,
61 61 self.rev(p1), self.rev(p2), node)
62 62 self.basemap[n] = prev
63 63 self.index.insert(-1, e)
64 64 self.nodemap[node] = n
65 65 prev = node
66 66 n += 1
67 67
68 68 def bundle(self, rev):
69 69 """is rev from the bundle"""
70 70 if rev < 0:
71 71 return False
72 72 return rev in self.basemap
73 73 def bundlebase(self, rev): return self.basemap[rev]
74 74 def chunk(self, rev, df=None, cachelen=4096):
75 75         # Warning: in the case of a bundle, the diff is against bundlebase,
76 76 # not against rev - 1
77 77 # XXX: could use some caching
78 78 if not self.bundle(rev):
79 79 return revlog.revlog.chunk(self, rev, df)
80 80 self.bundlefile.seek(self.start(rev))
81 81 return self.bundlefile.read(self.length(rev))
82 82
83 83 def revdiff(self, rev1, rev2):
84 84 """return or calculate a delta between two revisions"""
85 85 if self.bundle(rev1) and self.bundle(rev2):
86 86 # hot path for bundle
87 87 revb = self.rev(self.bundlebase(rev2))
88 88 if revb == rev1:
89 89 return self.chunk(rev2)
90 90 elif not self.bundle(rev1) and not self.bundle(rev2):
91 91 return revlog.revlog.revdiff(self, rev1, rev2)
92 92
93 93 return mdiff.textdiff(self.revision(self.node(rev1)),
94 94 self.revision(self.node(rev2)))
95 95
96 96 def revision(self, node):
97 97 """return an uncompressed revision of a given"""
98 98 if node == nullid: return ""
99 99
100 100 text = None
101 101 chain = []
102 102 iter_node = node
103 103 rev = self.rev(iter_node)
104 104 # reconstruct the revision if it is from a changegroup
105 105 while self.bundle(rev):
106 106 if self._cache and self._cache[0] == iter_node:
107 107 text = self._cache[2]
108 108 break
109 109 chain.append(rev)
110 110 iter_node = self.bundlebase(rev)
111 111 rev = self.rev(iter_node)
112 112 if text is None:
113 113 text = revlog.revlog.revision(self, iter_node)
114 114
115 115 while chain:
116 116 delta = self.chunk(chain.pop())
117 117 text = mdiff.patches(text, [delta])
118 118
119 119 p1, p2 = self.parents(node)
120 120 if node != revlog.hash(text, p1, p2):
121 121 raise revlog.RevlogError(_("integrity check failed on %s:%d")
122 122 % (self.datafile, self.rev(node)))
123 123
124 124 self._cache = (node, self.rev(node), text)
125 125 return text
126 126
127 127 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
128 128 raise NotImplementedError
129 129 def addgroup(self, revs, linkmapper, transaction, unique=0):
130 130 raise NotImplementedError
131 131 def strip(self, rev, minlink):
132 132 raise NotImplementedError
133 133 def checksize(self):
134 134 raise NotImplementedError
135 135
136 136 class bundlechangelog(bundlerevlog, changelog.changelog):
137 137 def __init__(self, opener, bundlefile):
138 138 changelog.changelog.__init__(self, opener)
139 139 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
140 140
141 141 class bundlemanifest(bundlerevlog, manifest.manifest):
142 142 def __init__(self, opener, bundlefile, linkmapper):
143 143 manifest.manifest.__init__(self, opener)
144 144 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
145 145 linkmapper)
146 146
147 147 class bundlefilelog(bundlerevlog, filelog.filelog):
148 148 def __init__(self, opener, path, bundlefile, linkmapper):
149 149 filelog.filelog.__init__(self, opener, path)
150 150 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
151 151 linkmapper)
152 152
153 153 class bundlerepository(localrepo.localrepository):
154 154 def __init__(self, ui, path, bundlename):
155 155 localrepo.localrepository.__init__(self, ui, path)
156 156
157 157 if path:
158 158 self._url = 'bundle:' + path + '+' + bundlename
159 159 else:
160 160 self._url = 'bundle:' + bundlename
161 161
162 162 self.tempfile = None
163 163 self.bundlefile = open(bundlename, "rb")
164 164 header = self.bundlefile.read(6)
165 165 if not header.startswith("HG"):
166 166 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
167 167 elif not header.startswith("HG10"):
168 168 raise util.Abort(_("%s: unknown bundle version") % bundlename)
169 169 elif header == "HG10BZ":
170 170 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
171 171 suffix=".hg10un", dir=self.path)
172 172 self.tempfile = temp
173 173 fptemp = os.fdopen(fdtemp, 'wb')
174 174 def generator(f):
175 175 zd = bz2.BZ2Decompressor()
176 176 zd.decompress("BZ")
177 177 for chunk in f:
178 178 yield zd.decompress(chunk)
179 179 gen = generator(util.filechunkiter(self.bundlefile, 4096))
180 180
181 181 try:
182 182 fptemp.write("HG10UN")
183 183 for chunk in gen:
184 184 fptemp.write(chunk)
185 185 finally:
186 186 fptemp.close()
187 187 self.bundlefile.close()
188 188
189 189 self.bundlefile = open(self.tempfile, "rb")
190 190 # seek right after the header
191 191 self.bundlefile.seek(6)
192 192 elif header == "HG10UN":
193 193 # nothing to do
194 194 pass
195 195 else:
196 196 raise util.Abort(_("%s: unknown bundle compression type")
197 197 % bundlename)
198 198 # dict with the mapping 'filename' -> position in the bundle
199 199 self.bundlefilespos = {}
200 200
201 201 def __getattr__(self, name):
202 202 if name == 'changelog':
203 203 self.changelog = bundlechangelog(self.sopener, self.bundlefile)
204 204 self.manstart = self.bundlefile.tell()
205 205 return self.changelog
206 206 if name == 'manifest':
207 207 self.bundlefile.seek(self.manstart)
208 208 self.manifest = bundlemanifest(self.sopener, self.bundlefile,
209 209 self.changelog.rev)
210 210 self.filestart = self.bundlefile.tell()
211 211 return self.manifest
212 212 if name == 'manstart':
213 213 self.changelog
214 214 return self.manstart
215 215 if name == 'filestart':
216 216 self.manifest
217 217 return self.filestart
218 218 return localrepo.localrepository.__getattr__(self, name)
219 219
220 220 def url(self):
221 221 return self._url
222 222
223 223 def dev(self):
224 224 return -1
225 225
226 226 def file(self, f):
227 227 if not self.bundlefilespos:
228 228 self.bundlefile.seek(self.filestart)
229 229 while 1:
230 230 chunk = changegroup.getchunk(self.bundlefile)
231 231 if not chunk:
232 232 break
233 233 self.bundlefilespos[chunk] = self.bundlefile.tell()
234 234 for c in changegroup.chunkiter(self.bundlefile):
235 235 pass
236 236
237 237 if f[0] == '/':
238 238 f = f[1:]
239 239 if f in self.bundlefilespos:
240 240 self.bundlefile.seek(self.bundlefilespos[f])
241 241 return bundlefilelog(self.sopener, f, self.bundlefile,
242 242 self.changelog.rev)
243 243 else:
244 244 return filelog.filelog(self.sopener, f)
245 245
246 246 def close(self):
247 247 """Close assigned bundle file immediately."""
248 248 self.bundlefile.close()
249 249
250 250 def __del__(self):
251 251 bundlefile = getattr(self, 'bundlefile', None)
252 252 if bundlefile and not bundlefile.closed:
253 253 bundlefile.close()
254 254 tempfile = getattr(self, 'tempfile', None)
255 255 if tempfile is not None:
256 256 os.unlink(tempfile)
257 257
258 258 def instance(ui, path, create):
259 259 if create:
260 260 raise util.Abort(_('cannot create new bundle repository'))
261 261 parentpath = ui.config("bundle", "mainreporoot", "")
262 262 if parentpath:
263 263 # Try to make the full path relative so we get a nice, short URL.
264 264 # In particular, we don't want temp dir names in test outputs.
265 265 cwd = os.getcwd()
266 266 if parentpath == cwd:
267 267 parentpath = ''
268 268 else:
269 269 cwd = os.path.join(cwd,'')
270 270 if parentpath.startswith(cwd):
271 271 parentpath = parentpath[len(cwd):]
272 272 path = util.drop_scheme('file', path)
273 273 if path.startswith('bundle:'):
274 274 path = util.drop_scheme('bundle', path)
275 275 s = path.split("+", 1)
276 276 if len(s) == 1:
277 277 repopath, bundlename = parentpath, s[0]
278 278 else:
279 279 repopath, bundlename = s
280 280 else:
281 281 repopath, bundlename = parentpath, path
282 282 return bundlerepository(ui, repopath, bundlename)
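
Most of the branching in bundlerepository.__init__ deals with the 6-byte bundle header: anything not starting with "HG" is rejected, unknown "HG1x" versions are rejected, "HG10BZ" payloads are decompressed into a temporary "HG10UN" file, and "HG10UN" is used as-is. A small standalone sketch of that classification logic, illustrative only and not part of the module:

    def classify_bundle(path):
        """Report what kind of bundle a file contains, mirroring the header
        checks performed by bundlerepository.__init__ above."""
        header = open(path, 'rb').read(6)
        if not header.startswith('HG'):
            return 'not a Mercurial bundle file'
        if not header.startswith('HG10'):
            return 'unknown bundle version'
        if header == 'HG10BZ':
            return 'bzip2-compressed changegroup'
        if header == 'HG10UN':
            return 'uncompressed changegroup'
        return 'unknown bundle compression type'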
@@ -1,192 +1,193 b''
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from revlog import *
8 from node import bin, hex, nullid
9 from revlog import revlog
9 10 from i18n import _
10 11 import os, time, util
11 12
12 13 def _string_escape(text):
13 14 """
14 15 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 16 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 17 >>> s
17 18 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 19 >>> res = _string_escape(s)
19 20 >>> s == res.decode('string_escape')
20 21 True
21 22 """
22 23 # subset of the string_escape codec
23 24 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 25 return text.replace('\0', '\\0')
25 26
26 27 class appender:
27 28 '''the changelog index must be updated last on disk, so we use this class
28 29 to delay writes to it'''
29 30 def __init__(self, fp, buf):
30 31 self.data = buf
31 32 self.fp = fp
32 33 self.offset = fp.tell()
33 34 self.size = util.fstat(fp).st_size
34 35
35 36 def end(self):
36 37 return self.size + len("".join(self.data))
37 38 def tell(self):
38 39 return self.offset
39 40 def flush(self):
40 41 pass
41 42 def close(self):
42 43 self.fp.close()
43 44
44 45 def seek(self, offset, whence=0):
45 46 '''virtual file offset spans real file and data'''
46 47 if whence == 0:
47 48 self.offset = offset
48 49 elif whence == 1:
49 50 self.offset += offset
50 51 elif whence == 2:
51 52 self.offset = self.end() + offset
52 53 if self.offset < self.size:
53 54 self.fp.seek(self.offset)
54 55
55 56 def read(self, count=-1):
56 57 '''only trick here is reads that span real file and data'''
57 58 ret = ""
58 59 if self.offset < self.size:
59 60 s = self.fp.read(count)
60 61 ret = s
61 62 self.offset += len(s)
62 63 if count > 0:
63 64 count -= len(s)
64 65 if count != 0:
65 66 doff = self.offset - self.size
66 67 self.data.insert(0, "".join(self.data))
67 68 del self.data[1:]
68 69 s = self.data[0][doff:doff+count]
69 70 self.offset += len(s)
70 71 ret += s
71 72 return ret
72 73
73 74 def write(self, s):
74 75 self.data.append(str(s))
75 76 self.offset += len(s)
76 77
77 78 class changelog(revlog):
78 79 def __init__(self, opener):
79 80 revlog.__init__(self, opener, "00changelog.i")
80 81
81 82 def delayupdate(self):
82 83 "delay visibility of index updates to other readers"
83 84 self._realopener = self.opener
84 85 self.opener = self._delayopener
85 86 self._delaycount = self.count()
86 87 self._delaybuf = []
87 88 self._delayname = None
88 89
89 90 def finalize(self, tr):
90 91 "finalize index updates"
91 92 self.opener = self._realopener
92 93 # move redirected index data back into place
93 94 if self._delayname:
94 95 util.rename(self._delayname + ".a", self._delayname)
95 96 elif self._delaybuf:
96 97 fp = self.opener(self.indexfile, 'a')
97 98 fp.write("".join(self._delaybuf))
98 99 fp.close()
99 100 del self._delaybuf
100 101 # split when we're done
101 102 self.checkinlinesize(tr)
102 103
103 104 def _delayopener(self, name, mode='r'):
104 105 fp = self._realopener(name, mode)
105 106 # only divert the index
106 107 if not name == self.indexfile:
107 108 return fp
108 109 # if we're doing an initial clone, divert to another file
109 110 if self._delaycount == 0:
110 111 self._delayname = fp.name
111 112 return self._realopener(name + ".a", mode)
112 113 # otherwise, divert to memory
113 114 return appender(fp, self._delaybuf)
114 115
115 116 def checkinlinesize(self, tr, fp=None):
116 117 if self.opener == self._delayopener:
117 118 return
118 119 return revlog.checkinlinesize(self, tr, fp)
119 120
120 121 def decode_extra(self, text):
121 122 extra = {}
122 123 for l in text.split('\0'):
123 124 if l:
124 125 k, v = l.decode('string_escape').split(':', 1)
125 126 extra[k] = v
126 127 return extra
127 128
128 129 def encode_extra(self, d):
129 130 # keys must be sorted to produce a deterministic changelog entry
130 131 keys = d.keys()
131 132 keys.sort()
132 133 items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
133 134 return "\0".join(items)
134 135
135 136 def read(self, node):
136 137 """
137 138 format used:
138 139 nodeid\n : manifest node in ascii
139 140 user\n : user, no \n or \r allowed
140 141 time tz extra\n : date (time is int or float, timezone is int)
141 142         : extra is metadata, encoded and separated by '\0'
142 143 : older versions ignore it
143 144 files\n\n : files modified by the cset, no \n or \r allowed
144 145 (.*) : comment (free text, ideally utf-8)
145 146
146 147 changelog v0 doesn't use extra
147 148 """
148 149 text = self.revision(node)
149 150 if not text:
150 151 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
151 152 last = text.index("\n\n")
152 153 desc = util.tolocal(text[last + 2:])
153 154 l = text[:last].split('\n')
154 155 manifest = bin(l[0])
155 156 user = util.tolocal(l[1])
156 157
157 158 extra_data = l[2].split(' ', 2)
158 159 if len(extra_data) != 3:
159 160 time = float(extra_data.pop(0))
160 161 try:
161 162 # various tools did silly things with the time zone field.
162 163 timezone = int(extra_data[0])
163 164 except:
164 165 timezone = 0
165 166 extra = {}
166 167 else:
167 168 time, timezone, extra = extra_data
168 169 time, timezone = float(time), int(timezone)
169 170 extra = self.decode_extra(extra)
170 171 if not extra.get('branch'):
171 172 extra['branch'] = 'default'
172 173 files = l[3:]
173 174 return (manifest, user, (time, timezone), files, desc, extra)
174 175
175 176 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
176 177 user=None, date=None, extra={}):
177 178
178 179 user, desc = util.fromlocal(user), util.fromlocal(desc)
179 180
180 181 if date:
181 182 parseddate = "%d %d" % util.parsedate(date)
182 183 else:
183 184 parseddate = "%d %d" % util.makedate()
184 185 if extra and extra.get("branch") in ("default", ""):
185 186 del extra["branch"]
186 187 if extra:
187 188 extra = self.encode_extra(extra)
188 189 parseddate = "%s %s" % (parseddate, extra)
189 190 list.sort()
190 191 l = [hex(manifest), user, parseddate] + list + ["", desc]
191 192 text = "\n".join(l)
192 193 return self.addrevision(text, transaction, self.count(), p1, p2)
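
The layout documented in read() and produced by add() is easiest to grasp from a concrete entry. The sketch below shows an invented raw changelog text plus a standalone re-implementation of the extra-field round trip; it mirrors _string_escape, encode_extra and decode_extra above and is not part of the module.

    # An invented raw changelog entry, laid out as described in read():
    #
    #   1e2f3a4b...                      <- manifest node, 40 hex digits
    #   Jane Doe <jane@example.org>      <- user
    #   1190000000 -7200 branch:stable   <- time, timezone, encoded extra
    #   hgext/acl.py                     <- changed files, one per line
    #   mercurial/changelog.py
    #                                    <- blank line
    #   fix extra handling               <- free-text description

    def encode_extra(d):
        # sorted key:value pairs, escaped, joined by NUL bytes
        escape = lambda s: (s.replace('\\', '\\\\').replace('\n', '\\n')
                             .replace('\r', '\\r').replace('\0', '\\0'))
        return '\0'.join([escape('%s:%s' % (k, d[k])) for k in sorted(d)])

    def decode_extra(text):
        extra = {}
        for l in text.split('\0'):
            if l:
                k, v = l.decode('string_escape').split(':', 1)
                extra[k] = v
        return extra

    # decode_extra(encode_extra({'branch': 'stable'})) == {'branch': 'stable'}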
@@ -1,1176 +1,1176 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, bisect, stat
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 12
13 13 revrangesep = ':'
14 14
15 15 class UnknownCommand(Exception):
16 16 """Exception raised if command is not in the command table."""
17 17 class AmbiguousCommand(Exception):
18 18 """Exception raised if command shortcut matches more than one command."""
19 19
20 20 def findpossible(ui, cmd, table):
21 21 """
22 22 Return cmd -> (aliases, command table entry)
23 23 for each matching command.
24 24 Return debug commands (or their aliases) only if no normal command matches.
25 25 """
26 26 choice = {}
27 27 debugchoice = {}
28 28 for e in table.keys():
29 29 aliases = e.lstrip("^").split("|")
30 30 found = None
31 31 if cmd in aliases:
32 32 found = cmd
33 33 elif not ui.config("ui", "strict"):
34 34 for a in aliases:
35 35 if a.startswith(cmd):
36 36 found = a
37 37 break
38 38 if found is not None:
39 39 if aliases[0].startswith("debug") or found.startswith("debug"):
40 40 debugchoice[found] = (aliases, table[e])
41 41 else:
42 42 choice[found] = (aliases, table[e])
43 43
44 44 if not choice and debugchoice:
45 45 choice = debugchoice
46 46
47 47 return choice
48 48
49 49 def findcmd(ui, cmd, table):
50 50 """Return (aliases, command table entry) for command string."""
51 51 choice = findpossible(ui, cmd, table)
52 52
53 53 if cmd in choice:
54 54 return choice[cmd]
55 55
56 56 if len(choice) > 1:
57 57 clist = choice.keys()
58 58 clist.sort()
59 59 raise AmbiguousCommand(cmd, clist)
60 60
61 61 if choice:
62 62 return choice.values()[0]
63 63
64 64 raise UnknownCommand(cmd)
65 65
66 66 def bail_if_changed(repo):
67 67 if repo.dirstate.parents()[1] != nullid:
68 68 raise util.Abort(_('outstanding uncommitted merge'))
69 69 modified, added, removed, deleted = repo.status()[:4]
70 70 if modified or added or removed or deleted:
71 71 raise util.Abort(_("outstanding uncommitted changes"))
72 72
73 73 def logmessage(opts):
74 74 """ get the log message according to -m and -l option """
75 75 message = opts['message']
76 76 logfile = opts['logfile']
77 77
78 78 if message and logfile:
79 79 raise util.Abort(_('options --message and --logfile are mutually '
80 80 'exclusive'))
81 81 if not message and logfile:
82 82 try:
83 83 if logfile == '-':
84 84 message = sys.stdin.read()
85 85 else:
86 86 message = open(logfile).read()
87 87 except IOError, inst:
88 88 raise util.Abort(_("can't read commit message '%s': %s") %
89 89 (logfile, inst.strerror))
90 90 return message
91 91
92 92 def loglimit(opts):
93 93 """get the log limit according to option -l/--limit"""
94 94 limit = opts.get('limit')
95 95 if limit:
96 96 try:
97 97 limit = int(limit)
98 98 except ValueError:
99 99 raise util.Abort(_('limit must be a positive integer'))
100 100 if limit <= 0: raise util.Abort(_('limit must be positive'))
101 101 else:
102 102 limit = sys.maxint
103 103 return limit
104 104
105 105 def setremoteconfig(ui, opts):
106 106 "copy remote options to ui tree"
107 107 if opts.get('ssh'):
108 108 ui.setconfig("ui", "ssh", opts['ssh'])
109 109 if opts.get('remotecmd'):
110 110 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
111 111
112 112 def revpair(repo, revs):
113 113 '''return pair of nodes, given list of revisions. second item can
114 114 be None, meaning use working dir.'''
115 115
116 116 def revfix(repo, val, defval):
117 117 if not val and val != 0 and defval is not None:
118 118 val = defval
119 119 return repo.lookup(val)
120 120
121 121 if not revs:
122 122 return repo.dirstate.parents()[0], None
123 123 end = None
124 124 if len(revs) == 1:
125 125 if revrangesep in revs[0]:
126 126 start, end = revs[0].split(revrangesep, 1)
127 127 start = revfix(repo, start, 0)
128 128 end = revfix(repo, end, repo.changelog.count() - 1)
129 129 else:
130 130 start = revfix(repo, revs[0], None)
131 131 elif len(revs) == 2:
132 132 if revrangesep in revs[0] or revrangesep in revs[1]:
133 133 raise util.Abort(_('too many revisions specified'))
134 134 start = revfix(repo, revs[0], None)
135 135 end = revfix(repo, revs[1], None)
136 136 else:
137 137 raise util.Abort(_('too many revisions specified'))
138 138 return start, end
139 139
140 140 def revrange(repo, revs):
141 141 """Yield revision as strings from a list of revision specifications."""
142 142
143 143 def revfix(repo, val, defval):
144 144 if not val and val != 0 and defval is not None:
145 145 return defval
146 146 return repo.changelog.rev(repo.lookup(val))
147 147
148 148 seen, l = {}, []
149 149 for spec in revs:
150 150 if revrangesep in spec:
151 151 start, end = spec.split(revrangesep, 1)
152 152 start = revfix(repo, start, 0)
153 153 end = revfix(repo, end, repo.changelog.count() - 1)
154 154 step = start > end and -1 or 1
155 155 for rev in xrange(start, end+step, step):
156 156 if rev in seen:
157 157 continue
158 158 seen[rev] = 1
159 159 l.append(rev)
160 160 else:
161 161 rev = revfix(repo, spec, None)
162 162 if rev in seen:
163 163 continue
164 164 seen[rev] = 1
165 165 l.append(rev)
166 166
167 167 return l
168 168
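
revrange() turns '-r' style specifications into a deduplicated list of revision numbers; a few invented examples of its behaviour, following the code above:

    # Assuming a repository with revisions 0..7:
    #   revrange(repo, ['5:2'])        -> [5, 4, 3, 2]     descending range
    #   revrange(repo, [':2', '1:3'])  -> [0, 1, 2, 3]     duplicates skipped
    #   revrange(repo, ['6:'])         -> [6, 7]           open end means tip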
169 169 def make_filename(repo, pat, node,
170 170 total=None, seqno=None, revwidth=None, pathname=None):
171 171 node_expander = {
172 172 'H': lambda: hex(node),
173 173 'R': lambda: str(repo.changelog.rev(node)),
174 174 'h': lambda: short(node),
175 175 }
176 176 expander = {
177 177 '%': lambda: '%',
178 178 'b': lambda: os.path.basename(repo.root),
179 179 }
180 180
181 181 try:
182 182 if node:
183 183 expander.update(node_expander)
184 184 if node:
185 185 expander['r'] = (lambda:
186 186 str(repo.changelog.rev(node)).zfill(revwidth or 0))
187 187 if total is not None:
188 188 expander['N'] = lambda: str(total)
189 189 if seqno is not None:
190 190 expander['n'] = lambda: str(seqno)
191 191 if total is not None and seqno is not None:
192 192 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
193 193 if pathname is not None:
194 194 expander['s'] = lambda: os.path.basename(pathname)
195 195 expander['d'] = lambda: os.path.dirname(pathname) or '.'
196 196 expander['p'] = lambda: pathname
197 197
198 198 newname = []
199 199 patlen = len(pat)
200 200 i = 0
201 201 while i < patlen:
202 202 c = pat[i]
203 203 if c == '%':
204 204 i += 1
205 205 c = pat[i]
206 206 c = expander[c]()
207 207 newname.append(c)
208 208 i += 1
209 209 return ''.join(newname)
210 210 except KeyError, inst:
211 211 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
212 212 inst.args[0])
213 213
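
make_filename() expands the %-escapes accepted by output-file options such as `hg export -o`. An invented expansion, assuming a repository rooted at /home/me/hello and three patches being exported:

    #   hg export -o '%b-%n-of-%N-%h.patch' 10 11 12
    #
    # for the second of the three revisions expands to something like
    #
    #   hello-2-of-3-a1b2c3d4e5f6.patch
    #
    #   %b  basename of the repository root        -> hello
    #   %n  zero-padded sequence number            -> 2
    #   %N  total number of output files           -> 3
    #   %h  short changeset hash (12 hex digits)   -> a1b2c3d4e5f6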
214 214 def make_file(repo, pat, node=None,
215 215 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
216 216 if not pat or pat == '-':
217 217 return 'w' in mode and sys.stdout or sys.stdin
218 218 if hasattr(pat, 'write') and 'w' in mode:
219 219 return pat
220 220 if hasattr(pat, 'read') and 'r' in mode:
221 221 return pat
222 222 return open(make_filename(repo, pat, node, total, seqno, revwidth,
223 223 pathname),
224 224 mode)
225 225
226 226 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
227 227 cwd = repo.getcwd()
228 228 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
229 229 opts.get('exclude'), globbed=globbed,
230 230 default=default)
231 231
232 232 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
233 233 default=None):
234 234 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
235 235 default=default)
236 236 exact = dict.fromkeys(files)
237 237 cwd = repo.getcwd()
238 238 for src, fn in repo.walk(node=node, files=files, match=matchfn,
239 239 badmatch=badmatch):
240 240 yield src, fn, repo.pathto(fn, cwd), fn in exact
241 241
242 242 def findrenames(repo, added=None, removed=None, threshold=0.5):
243 243 '''find renamed files -- yields (before, after, score) tuples'''
244 244 if added is None or removed is None:
245 245 added, removed = repo.status()[1:3]
246 246 ctx = repo.changectx()
247 247 for a in added:
248 248 aa = repo.wread(a)
249 249 bestname, bestscore = None, threshold
250 250 for r in removed:
251 251 rr = ctx.filectx(r).data()
252 252
253 253 # bdiff.blocks() returns blocks of matching lines
254 254 # count the number of bytes in each
255 255 equal = 0
256 256 alines = mdiff.splitnewlines(aa)
257 257 matches = bdiff.blocks(aa, rr)
258 258 for x1,x2,y1,y2 in matches:
259 259 for line in alines[x1:x2]:
260 260 equal += len(line)
261 261
262 262 lengths = len(aa) + len(rr)
263 263 if lengths:
264 264 myscore = equal*2.0 / lengths
265 265 if myscore >= bestscore:
266 266 bestname, bestscore = r, myscore
267 267 if bestname:
268 268 yield bestname, a, bestscore
269 269
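
The score computed by findrenames() is 2 * matching_bytes / (len(added) + len(removed)), so identical files score 1.0 and completely different files 0.0. As a rough standalone illustration, difflib's SequenceMatcher.ratio() computes the same 2*M/T formula; the real code uses Mercurial's bdiff on matched line blocks, and the file contents here are invented.

    import difflib

    removed = "def f():\n    return 1\n"
    added = "def f():\n    return 2\n"
    # ratio() is 2*M/T, where M is the number of matching elements and
    # T is the total length of both sequences.
    score = difflib.SequenceMatcher(None, added, removed).ratio()
    print score   # ~0.95, comfortably above the default 0.5 threshold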
270 270 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
271 271 if dry_run is None:
272 272 dry_run = opts.get('dry_run')
273 273 if similarity is None:
274 274 similarity = float(opts.get('similarity') or 0)
275 275 add, remove = [], []
276 276 mapping = {}
277 277 for src, abs, rel, exact in walk(repo, pats, opts):
278 278 target = repo.wjoin(abs)
279 279 if src == 'f' and abs not in repo.dirstate:
280 280 add.append(abs)
281 281 mapping[abs] = rel, exact
282 282 if repo.ui.verbose or not exact:
283 283 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
284 284 if repo.dirstate[abs] != 'r' and (not util.lexists(target)
285 285 or (os.path.isdir(target) and not os.path.islink(target))):
286 286 remove.append(abs)
287 287 mapping[abs] = rel, exact
288 288 if repo.ui.verbose or not exact:
289 289 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
290 290 if not dry_run:
291 291 repo.remove(remove)
292 292 repo.add(add)
293 293 if similarity > 0:
294 294 for old, new, score in findrenames(repo, add, remove, similarity):
295 295 oldrel, oldexact = mapping[old]
296 296 newrel, newexact = mapping[new]
297 297 if repo.ui.verbose or not oldexact or not newexact:
298 298 repo.ui.status(_('recording removal of %s as rename to %s '
299 299 '(%d%% similar)\n') %
300 300 (oldrel, newrel, score * 100))
301 301 if not dry_run:
302 302 repo.copy(old, new)
303 303
304 304 def copy(ui, repo, pats, opts, rename=False):
305 305 # called with the repo lock held
306 306 #
307 307 # hgsep => pathname that uses "/" to separate directories
308 308 # ossep => pathname that uses os.sep to separate directories
309 309 cwd = repo.getcwd()
310 310 targets = {}
311 311 after = opts.get("after")
312 312 dryrun = opts.get("dry_run")
313 313
314 314 def walkpat(pat):
315 315 srcs = []
316 316 for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
317 317 state = repo.dirstate[abs]
318 318 if state in '?r':
319 319 if exact and state == '?':
320 320 ui.warn(_('%s: not copying - file is not managed\n') % rel)
321 321 if exact and state == 'r':
322 322 ui.warn(_('%s: not copying - file has been marked for'
323 323 ' remove\n') % rel)
324 324 continue
325 325 # abs: hgsep
326 326 # rel: ossep
327 327 srcs.append((abs, rel, exact))
328 328 return srcs
329 329
330 330 # abssrc: hgsep
331 331 # relsrc: ossep
332 332 # otarget: ossep
333 333 def copyfile(abssrc, relsrc, otarget, exact):
334 334 abstarget = util.canonpath(repo.root, cwd, otarget)
335 335 reltarget = repo.pathto(abstarget, cwd)
336 336 target = repo.wjoin(abstarget)
337 337 src = repo.wjoin(abssrc)
338 338 state = repo.dirstate[abstarget]
339 339
340 340 # check for collisions
341 341 prevsrc = targets.get(abstarget)
342 342 if prevsrc is not None:
343 343 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
344 344 (reltarget, repo.pathto(abssrc, cwd),
345 345 repo.pathto(prevsrc, cwd)))
346 346 return
347 347
348 348 # check for overwrites
349 349 exists = os.path.exists(target)
350 350 if (not after and exists or after and state in 'mn'):
351 351 if not opts['force']:
352 352 ui.warn(_('%s: not overwriting - file exists\n') %
353 353 reltarget)
354 354 return
355 355
356 356 if after:
357 357 if not exists:
358 358 return
359 359 elif not dryrun:
360 360 try:
361 361 if exists:
362 362 os.unlink(target)
363 363 targetdir = os.path.dirname(target) or '.'
364 364 if not os.path.isdir(targetdir):
365 365 os.makedirs(targetdir)
366 366 util.copyfile(src, target)
367 367 except IOError, inst:
368 368 if inst.errno == errno.ENOENT:
369 369 ui.warn(_('%s: deleted in working copy\n') % relsrc)
370 370 else:
371 371 ui.warn(_('%s: cannot copy - %s\n') %
372 372 (relsrc, inst.strerror))
373 373 return True # report a failure
374 374
375 375 if ui.verbose or not exact:
376 376 action = rename and "moving" or "copying"
377 377 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
378 378
379 379 targets[abstarget] = abssrc
380 380
381 381 # fix up dirstate
382 382 origsrc = repo.dirstate.copied(abssrc) or abssrc
383 383 if abstarget == origsrc: # copying back a copy?
384 384 if state not in 'mn' and not dryrun:
385 385 repo.dirstate.normallookup(abstarget)
386 386 else:
387 387 if repo.dirstate[origsrc] == 'a':
388 388 if not ui.quiet:
389 389 ui.warn(_("%s has not been committed yet, so no copy "
390 390 "data will be stored for %s.\n")
391 391 % (repo.pathto(origsrc, cwd), reltarget))
392 392 if abstarget not in repo.dirstate and not dryrun:
393 393 repo.add([abstarget])
394 394 elif not dryrun:
395 395 repo.copy(origsrc, abstarget)
396 396
397 397 if rename and not dryrun:
398 398 repo.remove([abssrc], True)
399 399
400 400 # pat: ossep
401 401 # dest ossep
402 402 # srcs: list of (hgsep, hgsep, ossep, bool)
403 403 # return: function that takes hgsep and returns ossep
404 404 def targetpathfn(pat, dest, srcs):
405 405 if os.path.isdir(pat):
406 406 abspfx = util.canonpath(repo.root, cwd, pat)
407 407 abspfx = util.localpath(abspfx)
408 408 if destdirexists:
409 409 striplen = len(os.path.split(abspfx)[0])
410 410 else:
411 411 striplen = len(abspfx)
412 412 if striplen:
413 413 striplen += len(os.sep)
414 414 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
415 415 elif destdirexists:
416 416 res = lambda p: os.path.join(dest,
417 417 os.path.basename(util.localpath(p)))
418 418 else:
419 419 res = lambda p: dest
420 420 return res
421 421
422 422 # pat: ossep
423 423 # dest ossep
424 424 # srcs: list of (hgsep, hgsep, ossep, bool)
425 425 # return: function that takes hgsep and returns ossep
426 426 def targetpathafterfn(pat, dest, srcs):
427 427 if util.patkind(pat, None)[0]:
428 428 # a mercurial pattern
429 429 res = lambda p: os.path.join(dest,
430 430 os.path.basename(util.localpath(p)))
431 431 else:
432 432 abspfx = util.canonpath(repo.root, cwd, pat)
433 433 if len(abspfx) < len(srcs[0][0]):
434 434 # A directory. Either the target path contains the last
435 435 # component of the source path or it does not.
436 436 def evalpath(striplen):
437 437 score = 0
438 438 for s in srcs:
439 439 t = os.path.join(dest, util.localpath(s[0])[striplen:])
440 440 if os.path.exists(t):
441 441 score += 1
442 442 return score
443 443
444 444 abspfx = util.localpath(abspfx)
445 445 striplen = len(abspfx)
446 446 if striplen:
447 447 striplen += len(os.sep)
448 448 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
449 449 score = evalpath(striplen)
450 450 striplen1 = len(os.path.split(abspfx)[0])
451 451 if striplen1:
452 452 striplen1 += len(os.sep)
453 453 if evalpath(striplen1) > score:
454 454 striplen = striplen1
455 455 res = lambda p: os.path.join(dest,
456 456 util.localpath(p)[striplen:])
457 457 else:
458 458 # a file
459 459 if destdirexists:
460 460 res = lambda p: os.path.join(dest,
461 461 os.path.basename(util.localpath(p)))
462 462 else:
463 463 res = lambda p: dest
464 464 return res
465 465
466 466
467 467 pats = util.expand_glob(pats)
468 468 if not pats:
469 469 raise util.Abort(_('no source or destination specified'))
470 470 if len(pats) == 1:
471 471 raise util.Abort(_('no destination specified'))
472 472 dest = pats.pop()
473 473 destdirexists = os.path.isdir(dest)
474 474 if not destdirexists:
475 475 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
476 476 raise util.Abort(_('with multiple sources, destination must be an '
477 477 'existing directory'))
478 478 if util.endswithsep(dest):
479 479 raise util.Abort(_('destination %s is not a directory') % dest)
480 480
481 481 tfn = targetpathfn
482 482 if after:
483 483 tfn = targetpathafterfn
484 484 copylist = []
485 485 for pat in pats:
486 486 srcs = walkpat(pat)
487 487 if not srcs:
488 488 continue
489 489 copylist.append((tfn(pat, dest, srcs), srcs))
490 490 if not copylist:
491 491 raise util.Abort(_('no files to copy'))
492 492
493 493 errors = 0
494 494 for targetpath, srcs in copylist:
495 495 for abssrc, relsrc, exact in srcs:
496 496 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
497 497 errors += 1
498 498
499 499 if errors:
500 500 ui.warn(_('(consider using --after)\n'))
501 501
502 502 return errors
503 503
504 504 def service(opts, parentfn=None, initfn=None, runfn=None):
505 505 '''Run a command as a service.'''
506 506
507 507 if opts['daemon'] and not opts['daemon_pipefds']:
508 508 rfd, wfd = os.pipe()
509 509 args = sys.argv[:]
510 510 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
511 511 # Don't pass --cwd to the child process, because we've already
512 512 # changed directory.
513 513 for i in xrange(1,len(args)):
514 514 if args[i].startswith('--cwd='):
515 515 del args[i]
516 516 break
517 517 elif args[i].startswith('--cwd'):
518 518 del args[i:i+2]
519 519 break
520 520 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
521 521 args[0], args)
522 522 os.close(wfd)
523 523 os.read(rfd, 1)
524 524 if parentfn:
525 525 return parentfn(pid)
526 526 else:
527 527 os._exit(0)
528 528
529 529 if initfn:
530 530 initfn()
531 531
532 532 if opts['pid_file']:
533 533 fp = open(opts['pid_file'], 'w')
534 534 fp.write(str(os.getpid()) + '\n')
535 535 fp.close()
536 536
537 537 if opts['daemon_pipefds']:
538 538 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
539 539 os.close(rfd)
540 540 try:
541 541 os.setsid()
542 542 except AttributeError:
543 543 pass
544 544 os.write(wfd, 'y')
545 545 os.close(wfd)
546 546 sys.stdout.flush()
547 547 sys.stderr.flush()
548 548 fd = os.open(util.nulldev, os.O_RDWR)
549 549 if fd != 0: os.dup2(fd, 0)
550 550 if fd != 1: os.dup2(fd, 1)
551 551 if fd != 2: os.dup2(fd, 2)
552 552 if fd not in (0, 1, 2): os.close(fd)
553 553
554 554 if runfn:
555 555 return runfn()
556 556
557 557 class changeset_printer(object):
558 558     '''show changeset information when templating is not requested.'''
559 559
560 560 def __init__(self, ui, repo, patch, buffered):
561 561 self.ui = ui
562 562 self.repo = repo
563 563 self.buffered = buffered
564 564 self.patch = patch
565 565 self.header = {}
566 566 self.hunk = {}
567 567 self.lastheader = None
568 568
569 569 def flush(self, rev):
570 570 if rev in self.header:
571 571 h = self.header[rev]
572 572 if h != self.lastheader:
573 573 self.lastheader = h
574 574 self.ui.write(h)
575 575 del self.header[rev]
576 576 if rev in self.hunk:
577 577 self.ui.write(self.hunk[rev])
578 578 del self.hunk[rev]
579 579 return 1
580 580 return 0
581 581
582 582 def show(self, rev=0, changenode=None, copies=(), **props):
583 583 if self.buffered:
584 584 self.ui.pushbuffer()
585 585 self._show(rev, changenode, copies, props)
586 586 self.hunk[rev] = self.ui.popbuffer()
587 587 else:
588 588 self._show(rev, changenode, copies, props)
589 589
590 590 def _show(self, rev, changenode, copies, props):
591 591 '''show a single changeset or file revision'''
592 592 log = self.repo.changelog
593 593 if changenode is None:
594 594 changenode = log.node(rev)
595 595 elif not rev:
596 596 rev = log.rev(changenode)
597 597
598 598 if self.ui.quiet:
599 599 self.ui.write("%d:%s\n" % (rev, short(changenode)))
600 600 return
601 601
602 602 changes = log.read(changenode)
603 603 date = util.datestr(changes[2])
604 604 extra = changes[5]
605 605 branch = extra.get("branch")
606 606
607 607 hexfunc = self.ui.debugflag and hex or short
608 608
609 609 parents = [(p, hexfunc(log.node(p)))
610 610 for p in self._meaningful_parentrevs(log, rev)]
611 611
612 612 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
613 613
614 614 # don't show the default branch name
615 615 if branch != 'default':
616 616 branch = util.tolocal(branch)
617 617 self.ui.write(_("branch: %s\n") % branch)
618 618 for tag in self.repo.nodetags(changenode):
619 619 self.ui.write(_("tag: %s\n") % tag)
620 620 for parent in parents:
621 621 self.ui.write(_("parent: %d:%s\n") % parent)
622 622
623 623 if self.ui.debugflag:
624 624 self.ui.write(_("manifest: %d:%s\n") %
625 625 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
626 626 self.ui.write(_("user: %s\n") % changes[1])
627 627 self.ui.write(_("date: %s\n") % date)
628 628
629 629 if self.ui.debugflag:
630 630 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
631 631 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
632 632 files):
633 633 if value:
634 634 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
635 635 elif changes[3] and self.ui.verbose:
636 636 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
637 637 if copies and self.ui.verbose:
638 638 copies = ['%s (%s)' % c for c in copies]
639 639 self.ui.write(_("copies: %s\n") % ' '.join(copies))
640 640
641 641 if extra and self.ui.debugflag:
642 642 extraitems = extra.items()
643 643 extraitems.sort()
644 644 for key, value in extraitems:
645 645 self.ui.write(_("extra: %s=%s\n")
646 646 % (key, value.encode('string_escape')))
647 647
648 648 description = changes[4].strip()
649 649 if description:
650 650 if self.ui.verbose:
651 651 self.ui.write(_("description:\n"))
652 652 self.ui.write(description)
653 653 self.ui.write("\n\n")
654 654 else:
655 655 self.ui.write(_("summary: %s\n") %
656 656 description.splitlines()[0])
657 657 self.ui.write("\n")
658 658
659 659 self.showpatch(changenode)
660 660
661 661 def showpatch(self, node):
662 662 if self.patch:
663 663 prev = self.repo.changelog.parents(node)[0]
664 664 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
665 665 opts=patch.diffopts(self.ui))
666 666 self.ui.write("\n")
667 667
668 668 def _meaningful_parentrevs(self, log, rev):
669 669 """Return list of meaningful (or all if debug) parentrevs for rev.
670 670
671 671 For merges (two non-nullrev revisions) both parents are meaningful.
672 672 Otherwise the first parent revision is considered meaningful if it
673 673 is not the preceding revision.
674 674 """
675 675 parents = log.parentrevs(rev)
676 676 if not self.ui.debugflag and parents[1] == nullrev:
677 677 if parents[0] >= rev - 1:
678 678 parents = []
679 679 else:
680 680 parents = [parents[0]]
681 681 return parents
682 682
683 683
684 684 class changeset_templater(changeset_printer):
685 685 '''format changeset information.'''
686 686
687 687 def __init__(self, ui, repo, patch, mapfile, buffered):
688 688 changeset_printer.__init__(self, ui, repo, patch, buffered)
689 689 filters = templatefilters.filters.copy()
690 690 filters['formatnode'] = (ui.debugflag and (lambda x: x)
691 691 or (lambda x: x[:12]))
692 692 self.t = templater.templater(mapfile, filters,
693 693 cache={
694 694 'parent': '{rev}:{node|formatnode} ',
695 695 'manifest': '{rev}:{node|formatnode}',
696 696 'filecopy': '{name} ({source})'})
697 697
698 698 def use_template(self, t):
699 699 '''set template string to use'''
700 700 self.t.cache['changeset'] = t
701 701
702 702 def _show(self, rev, changenode, copies, props):
703 703 '''show a single changeset or file revision'''
704 704 log = self.repo.changelog
705 705 if changenode is None:
706 706 changenode = log.node(rev)
707 707 elif not rev:
708 708 rev = log.rev(changenode)
709 709
710 710 changes = log.read(changenode)
711 711
712 712 def showlist(name, values, plural=None, **args):
713 713 '''expand set of values.
714 714 name is name of key in template map.
715 715 values is list of strings or dicts.
716 716 plural is plural of name, if not simply name + 's'.
717 717
718 718 expansion works like this, given name 'foo'.
719 719
720 720 if values is empty, expand 'no_foos'.
721 721
722 722 if 'foo' not in template map, return values as a string,
723 723 joined by space.
724 724
725 725 expand 'start_foos'.
726 726
727 727 for each value, expand 'foo'. if 'last_foo' in template
728 728 map, expand it instead of 'foo' for last key.
729 729
730 730 expand 'end_foos'.
731 731 '''
732 732 if plural: names = plural
733 733 else: names = name + 's'
734 734 if not values:
735 735 noname = 'no_' + names
736 736 if noname in self.t:
737 737 yield self.t(noname, **args)
738 738 return
739 739 if name not in self.t:
740 740 if isinstance(values[0], str):
741 741 yield ' '.join(values)
742 742 else:
743 743 for v in values:
744 744 yield dict(v, **args)
745 745 return
746 746 startname = 'start_' + names
747 747 if startname in self.t:
748 748 yield self.t(startname, **args)
749 749 vargs = args.copy()
750 750 def one(v, tag=name):
751 751 try:
752 752 vargs.update(v)
753 753 except (AttributeError, ValueError):
754 754 try:
755 755 for a, b in v:
756 756 vargs[a] = b
757 757 except ValueError:
758 758 vargs[name] = v
759 759 return self.t(tag, **vargs)
760 760 lastname = 'last_' + name
761 761 if lastname in self.t:
762 762 last = values.pop()
763 763 else:
764 764 last = None
765 765 for v in values:
766 766 yield one(v)
767 767 if last is not None:
768 768 yield one(last, tag=lastname)
769 769 endname = 'end_' + names
770 770 if endname in self.t:
771 771 yield self.t(endname, **args)
772 772
773 773 def showbranches(**args):
774 774 branch = changes[5].get("branch")
775 775 if branch != 'default':
776 776 branch = util.tolocal(branch)
777 777 return showlist('branch', [branch], plural='branches', **args)
778 778
779 779 def showparents(**args):
780 780 parents = [[('rev', p), ('node', hex(log.node(p)))]
781 781 for p in self._meaningful_parentrevs(log, rev)]
782 782 return showlist('parent', parents, **args)
783 783
784 784 def showtags(**args):
785 785 return showlist('tag', self.repo.nodetags(changenode), **args)
786 786
787 787 def showextras(**args):
788 788 extras = changes[5].items()
789 789 extras.sort()
790 790 for key, value in extras:
791 791 args = args.copy()
792 792 args.update(dict(key=key, value=value))
793 793 yield self.t('extra', **args)
794 794
795 795 def showcopies(**args):
796 796 c = [{'name': x[0], 'source': x[1]} for x in copies]
797 797 return showlist('file_copy', c, plural='file_copies', **args)
798 798
799 799 files = []
800 800 def getfiles():
801 801 if not files:
802 802 files[:] = self.repo.status(
803 803 log.parents(changenode)[0], changenode)[:3]
804 804 return files
805 805 def showfiles(**args):
806 806 return showlist('file', changes[3], **args)
807 807 def showmods(**args):
808 808 return showlist('file_mod', getfiles()[0], **args)
809 809 def showadds(**args):
810 810 return showlist('file_add', getfiles()[1], **args)
811 811 def showdels(**args):
812 812 return showlist('file_del', getfiles()[2], **args)
813 813 def showmanifest(**args):
814 814 args = args.copy()
815 815 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
816 816 node=hex(changes[0])))
817 817 return self.t('manifest', **args)
818 818
819 819 defprops = {
820 820 'author': changes[1],
821 821 'branches': showbranches,
822 822 'date': changes[2],
823 823 'desc': changes[4].strip(),
824 824 'file_adds': showadds,
825 825 'file_dels': showdels,
826 826 'file_mods': showmods,
827 827 'files': showfiles,
828 828 'file_copies': showcopies,
829 829 'manifest': showmanifest,
830 830 'node': hex(changenode),
831 831 'parents': showparents,
832 832 'rev': rev,
833 833 'tags': showtags,
834 834 'extras': showextras,
835 835 }
836 836 props = props.copy()
837 837 props.update(defprops)
838 838
839 839 try:
840 840 if self.ui.debugflag and 'header_debug' in self.t:
841 841 key = 'header_debug'
842 842 elif self.ui.quiet and 'header_quiet' in self.t:
843 843 key = 'header_quiet'
844 844 elif self.ui.verbose and 'header_verbose' in self.t:
845 845 key = 'header_verbose'
846 846 elif 'header' in self.t:
847 847 key = 'header'
848 848 else:
849 849 key = ''
850 850 if key:
851 851 h = templater.stringify(self.t(key, **props))
852 852 if self.buffered:
853 853 self.header[rev] = h
854 854 else:
855 855 self.ui.write(h)
856 856 if self.ui.debugflag and 'changeset_debug' in self.t:
857 857 key = 'changeset_debug'
858 858 elif self.ui.quiet and 'changeset_quiet' in self.t:
859 859 key = 'changeset_quiet'
860 860 elif self.ui.verbose and 'changeset_verbose' in self.t:
861 861 key = 'changeset_verbose'
862 862 else:
863 863 key = 'changeset'
864 864 self.ui.write(templater.stringify(self.t(key, **props)))
865 865 self.showpatch(changenode)
866 866 except KeyError, inst:
867 867 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
868 868 inst.args[0]))
869 869 except SyntaxError, inst:
870 870 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
871 871
872 872 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
873 873 """show one changeset using template or regular display.
874 874
875 875 Display format will be the first non-empty hit of:
876 876 1. option 'template'
877 877 2. option 'style'
878 878 3. [ui] setting 'logtemplate'
879 879 4. [ui] setting 'style'
880 880     If all of these values are either unset or the empty string,
881 881 regular display via changeset_printer() is done.
882 882 """
883 883 # options
884 884 patch = False
885 885 if opts.get('patch'):
886 886 patch = matchfn or util.always
887 887
888 888 tmpl = opts.get('template')
889 889 mapfile = None
890 890 if tmpl:
891 891 tmpl = templater.parsestring(tmpl, quoted=False)
892 892 else:
893 893 mapfile = opts.get('style')
894 894 # ui settings
895 895 if not mapfile:
896 896 tmpl = ui.config('ui', 'logtemplate')
897 897 if tmpl:
898 898 tmpl = templater.parsestring(tmpl)
899 899 else:
900 900 mapfile = ui.config('ui', 'style')
901 901
902 902 if tmpl or mapfile:
903 903 if mapfile:
904 904 if not os.path.split(mapfile)[0]:
905 905 mapname = (templater.templatepath('map-cmdline.' + mapfile)
906 906 or templater.templatepath(mapfile))
907 907 if mapname: mapfile = mapname
908 908 try:
909 909 t = changeset_templater(ui, repo, patch, mapfile, buffered)
910 910 except SyntaxError, inst:
911 911 raise util.Abort(inst.args[0])
912 912 if tmpl: t.use_template(tmpl)
913 913 return t
914 914 return changeset_printer(ui, repo, patch, buffered)
915 915
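
Because show_changeset() falls back to the [ui] section, the display format can also be configured persistently. A hypothetical hgrc fragment showing the two settings it consults (normally only one of them would be set; the quoting around the template matches templater.parsestring's expectations in this version):

    [ui]
    # consulted as step 3: an inline log template
    logtemplate = "{rev}:{node|short} {author|person}: {desc|firstline}\n"
    # consulted as step 4: a style map file, e.g. the bundled map-cmdline.compact
    #style = compact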
916 916 def finddate(ui, repo, date):
917 917 """Find the tipmost changeset that matches the given date spec"""
918 918 df = util.matchdate(date)
919 919 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
920 920 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
921 921 results = {}
922 922 for st, rev, fns in changeiter:
923 923 if st == 'add':
924 924 d = get(rev)[2]
925 925 if df(d[0]):
926 926 results[rev] = d
927 927 elif st == 'iter':
928 928 if rev in results:
929 929 ui.status("Found revision %s from %s\n" %
930 930 (rev, util.datestr(results[rev])))
931 931 return str(rev)
932 932
933 933 raise util.Abort(_("revision matching date not found"))
934 934
935 935 def walkchangerevs(ui, repo, pats, change, opts):
936 936 '''Iterate over files and the revs they changed in.
937 937
938 938 Callers most commonly need to iterate backwards over the history
939 939     they are interested in. Doing so has awful (quadratic-looking)
940 940 performance, so we use iterators in a "windowed" way.
941 941
942 942 We walk a window of revisions in the desired order. Within the
943 943 window, we first walk forwards to gather data, then in the desired
944 944 order (usually backwards) to display it.
945 945
946 946 This function returns an (iterator, matchfn) tuple. The iterator
947 947 yields 3-tuples. They will be of one of the following forms:
948 948
949 949 "window", incrementing, lastrev: stepping through a window,
950 950 positive if walking forwards through revs, last rev in the
951 951 sequence iterated over - use to reset state for the current window
952 952
953 953 "add", rev, fns: out-of-order traversal of the given file names
954 954 fns, which changed during revision rev - use to gather data for
955 955 possible display
956 956
957 957 "iter", rev, None: in-order traversal of the revs earlier iterated
958 958 over with "add" - use to display data'''
959 959
960 960 def increasing_windows(start, end, windowsize=8, sizelimit=512):
961 961 if start < end:
962 962 while start < end:
963 963 yield start, min(windowsize, end-start)
964 964 start += windowsize
965 965 if windowsize < sizelimit:
966 966 windowsize *= 2
967 967 else:
968 968 while start > end:
969 969 yield start, min(windowsize, start-end-1)
970 970 start -= windowsize
971 971 if windowsize < sizelimit:
972 972 windowsize *= 2
973 973
974 974 files, matchfn, anypats = matchpats(repo, pats, opts)
975 975 follow = opts.get('follow') or opts.get('follow_first')
976 976
977 977 if repo.changelog.count() == 0:
978 978 return [], matchfn
979 979
980 980 if follow:
981 981 defrange = '%s:0' % repo.changectx().rev()
982 982 else:
983 983 defrange = '-1:0'
984 984 revs = revrange(repo, opts['rev'] or [defrange])
985 985 wanted = {}
986 986 slowpath = anypats or opts.get('removed')
987 987 fncache = {}
988 988
989 989 if not slowpath and not files:
990 990 # No files, no patterns. Display all revs.
991 991 wanted = dict.fromkeys(revs)
992 992 copies = []
993 993 if not slowpath:
994 994 # Only files, no patterns. Check the history of each file.
995 995 def filerevgen(filelog, node):
996 996 cl_count = repo.changelog.count()
997 997 if node is None:
998 998 last = filelog.count() - 1
999 999 else:
1000 1000 last = filelog.rev(node)
1001 1001 for i, window in increasing_windows(last, nullrev):
1002 1002 revs = []
1003 1003 for j in xrange(i - window, i + 1):
1004 1004 n = filelog.node(j)
1005 1005 revs.append((filelog.linkrev(n),
1006 1006 follow and filelog.renamed(n)))
1007 1007 revs.reverse()
1008 1008 for rev in revs:
1009 1009                 # only yield revs for which we have the changelog; it can
1010 1010                 # happen while doing "hg log" during a pull or commit
1011 1011 if rev[0] < cl_count:
1012 1012 yield rev
1013 1013 def iterfiles():
1014 1014 for filename in files:
1015 1015 yield filename, None
1016 1016 for filename_node in copies:
1017 1017 yield filename_node
1018 1018 minrev, maxrev = min(revs), max(revs)
1019 1019 for file_, node in iterfiles():
1020 1020 filelog = repo.file(file_)
1021 1021 # A zero count may be a directory or deleted file, so
1022 1022 # try to find matching entries on the slow path.
1023 1023 if filelog.count() == 0:
1024 1024 slowpath = True
1025 1025 break
1026 1026 for rev, copied in filerevgen(filelog, node):
1027 1027 if rev <= maxrev:
1028 1028 if rev < minrev:
1029 1029 break
1030 1030 fncache.setdefault(rev, [])
1031 1031 fncache[rev].append(file_)
1032 1032 wanted[rev] = 1
1033 1033 if follow and copied:
1034 1034 copies.append(copied)
1035 1035 if slowpath:
1036 1036 if follow:
1037 1037 raise util.Abort(_('can only follow copies/renames for explicit '
1038 1038 'file names'))
1039 1039
1040 1040 # The slow path checks files modified in every changeset.
1041 1041 def changerevgen():
1042 1042 for i, window in increasing_windows(repo.changelog.count()-1,
1043 1043 nullrev):
1044 1044 for j in xrange(i - window, i + 1):
1045 1045 yield j, change(j)[3]
1046 1046
1047 1047 for rev, changefiles in changerevgen():
1048 1048 matches = filter(matchfn, changefiles)
1049 1049 if matches:
1050 1050 fncache[rev] = matches
1051 1051 wanted[rev] = 1
1052 1052
1053 1053 class followfilter:
1054 1054 def __init__(self, onlyfirst=False):
1055 1055 self.startrev = nullrev
1056 1056 self.roots = []
1057 1057 self.onlyfirst = onlyfirst
1058 1058
1059 1059 def match(self, rev):
1060 1060 def realparents(rev):
1061 1061 if self.onlyfirst:
1062 1062 return repo.changelog.parentrevs(rev)[0:1]
1063 1063 else:
1064 1064 return filter(lambda x: x != nullrev,
1065 1065 repo.changelog.parentrevs(rev))
1066 1066
1067 1067 if self.startrev == nullrev:
1068 1068 self.startrev = rev
1069 1069 return True
1070 1070
1071 1071 if rev > self.startrev:
1072 1072 # forward: all descendants
1073 1073 if not self.roots:
1074 1074 self.roots.append(self.startrev)
1075 1075 for parent in realparents(rev):
1076 1076 if parent in self.roots:
1077 1077 self.roots.append(rev)
1078 1078 return True
1079 1079 else:
1080 1080 # backwards: all parents
1081 1081 if not self.roots:
1082 1082 self.roots.extend(realparents(self.startrev))
1083 1083 if rev in self.roots:
1084 1084 self.roots.remove(rev)
1085 1085 self.roots.extend(realparents(rev))
1086 1086 return True
1087 1087
1088 1088 return False
1089 1089
1090 1090 # it might be worthwhile to do this in the iterator if the rev range
1091 1091 # is descending and the prune args are all within that range
1092 1092 for rev in opts.get('prune', ()):
1093 1093 rev = repo.changelog.rev(repo.lookup(rev))
1094 1094 ff = followfilter()
1095 1095 stop = min(revs[0], revs[-1])
1096 1096 for x in xrange(rev, stop-1, -1):
1097 1097 if ff.match(x) and x in wanted:
1098 1098 del wanted[x]
1099 1099
1100 1100 def iterate():
1101 1101 if follow and not files:
1102 1102 ff = followfilter(onlyfirst=opts.get('follow_first'))
1103 1103 def want(rev):
1104 1104 if ff.match(rev) and rev in wanted:
1105 1105 return True
1106 1106 return False
1107 1107 else:
1108 1108 def want(rev):
1109 1109 return rev in wanted
1110 1110
1111 1111 for i, window in increasing_windows(0, len(revs)):
1112 1112 yield 'window', revs[0] < revs[-1], revs[-1]
1113 1113 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1114 1114 srevs = list(nrevs)
1115 1115 srevs.sort()
1116 1116 for rev in srevs:
1117 1117 fns = fncache.get(rev)
1118 1118 if not fns:
1119 1119 def fns_generator():
1120 1120 for f in change(rev)[3]:
1121 1121 if matchfn(f):
1122 1122 yield f
1123 1123 fns = fns_generator()
1124 1124 yield 'add', rev, fns
1125 1125 for rev in nrevs:
1126 1126 yield 'iter', rev, None
1127 1127 return iterate(), matchfn
1128 1128
1129 1129 def commit(ui, repo, commitfunc, pats, opts):
1130 1130 '''commit the specified files or all outstanding changes'''
1131 1131 date = opts.get('date')
1132 1132 if date:
1133 1133 opts['date'] = util.parsedate(date)
1134 1134 message = logmessage(opts)
1135 1135
1136 1136 # extract addremove carefully -- this function can be called from a command
1137 1137 # that doesn't support addremove
1138 1138 if opts.get('addremove'):
1139 1139 addremove(repo, pats, opts)
1140 1140
1141 1141 fns, match, anypats = matchpats(repo, pats, opts)
1142 1142 if pats:
1143 1143 status = repo.status(files=fns, match=match)
1144 1144 modified, added, removed, deleted, unknown = status[:5]
1145 1145 files = modified + added + removed
1146 1146 slist = None
1147 1147 for f in fns:
1148 1148 if f == '.':
1149 1149 continue
1150 1150 if f not in files:
1151 1151 rf = repo.wjoin(f)
1152 1152 rel = repo.pathto(f)
1153 1153 try:
1154 1154 mode = os.lstat(rf)[stat.ST_MODE]
1155 1155 except OSError:
1156 1156 raise util.Abort(_("file %s not found!") % rel)
1157 1157 if stat.S_ISDIR(mode):
1158 1158 name = f + '/'
1159 1159 if slist is None:
1160 1160 slist = list(files)
1161 1161 slist.sort()
1162 1162 i = bisect.bisect(slist, name)
1163 1163 if i >= len(slist) or not slist[i].startswith(name):
1164 1164 raise util.Abort(_("no match under directory %s!")
1165 1165 % rel)
1166 1166 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1167 1167 raise util.Abort(_("can't commit %s: "
1168 1168 "unsupported file type!") % rel)
1169 1169 elif f not in repo.dirstate:
1170 1170 raise util.Abort(_("file %s not tracked!") % rel)
1171 1171 else:
1172 1172 files = []
1173 1173 try:
1174 1174 return commitfunc(ui, repo, files, message, match, opts)
1175 1175 except ValueError, inst:
1176 1176 raise util.Abort(str(inst))
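The directory check above exploits the fact that in a sorted list of paths every file under "d/" sorts immediately after the string "d/" itself, so a single bisect answers "did anything change under this directory?". A standalone sketch of that trick using only the standard library:

# Standalone illustration of the sorted-list + bisect membership test used
# above to detect whether any changed file lives under a directory.
import bisect

def has_file_under(changed_files, dirname):
    """Return True if any entry in changed_files is inside dirname/."""
    prefix = dirname.rstrip('/') + '/'
    slist = sorted(changed_files)
    i = bisect.bisect(slist, prefix)
    return i < len(slist) and slist[i].startswith(prefix)

files = ['docs/guide.txt', 'src/main.py']
print(has_file_under(files, 'docs'))    # True
print(has_file_under(files, 'tests'))   # False
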
@@ -1,3179 +1,3179 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, re, sys, urllib
11 11 import hg, util, revlog, bundlerepo, extensions
12 12 import difflib, patch, time, help, mdiff, tempfile
13 13 import errno, version, socket
14 14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 15
16 16 # Commands start here, listed alphabetically
17 17
18 18 def add(ui, repo, *pats, **opts):
19 19 """add the specified files on the next commit
20 20
21 21 Schedule files to be version controlled and added to the repository.
22 22
23 23 The files will be added to the repository at the next commit. To
24 24 undo an add before that, see hg revert.
25 25
26 26 If no names are given, add all files in the repository.
27 27 """
28 28
29 29 rejected = None
30 30 exacts = {}
31 31 names = []
32 32 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
33 33 badmatch=util.always):
34 34 if exact:
35 35 if ui.verbose:
36 36 ui.status(_('adding %s\n') % rel)
37 37 names.append(abs)
38 38 exacts[abs] = 1
39 39 elif abs not in repo.dirstate:
40 40 ui.status(_('adding %s\n') % rel)
41 41 names.append(abs)
42 42 if not opts.get('dry_run'):
43 43 rejected = repo.add(names)
44 44 rejected = [p for p in rejected if p in exacts]
45 45 return rejected and 1 or 0
46 46
47 47 def addremove(ui, repo, *pats, **opts):
48 48 """add all new files, delete all missing files
49 49
50 50 Add all new files and remove all missing files from the repository.
51 51
52 52 New files are ignored if they match any of the patterns in .hgignore. As
53 53 with add, these changes take effect at the next commit.
54 54
55 55 Use the -s option to detect renamed files. With a parameter > 0,
56 56 this compares every removed file with every added file and records
57 57 those similar enough as renames. This option takes a percentage
58 58 between 0 (disabled) and 100 (files must be identical) as its
59 59 parameter. Detecting renamed files this way can be expensive.
60 60 """
61 61 try:
62 62 sim = float(opts.get('similarity') or 0)
63 63 except ValueError:
64 64 raise util.Abort(_('similarity must be a number'))
65 65 if sim < 0 or sim > 100:
66 66 raise util.Abort(_('similarity must be between 0 and 100'))
67 67 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
68 68
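The similarity knob above arrives as a percentage on the command line and is handed to cmdutil.addremove as a fraction. A tiny standalone sketch of just that validation and conversion (not calling Mercurial):

# Standalone sketch of the percentage -> fraction handling used above.
def parse_similarity(value):
    try:
        sim = float(value or 0)
    except (TypeError, ValueError):
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0

print(parse_similarity('75'))   # 0.75
print(parse_similarity(None))   # 0.0
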
69 69 def annotate(ui, repo, *pats, **opts):
70 70 """show changeset information per file line
71 71
72 72 List changes in files, showing the revision id responsible for each line
73 73
74 74 This command is useful to discover who did a change or when a change took
75 75 place.
76 76
77 77 Without the -a option, annotate will avoid processing files it
78 78 detects as binary. With -a, annotate will generate an annotation
79 79 anyway, probably with undesirable results.
80 80 """
81 81 datefunc = ui.quiet and util.shortdate or util.datestr
82 82 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
83 83
84 84 if not pats:
85 85 raise util.Abort(_('at least one file name or pattern required'))
86 86
87 87 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
88 88 ('number', lambda x: str(x[0].rev())),
89 89 ('changeset', lambda x: short(x[0].node())),
90 90 ('date', getdate),
91 91 ('follow', lambda x: x[0].path()),
92 92 ]
93 93
94 94 if (not opts['user'] and not opts['changeset'] and not opts['date']
95 95 and not opts['follow']):
96 96 opts['number'] = 1
97 97
98 98 linenumber = opts.get('line_number') is not None
99 99 if (linenumber and (not opts['changeset']) and (not opts['number'])):
100 100 raise util.Abort(_('at least one of -n/-c is required for -l'))
101 101
102 102 funcmap = [func for op, func in opmap if opts.get(op)]
103 103 if linenumber:
104 104 lastfunc = funcmap[-1]
105 105 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
106 106
107 107 ctx = repo.changectx(opts['rev'])
108 108
109 109 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
110 110 node=ctx.node()):
111 111 fctx = ctx.filectx(abs)
112 112 if not opts['text'] and util.binary(fctx.data()):
113 113 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
114 114 continue
115 115
116 116 lines = fctx.annotate(follow=opts.get('follow'),
117 117 linenumber=linenumber)
118 118 pieces = []
119 119
120 120 for f in funcmap:
121 121 l = [f(n) for n, dummy in lines]
122 122 if l:
123 123 m = max(map(len, l))
124 124 pieces.append(["%*s" % (m, x) for x in l])
125 125
126 126 if pieces:
127 127 for p, l in zip(zip(*pieces), lines):
128 128 ui.write("%s: %s" % (" ".join(p), l[1]))
129 129
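The funcmap/pieces machinery above right-aligns each requested field to the width of its longest value and then zips the columns back together per output line. A compact standalone illustration of that alignment step with made-up annotation data:

# Standalone sketch of the column alignment done above with
# "%*s" % (width, value) and zip(zip(*pieces), lines).
import sys

columns = [['joe', 'alice'], ['3', '12']]        # e.g. user and rev per line
lines = ['first line\n', 'second line\n']

pieces = []
for col in columns:
    width = max([len(v) for v in col])           # widest value in the column
    pieces.append(["%*s" % (width, v) for v in col])

for prefix, text in zip(zip(*pieces), lines):
    sys.stdout.write("%s: %s" % (" ".join(prefix), text))
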
130 130 def archive(ui, repo, dest, **opts):
131 131 '''create unversioned archive of a repository revision
132 132
133 133 By default, the revision used is the parent of the working
134 134 directory; use "-r" to specify a different revision.
135 135
136 136 To specify the type of archive to create, use "-t". Valid
137 137 types are:
138 138
139 139 "files" (default): a directory full of files
140 140 "tar": tar archive, uncompressed
141 141 "tbz2": tar archive, compressed using bzip2
142 142 "tgz": tar archive, compressed using gzip
143 143 "uzip": zip archive, uncompressed
144 144 "zip": zip archive, compressed using deflate
145 145
146 146 The exact name of the destination archive or directory is given
147 147 using a format string; see "hg help export" for details.
148 148
149 149 Each member added to an archive file has a directory prefix
150 150 prepended. Use "-p" to specify a format string for the prefix.
151 151 The default is the basename of the archive, with suffixes removed.
152 152 '''
153 153
154 154 ctx = repo.changectx(opts['rev'])
155 155 if not ctx:
156 156 raise util.Abort(_('repository has no revisions'))
157 157 node = ctx.node()
158 158 dest = cmdutil.make_filename(repo, dest, node)
159 159 if os.path.realpath(dest) == repo.root:
160 160 raise util.Abort(_('repository root cannot be destination'))
161 161 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
162 162 kind = opts.get('type') or 'files'
163 163 prefix = opts['prefix']
164 164 if dest == '-':
165 165 if kind == 'files':
166 166 raise util.Abort(_('cannot archive plain files to stdout'))
167 167 dest = sys.stdout
168 168 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
169 169 prefix = cmdutil.make_filename(repo, prefix, node)
170 170 archival.archive(repo, dest, node, kind, not opts['no_decode'],
171 171 matchfn, prefix)
172 172
173 173 def backout(ui, repo, node=None, rev=None, **opts):
174 174 '''reverse effect of earlier changeset
175 175
176 176 Commit the backed out changes as a new changeset. The new
177 177 changeset is a child of the backed out changeset.
178 178
179 179 If you back out a changeset other than the tip, a new head is
180 180 created. This head will be the new tip and you should merge this
181 181 backout changeset with another head (current one by default).
182 182
183 183 The --merge option remembers the parent of the working directory
184 184 before starting the backout, then merges the new head with that
185 185 changeset afterwards. This saves you from doing the merge by
186 186 hand. The result of this merge is not committed, as for a normal
187 187 merge.
188 188
189 189 See 'hg help dates' for a list of formats valid for -d/--date.
190 190 '''
191 191 if rev and node:
192 192 raise util.Abort(_("please specify just one revision"))
193 193
194 194 if not rev:
195 195 rev = node
196 196
197 197 if not rev:
198 198 raise util.Abort(_("please specify a revision to backout"))
199 199
200 200 date = opts.get('date')
201 201 if date:
202 202 opts['date'] = util.parsedate(date)
203 203
204 204 cmdutil.bail_if_changed(repo)
205 205 node = repo.lookup(rev)
206 206
207 207 op1, op2 = repo.dirstate.parents()
208 208 a = repo.changelog.ancestor(op1, node)
209 209 if a != node:
210 210 raise util.Abort(_('cannot back out change on a different branch'))
211 211
212 212 p1, p2 = repo.changelog.parents(node)
213 213 if p1 == nullid:
214 214 raise util.Abort(_('cannot back out a change with no parents'))
215 215 if p2 != nullid:
216 216 if not opts['parent']:
217 217 raise util.Abort(_('cannot back out a merge changeset without '
218 218 '--parent'))
219 219 p = repo.lookup(opts['parent'])
220 220 if p not in (p1, p2):
221 221 raise util.Abort(_('%s is not a parent of %s') %
222 222 (short(p), short(node)))
223 223 parent = p
224 224 else:
225 225 if opts['parent']:
226 226 raise util.Abort(_('cannot use --parent on non-merge changeset'))
227 227 parent = p1
228 228
229 229 hg.clean(repo, node, show_stats=False)
230 230 revert_opts = opts.copy()
231 231 revert_opts['date'] = None
232 232 revert_opts['all'] = True
233 233 revert_opts['rev'] = hex(parent)
234 234 revert_opts['no_backup'] = None
235 235 revert(ui, repo, **revert_opts)
236 236 commit_opts = opts.copy()
237 237 commit_opts['addremove'] = False
238 238 if not commit_opts['message'] and not commit_opts['logfile']:
239 239 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
240 240 commit_opts['force_editor'] = True
241 241 commit(ui, repo, **commit_opts)
242 242 def nice(node):
243 243 return '%d:%s' % (repo.changelog.rev(node), short(node))
244 244 ui.status(_('changeset %s backs out changeset %s\n') %
245 245 (nice(repo.changelog.tip()), nice(node)))
246 246 if op1 != node:
247 247 hg.clean(repo, op1, show_stats=False)
248 248 if opts['merge']:
249 249 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
250 250 hg.merge(repo, hex(repo.changelog.tip()))
251 251 else:
252 252 ui.status(_('the backout changeset is a new head - '
253 253 'do not forget to merge\n'))
254 254 ui.status(_('(use "backout --merge" '
255 255 'if you want to auto-merge)\n'))
256 256
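Most of backout above is option plumbing around one rule for choosing the revision to revert against: merges require an explicit --parent that really is a parent, everything else uses the first parent, and parentless changesets cannot be backed out. A standalone sketch of that rule (NULL and the revision names are purely illustrative):

# Standalone sketch of the parent-selection rule used by backout above.
NULL = None   # stand-in for nullid

def pick_backout_parent(p1, p2, requested=None):
    if p1 is NULL:
        raise ValueError('cannot back out a change with no parents')
    if p2 is not NULL:                  # merge changeset
        if requested is None:
            raise ValueError('cannot back out a merge changeset without --parent')
        if requested not in (p1, p2):
            raise ValueError('%s is not a parent' % requested)
        return requested
    if requested is not None:
        raise ValueError('cannot use --parent on non-merge changeset')
    return p1                           # ordinary changeset: first parent

print(pick_backout_parent('rev10', NULL))               # rev10
print(pick_backout_parent('rev10', 'rev12', 'rev12'))   # rev12
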
257 257 def bisect(ui, repo, rev=None, extra=None,
258 258 reset=None, good=None, bad=None, skip=None, noupdate=None):
259 259 """subdivision search of changesets
260 260
261 261 This command helps to find changesets which introduce problems.
262 262 To use, mark the earliest changeset you know exhibits the problem
263 263 as bad, then mark the latest changeset which is free from the
264 264 problem as good. Bisect will update your working directory to a
265 265 revision for testing. Once you have performed tests, mark the
266 266 working directory as bad or good and bisect will either update to
267 267 another candidate changeset or announce that it has found the bad
268 268 revision.
269 269 """
270 270 # backward compatibility
271 271 if rev in "good bad reset init".split():
272 272 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
273 273 cmd, rev, extra = rev, extra, None
274 274 if cmd == "good":
275 275 good = True
276 276 elif cmd == "bad":
277 277 bad = True
278 278 else:
279 279 reset = True
280 280 elif extra or good + bad + skip + reset > 1:
281 281 raise util.Abort("Incompatible arguments")
282 282
283 283 if reset:
284 284 p = repo.join("bisect.state")
285 285 if os.path.exists(p):
286 286 os.unlink(p)
287 287 return
288 288
289 289 # load state
290 290 state = {'good': [], 'bad': [], 'skip': []}
291 291 if os.path.exists(repo.join("bisect.state")):
292 292 for l in repo.opener("bisect.state"):
293 293 kind, node = l[:-1].split()
294 294 node = repo.lookup(node)
295 295 if kind not in state:
296 296 raise util.Abort(_("unknown bisect kind %s") % kind)
297 297 state[kind].append(node)
298 298
299 299 # update state
300 300 node = repo.lookup(rev or '.')
301 301 if good:
302 302 state['good'].append(node)
303 303 elif bad:
304 304 state['bad'].append(node)
305 305 elif skip:
306 306 state['skip'].append(node)
307 307
308 308 # save state
309 309 f = repo.opener("bisect.state", "w", atomictemp=True)
310 310 wlock = repo.wlock()
311 311 try:
312 312 for kind in state:
313 313 for node in state[kind]:
314 314 f.write("%s %s\n" % (kind, hg.hex(node)))
315 315 f.rename()
316 316 finally:
317 317 del wlock
318 318
319 319 if not state['good'] or not state['bad']:
320 320 return
321 321
322 322 # actually bisect
323 323 node, changesets, good = hbisect.bisect(repo.changelog, state)
324 324 if changesets == 0:
325 325 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
326 326 displayer = cmdutil.show_changeset(ui, repo, {})
327 327 displayer.show(changenode=node)
328 328 elif node is not None:
329 329 # compute the approximate number of remaining tests
330 330 tests, size = 0, 2
331 331 while size <= changesets:
332 332 tests, size = tests + 1, size * 2
333 333 rev = repo.changelog.rev(node)
334 334 ui.write(_("Testing changeset %s:%s "
335 335 "(%s changesets remaining, ~%s tests)\n")
336 336 % (rev, hg.short(node), changesets, tests))
337 337 if not noupdate:
338 338 cmdutil.bail_if_changed(repo)
339 339 return hg.clean(repo, node)
340 340
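The doubling loop above is an integer log2: with N candidate changesets left, roughly ceil(log2(N)) further test runs are expected. A worked standalone sketch:

# Standalone sketch of the "remaining tests" estimate computed above:
# keep doubling the window until it covers all remaining changesets.
def estimated_tests(changesets):
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

for n in (1, 2, 10, 100, 1000):
    print("%4d changesets -> ~%d tests" % (n, estimated_tests(n)))
# 1 -> 0, 2 -> 1, 10 -> 3, 100 -> 6, 1000 -> 9
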
341 341 def branch(ui, repo, label=None, **opts):
342 342 """set or show the current branch name
343 343
344 344 With no argument, show the current branch name. With one argument,
345 345 set the working directory branch name (the branch does not exist in
346 346 the repository until the next commit).
347 347
348 348 Unless --force is specified, branch will not let you set a
349 349 branch name that shadows an existing branch.
350 350
351 351 Use the command 'hg update' to switch to an existing branch.
352 352 """
353 353
354 354 if label:
355 355 if not opts.get('force') and label in repo.branchtags():
356 356 if label not in [p.branch() for p in repo.workingctx().parents()]:
357 357 raise util.Abort(_('a branch of the same name already exists'
358 358 ' (use --force to override)'))
359 359 repo.dirstate.setbranch(util.fromlocal(label))
360 360 ui.status(_('marked working directory as branch %s\n') % label)
361 361 else:
362 362 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
363 363
364 364 def branches(ui, repo, active=False):
365 365 """list repository named branches
366 366
367 367 List the repository's named branches, indicating which ones are
368 368 inactive. If active is specified, only show active branches.
369 369
370 370 A branch is considered active if it contains unmerged heads.
371 371
372 372 Use the command 'hg update' to switch to an existing branch.
373 373 """
374 374 b = repo.branchtags()
375 375 heads = dict.fromkeys(repo.heads(), 1)
376 376 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
377 377 l.sort()
378 378 l.reverse()
379 379 for ishead, r, n, t in l:
380 380 if active and not ishead:
381 381 # If we're only displaying active branches, abort the loop on
382 382 # encountering the first inactive head
383 383 break
384 384 else:
385 385 hexfunc = ui.debugflag and hex or short
386 386 if ui.quiet:
387 387 ui.write("%s\n" % t)
388 388 else:
389 389 spaces = " " * (30 - util.locallen(t))
390 390 # The code only gets here if inactive branches are being
391 391 # displayed or the branch is active.
392 392 isinactive = ((not ishead) and " (inactive)") or ''
393 393 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
394 394
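The list built above tags each branch tip with whether that tip is still a repository head, so a descending sort on (ishead, rev) puts active branches first and lets the --active loop stop at the first inactive entry. A standalone sketch of that ordering with made-up branch data:

# Standalone sketch of the (ishead, rev, node, tag) sort used above;
# the branch data here is made up.
heads = {'n3': 1, 'n7': 1}                          # nodes that are repo heads
branchtips = {'default': ('n7', 7), 'stable': ('n3', 3), 'old': ('n1', 1)}

l = [((node in heads), rev, node, tag)
     for tag, (node, rev) in branchtips.items()]
l.sort()
l.reverse()                                          # active first, newest first

for ishead, rev, node, tag in l:
    state = ((not ishead) and ' (inactive)') or ''
    print("%-10s %d:%s%s" % (tag, rev, node, state))
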
395 395 def bundle(ui, repo, fname, dest=None, **opts):
396 396 """create a changegroup file
397 397
398 398 Generate a compressed changegroup file collecting changesets not
399 399 found in the other repository.
400 400
401 401 If no destination repository is specified the destination is
402 402 assumed to have all the nodes specified by one or more --base
403 403 parameters. To create a bundle containing all changesets, use
404 404 --all (or --base null).
405 405
406 406 The bundle file can then be transferred using conventional means and
407 407 applied to another repository with the unbundle or pull command.
408 408 This is useful when direct push and pull are not available or when
409 409 exporting an entire repository is undesirable.
410 410
411 411 Applying bundles preserves all changeset contents including
412 412 permissions, copy/rename information, and revision history.
413 413 """
414 414 revs = opts.get('rev') or None
415 415 if revs:
416 416 revs = [repo.lookup(rev) for rev in revs]
417 417 if opts.get('all'):
418 418 base = ['null']
419 419 else:
420 420 base = opts.get('base')
421 421 if base:
422 422 if dest:
423 423 raise util.Abort(_("--base is incompatible with specifying "
424 424 "a destination"))
425 425 base = [repo.lookup(rev) for rev in base]
426 426 # create the right base
427 427 # XXX: nodesbetween / changegroup* should be "fixed" instead
428 428 o = []
429 429 has = {nullid: None}
430 430 for n in base:
431 431 has.update(repo.changelog.reachable(n))
432 432 if revs:
433 433 visit = list(revs)
434 434 else:
435 435 visit = repo.changelog.heads()
436 436 seen = {}
437 437 while visit:
438 438 n = visit.pop(0)
439 439 parents = [p for p in repo.changelog.parents(n) if p not in has]
440 440 if len(parents) == 0:
441 441 o.insert(0, n)
442 442 else:
443 443 for p in parents:
444 444 if p not in seen:
445 445 seen[p] = 1
446 446 visit.append(p)
447 447 else:
448 448 cmdutil.setremoteconfig(ui, opts)
449 449 dest, revs, checkout = hg.parseurl(
450 450 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
451 451 other = hg.repository(ui, dest)
452 452 o = repo.findoutgoing(other, force=opts['force'])
453 453
454 454 if revs:
455 455 cg = repo.changegroupsubset(o, revs, 'bundle')
456 456 else:
457 457 cg = repo.changegroup(o, 'bundle')
458 458 changegroup.writebundle(cg, fname, "HG10BZ")
459 459
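The --base handling above finds what to bundle by walking backwards from the requested heads and stopping at anything already reachable from a base node; only the roots of the missing region are collected, and changegroupsubset expands from there. The same walk on a toy DAG, with plain dictionaries standing in for the changelog:

# Standalone sketch of the backwards walk used above; the DAG is a toy.
parents = {'e': ['d'], 'd': ['c'], 'c': ['b'], 'b': ['a'], 'a': []}
has = {'a': None, 'b': None}        # everything reachable from the base nodes

o, seen, visit = [], {}, ['e']      # start from the requested heads
while visit:
    n = visit.pop(0)
    ps = [p for p in parents[n] if p not in has]
    if not ps:
        o.insert(0, n)              # all parents reachable: a root of the missing set
    else:
        for p in ps:
            if p not in seen:
                seen[p] = 1
                visit.append(p)

print(o)   # ['c'] -- root of the part to bundle; the changegroup grows from here
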
460 460 def cat(ui, repo, file1, *pats, **opts):
461 461 """output the current or given revision of files
462 462
463 463 Print the specified files as they were at the given revision.
464 464 If no revision is given, the parent of the working directory is used,
465 465 or tip if no revision is checked out.
466 466
467 467 Output may be to a file, in which case the name of the file is
468 468 given using a format string. The formatting rules are the same as
469 469 for the export command, with the following additions:
470 470
471 471 %s basename of file being printed
472 472 %d dirname of file being printed, or '.' if in repo root
473 473 %p root-relative path name of file being printed
474 474 """
475 475 ctx = repo.changectx(opts['rev'])
476 476 err = 1
477 477 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
478 478 ctx.node()):
479 479 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
480 480 data = ctx.filectx(abs).data()
481 481 if opts.get('decode'):
482 482 data = repo.wwritedata(abs, data)
483 483 fp.write(data)
484 484 err = 0
485 485 return err
486 486
487 487 def clone(ui, source, dest=None, **opts):
488 488 """make a copy of an existing repository
489 489
490 490 Create a copy of an existing repository in a new directory.
491 491
492 492 If no destination directory name is specified, it defaults to the
493 493 basename of the source.
494 494
495 495 The location of the source is added to the new repository's
496 496 .hg/hgrc file, as the default to be used for future pulls.
497 497
498 498 For efficiency, hardlinks are used for cloning whenever the source
499 499 and destination are on the same filesystem (note this applies only
500 500 to the repository data, not to the checked out files). Some
501 501 filesystems, such as AFS, implement hardlinking incorrectly, but
502 502 do not report errors. In these cases, use the --pull option to
503 503 avoid hardlinking.
504 504
505 505 You can safely clone repositories and checked out files using full
506 506 hardlinks with
507 507
508 508 $ cp -al REPO REPOCLONE
509 509
510 510 which is the fastest way to clone. However, the operation is not
511 511 atomic (making sure REPO is not modified during the operation is
512 512 up to you) and you have to make sure your editor breaks hardlinks
513 513 (Emacs and most Linux Kernel tools do so).
514 514
515 515 If you use the -r option to clone up to a specific revision, no
516 516 subsequent revisions will be present in the cloned repository.
517 517 This option implies --pull, even on local repositories.
518 518
519 519 See pull for valid source format details.
520 520
521 521 It is possible to specify an ssh:// URL as the destination, but no
522 522 .hg/hgrc and working directory will be created on the remote side.
523 523 Look at the help text for the pull command for important details
524 524 about ssh:// URLs.
525 525 """
526 526 cmdutil.setremoteconfig(ui, opts)
527 527 hg.clone(ui, source, dest,
528 528 pull=opts['pull'],
529 529 stream=opts['uncompressed'],
530 530 rev=opts['rev'],
531 531 update=not opts['noupdate'])
532 532
533 533 def commit(ui, repo, *pats, **opts):
534 534 """commit the specified files or all outstanding changes
535 535
536 536 Commit changes to the given files into the repository.
537 537
538 538 If a list of files is omitted, all changes reported by "hg status"
539 539 will be committed.
540 540
541 541 If no commit message is specified, the configured editor is started to
542 542 enter a message.
543 543
544 544 See 'hg help dates' for a list of formats valid for -d/--date.
545 545 """
546 546 def commitfunc(ui, repo, files, message, match, opts):
547 547 return repo.commit(files, message, opts['user'], opts['date'], match,
548 548 force_editor=opts.get('force_editor'))
549 549 cmdutil.commit(ui, repo, commitfunc, pats, opts)
550 550
551 551 def copy(ui, repo, *pats, **opts):
552 552 """mark files as copied for the next commit
553 553
554 554 Mark dest as having copies of source files. If dest is a
555 555 directory, copies are put in that directory. If dest is a file,
556 556 there can only be one source.
557 557
558 558 By default, this command copies the contents of files as they
559 559 stand in the working directory. If invoked with --after, the
560 560 operation is recorded, but no copying is performed.
561 561
562 562 This command takes effect in the next commit. To undo a copy
563 563 before that, see hg revert.
564 564 """
565 565 wlock = repo.wlock(False)
566 566 try:
567 567 return cmdutil.copy(ui, repo, pats, opts)
568 568 finally:
569 569 del wlock
570 570
571 571 def debugancestor(ui, repo, *args):
572 572 """find the ancestor revision of two revisions in a given index"""
573 573 if len(args) == 3:
574 574 index, rev1, rev2 = args
575 575 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
576 576 elif len(args) == 2:
577 577 if not repo:
578 578 raise util.Abort(_("There is no Mercurial repository here "
579 579 "(.hg not found)"))
580 580 rev1, rev2 = args
581 581 r = repo.changelog
582 582 else:
583 583 raise util.Abort(_('either two or three arguments required'))
584 584 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
585 585 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
586 586
587 587 def debugcomplete(ui, cmd='', **opts):
588 588 """returns the completion list associated with the given command"""
589 589
590 590 if opts['options']:
591 591 options = []
592 592 otables = [globalopts]
593 593 if cmd:
594 594 aliases, entry = cmdutil.findcmd(ui, cmd, table)
595 595 otables.append(entry[1])
596 596 for t in otables:
597 597 for o in t:
598 598 if o[0]:
599 599 options.append('-%s' % o[0])
600 600 options.append('--%s' % o[1])
601 601 ui.write("%s\n" % "\n".join(options))
602 602 return
603 603
604 604 clist = cmdutil.findpossible(ui, cmd, table).keys()
605 605 clist.sort()
606 606 ui.write("%s\n" % "\n".join(clist))
607 607
608 608 def debugfsinfo(ui, path = "."):
609 609 file('.debugfsinfo', 'w').write('')
610 610 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
611 611 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
612 612 ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
613 613 and 'yes' or 'no'))
614 614 os.unlink('.debugfsinfo')
615 615
616 616 def debugrebuildstate(ui, repo, rev=""):
617 617 """rebuild the dirstate as it would look like for the given revision"""
618 618 if rev == "":
619 619 rev = repo.changelog.tip()
620 620 ctx = repo.changectx(rev)
621 621 files = ctx.manifest()
622 622 wlock = repo.wlock()
623 623 try:
624 624 repo.dirstate.rebuild(rev, files)
625 625 finally:
626 626 del wlock
627 627
628 628 def debugcheckstate(ui, repo):
629 629 """validate the correctness of the current dirstate"""
630 630 parent1, parent2 = repo.dirstate.parents()
631 631 m1 = repo.changectx(parent1).manifest()
632 632 m2 = repo.changectx(parent2).manifest()
633 633 errors = 0
634 634 for f in repo.dirstate:
635 635 state = repo.dirstate[f]
636 636 if state in "nr" and f not in m1:
637 637 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
638 638 errors += 1
639 639 if state in "a" and f in m1:
640 640 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
641 641 errors += 1
642 642 if state in "m" and f not in m1 and f not in m2:
643 643 ui.warn(_("%s in state %s, but not in either manifest\n") %
644 644 (f, state))
645 645 errors += 1
646 646 for f in m1:
647 647 state = repo.dirstate[f]
648 648 if state not in "nrm":
649 649 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
650 650 errors += 1
651 651 if errors:
652 652 error = _(".hg/dirstate inconsistent with current parent's manifest")
653 653 raise util.Abort(error)
654 654
655 655 def showconfig(ui, repo, *values, **opts):
656 656 """show combined config settings from all hgrc files
657 657
658 658 With no args, print names and values of all config items.
659 659
660 660 With one arg of the form section.name, print just the value of
661 661 that config item.
662 662
663 663 With multiple args, print names and values of all config items
664 664 with matching section names."""
665 665
666 666 untrusted = bool(opts.get('untrusted'))
667 667 if values:
668 668 if len([v for v in values if '.' in v]) > 1:
669 669 raise util.Abort(_('only one config item permitted'))
670 670 for section, name, value in ui.walkconfig(untrusted=untrusted):
671 671 sectname = section + '.' + name
672 672 if values:
673 673 for v in values:
674 674 if v == section:
675 675 ui.write('%s=%s\n' % (sectname, value))
676 676 elif v == sectname:
677 677 ui.write(value, '\n')
678 678 else:
679 679 ui.write('%s=%s\n' % (sectname, value))
680 680
681 681 def debugsetparents(ui, repo, rev1, rev2=None):
682 682 """manually set the parents of the current working directory
683 683
684 684 This is useful for writing repository conversion tools, but should
685 685 be used with care.
686 686 """
687 687
688 688 if not rev2:
689 689 rev2 = hex(nullid)
690 690
691 691 wlock = repo.wlock()
692 692 try:
693 693 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
694 694 finally:
695 695 del wlock
696 696
697 697 def debugstate(ui, repo):
698 698 """show the contents of the current dirstate"""
699 699 k = repo.dirstate._map.items()
700 700 k.sort()
701 701 for file_, ent in k:
702 702 if ent[3] == -1:
703 703 # Pad or slice to locale representation
704 704 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
705 705 timestr = 'unset'
706 706 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
707 707 else:
708 708 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
709 709 if ent[1] & 020000:
710 710 mode = 'lnk'
711 711 else:
712 712 mode = '%3o' % (ent[1] & 0777)
713 713 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
714 714 for f in repo.dirstate.copies():
715 715 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
716 716
717 717 def debugdata(ui, file_, rev):
718 718 """dump the contents of a data file revision"""
719 719 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
720 720 try:
721 721 ui.write(r.revision(r.lookup(rev)))
722 722 except KeyError:
723 723 raise util.Abort(_('invalid revision identifier %s') % rev)
724 724
725 725 def debugdate(ui, date, range=None, **opts):
726 726 """parse and display a date"""
727 727 if opts["extended"]:
728 728 d = util.parsedate(date, util.extendeddateformats)
729 729 else:
730 730 d = util.parsedate(date)
731 731 ui.write("internal: %s %s\n" % d)
732 732 ui.write("standard: %s\n" % util.datestr(d))
733 733 if range:
734 734 m = util.matchdate(range)
735 735 ui.write("match: %s\n" % m(d[0]))
736 736
737 737 def debugindex(ui, file_):
738 738 """dump the contents of an index file"""
739 739 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
740 740 ui.write(" rev offset length base linkrev" +
741 741 " nodeid p1 p2\n")
742 742 for i in xrange(r.count()):
743 743 node = r.node(i)
744 744 try:
745 745 pp = r.parents(node)
746 746 except:
747 747 pp = [nullid, nullid]
748 748 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
749 749 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
750 750 short(node), short(pp[0]), short(pp[1])))
751 751
752 752 def debugindexdot(ui, file_):
753 753 """dump an index DAG as a .dot file"""
754 754 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
755 755 ui.write("digraph G {\n")
756 756 for i in xrange(r.count()):
757 757 node = r.node(i)
758 758 pp = r.parents(node)
759 759 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
760 760 if pp[1] != nullid:
761 761 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
762 762 ui.write("}\n")
763 763
764 764 def debuginstall(ui):
765 765 '''test Mercurial installation'''
766 766
767 767 def writetemp(contents):
768 768 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
769 769 f = os.fdopen(fd, "wb")
770 770 f.write(contents)
771 771 f.close()
772 772 return name
773 773
774 774 problems = 0
775 775
776 776 # encoding
777 777 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
778 778 try:
779 779 util.fromlocal("test")
780 780 except util.Abort, inst:
781 781 ui.write(" %s\n" % inst)
782 782 ui.write(_(" (check that your locale is properly set)\n"))
783 783 problems += 1
784 784
785 785 # compiled modules
786 786 ui.status(_("Checking extensions...\n"))
787 787 try:
788 788 import bdiff, mpatch, base85
789 789 except Exception, inst:
790 790 ui.write(" %s\n" % inst)
791 791 ui.write(_(" One or more extensions could not be found"))
792 792 ui.write(_(" (check that you compiled the extensions)\n"))
793 793 problems += 1
794 794
795 795 # templates
796 796 ui.status(_("Checking templates...\n"))
797 797 try:
798 798 import templater
799 799 t = templater.templater(templater.templatepath("map-cmdline.default"))
800 800 except Exception, inst:
801 801 ui.write(" %s\n" % inst)
802 802 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
803 803 problems += 1
804 804
805 805 # patch
806 806 ui.status(_("Checking patch...\n"))
807 807 patchproblems = 0
808 808 a = "1\n2\n3\n4\n"
809 809 b = "1\n2\n3\ninsert\n4\n"
810 810 fa = writetemp(a)
811 811 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
812 812 os.path.basename(fa))
813 813 fd = writetemp(d)
814 814
815 815 files = {}
816 816 try:
817 817 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
818 818 except util.Abort, e:
819 819 ui.write(_(" patch call failed:\n"))
820 820 ui.write(" " + str(e) + "\n")
821 821 patchproblems += 1
822 822 else:
823 823 if list(files) != [os.path.basename(fa)]:
824 824 ui.write(_(" unexpected patch output!\n"))
825 825 patchproblems += 1
826 826 a = file(fa).read()
827 827 if a != b:
828 828 ui.write(_(" patch test failed!\n"))
829 829 patchproblems += 1
830 830
831 831 if patchproblems:
832 832 if ui.config('ui', 'patch'):
833 833 ui.write(_(" (Current patch tool may be incompatible with patch,"
834 834 " or misconfigured. Please check your .hgrc file)\n"))
835 835 else:
836 836 ui.write(_(" Internal patcher failure, please report this error"
837 837 " to http://www.selenic.com/mercurial/bts\n"))
838 838 problems += patchproblems
839 839
840 840 os.unlink(fa)
841 841 os.unlink(fd)
842 842
843 843 # editor
844 844 ui.status(_("Checking commit editor...\n"))
845 845 editor = ui.geteditor()
846 846 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
847 847 if not cmdpath:
848 848 if editor == 'vi':
849 849 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
850 850 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
851 851 else:
852 852 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
853 853 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
854 854 problems += 1
855 855
856 856 # check username
857 857 ui.status(_("Checking username...\n"))
858 858 user = os.environ.get("HGUSER")
859 859 if user is None:
860 860 user = ui.config("ui", "username")
861 861 if user is None:
862 862 user = os.environ.get("EMAIL")
863 863 if not user:
864 864 ui.warn(" ")
865 865 ui.username()
866 866 ui.write(_(" (specify a username in your .hgrc file)\n"))
867 867
868 868 if not problems:
869 869 ui.status(_("No problems detected\n"))
870 870 else:
871 871 ui.write(_("%s problems detected,"
872 872 " please check your install!\n") % problems)
873 873
874 874 return problems
875 875
876 876 def debugrename(ui, repo, file1, *pats, **opts):
877 877 """dump rename information"""
878 878
879 879 ctx = repo.changectx(opts.get('rev', 'tip'))
880 880 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
881 881 ctx.node()):
882 882 fctx = ctx.filectx(abs)
883 883 m = fctx.filelog().renamed(fctx.filenode())
884 884 if m:
885 885 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
886 886 else:
887 887 ui.write(_("%s not renamed\n") % rel)
888 888
889 889 def debugwalk(ui, repo, *pats, **opts):
890 890 """show how files match on given patterns"""
891 891 items = list(cmdutil.walk(repo, pats, opts))
892 892 if not items:
893 893 return
894 894 fmt = '%%s %%-%ds %%-%ds %%s' % (
895 895 max([len(abs) for (src, abs, rel, exact) in items]),
896 896 max([len(rel) for (src, abs, rel, exact) in items]))
897 897 for src, abs, rel, exact in items:
898 898 line = fmt % (src, abs, rel, exact and 'exact' or '')
899 899 ui.write("%s\n" % line.rstrip())
900 900
901 901 def diff(ui, repo, *pats, **opts):
902 902 """diff repository (or selected files)
903 903
904 904 Show differences between revisions for the specified files.
905 905
906 906 Differences between files are shown using the unified diff format.
907 907
908 908 NOTE: diff may generate unexpected results for merges, as it will
909 909 default to comparing against the working directory's first parent
910 910 changeset if no revisions are specified.
911 911
912 912 When two revision arguments are given, then changes are shown
913 913 between those revisions. If only one revision is specified then
914 914 that revision is compared to the working directory, and, when no
915 915 revisions are specified, the working directory files are compared
916 916 to its parent.
917 917
918 918 Without the -a option, diff will avoid generating diffs of files
919 919 it detects as binary. With -a, diff will generate a diff anyway,
920 920 probably with undesirable results.
921 921 """
922 922 node1, node2 = cmdutil.revpair(repo, opts['rev'])
923 923
924 924 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
925 925
926 926 patch.diff(repo, node1, node2, fns, match=matchfn,
927 927 opts=patch.diffopts(ui, opts))
928 928
929 929 def export(ui, repo, *changesets, **opts):
930 930 """dump the header and diffs for one or more changesets
931 931
932 932 Print the changeset header and diffs for one or more revisions.
933 933
934 934 The information shown in the changeset header is: author,
935 935 changeset hash, parent(s) and commit comment.
936 936
937 937 NOTE: export may generate unexpected diff output for merge changesets,
938 938 as it will compare the merge changeset against its first parent only.
939 939
940 940 Output may be to a file, in which case the name of the file is
941 941 given using a format string. The formatting rules are as follows:
942 942
943 943 %% literal "%" character
944 944 %H changeset hash (40 bytes of hexadecimal)
945 945 %N number of patches being generated
946 946 %R changeset revision number
947 947 %b basename of the exporting repository
948 948 %h short-form changeset hash (12 bytes of hexadecimal)
949 949 %n zero-padded sequence number, starting at 1
950 950 %r zero-padded changeset revision number
951 951
952 952 Without the -a option, export will avoid generating diffs of files
953 953 it detects as binary. With -a, export will generate a diff anyway,
954 954 probably with undesirable results.
955 955
956 956 With the --switch-parent option, the diff will be against the second
957 957 parent. It can be useful to review a merge.
958 958 """
959 959 if not changesets:
960 960 raise util.Abort(_("export requires at least one changeset"))
961 961 revs = cmdutil.revrange(repo, changesets)
962 962 if len(revs) > 1:
963 963 ui.note(_('exporting patches:\n'))
964 964 else:
965 965 ui.note(_('exporting patch:\n'))
966 966 patch.export(repo, revs, template=opts['output'],
967 967 switch_parent=opts['switch_parent'],
968 968 opts=patch.diffopts(ui, opts))
969 969
970 970 def grep(ui, repo, pattern, *pats, **opts):
971 971 """search for a pattern in specified files and revisions
972 972
973 973 Search revisions of files for a regular expression.
974 974
975 975 This command behaves differently than Unix grep. It only accepts
976 976 Python/Perl regexps. It searches repository history, not the
977 977 working directory. It always prints the revision number in which
978 978 a match appears.
979 979
980 980 By default, grep only prints output for the first revision of a
981 981 file in which it finds a match. To get it to print every revision
982 982 that contains a change in match status ("-" for a match that
983 983 becomes a non-match, or "+" for a non-match that becomes a match),
984 984 use the --all flag.
985 985 """
986 986 reflags = 0
987 987 if opts['ignore_case']:
988 988 reflags |= re.I
989 989 try:
990 990 regexp = re.compile(pattern, reflags)
991 991 except Exception, inst:
992 992 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
993 993 return None
994 994 sep, eol = ':', '\n'
995 995 if opts['print0']:
996 996 sep = eol = '\0'
997 997
998 998 fcache = {}
999 999 def getfile(fn):
1000 1000 if fn not in fcache:
1001 1001 fcache[fn] = repo.file(fn)
1002 1002 return fcache[fn]
1003 1003
1004 1004 def matchlines(body):
1005 1005 begin = 0
1006 1006 linenum = 0
1007 1007 while True:
1008 1008 match = regexp.search(body, begin)
1009 1009 if not match:
1010 1010 break
1011 1011 mstart, mend = match.span()
1012 1012 linenum += body.count('\n', begin, mstart) + 1
1013 1013 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1014 1014 lend = body.find('\n', mend)
1015 1015 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1016 1016 begin = lend + 1
1017 1017
1018 1018 class linestate(object):
1019 1019 def __init__(self, line, linenum, colstart, colend):
1020 1020 self.line = line
1021 1021 self.linenum = linenum
1022 1022 self.colstart = colstart
1023 1023 self.colend = colend
1024 1024
1025 1025 def __eq__(self, other):
1026 1026 return self.line == other.line
1027 1027
1028 1028 matches = {}
1029 1029 copies = {}
1030 1030 def grepbody(fn, rev, body):
1031 1031 matches[rev].setdefault(fn, [])
1032 1032 m = matches[rev][fn]
1033 1033 for lnum, cstart, cend, line in matchlines(body):
1034 1034 s = linestate(line, lnum, cstart, cend)
1035 1035 m.append(s)
1036 1036
1037 1037 def difflinestates(a, b):
1038 1038 sm = difflib.SequenceMatcher(None, a, b)
1039 1039 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1040 1040 if tag == 'insert':
1041 1041 for i in xrange(blo, bhi):
1042 1042 yield ('+', b[i])
1043 1043 elif tag == 'delete':
1044 1044 for i in xrange(alo, ahi):
1045 1045 yield ('-', a[i])
1046 1046 elif tag == 'replace':
1047 1047 for i in xrange(alo, ahi):
1048 1048 yield ('-', a[i])
1049 1049 for i in xrange(blo, bhi):
1050 1050 yield ('+', b[i])
1051 1051
1052 1052 prev = {}
1053 1053 def display(fn, rev, states, prevstates):
1054 1054 datefunc = ui.quiet and util.shortdate or util.datestr
1055 1055 found = False
1056 1056 filerevmatches = {}
1057 1057 r = prev.get(fn, -1)
1058 1058 if opts['all']:
1059 1059 iter = difflinestates(states, prevstates)
1060 1060 else:
1061 1061 iter = [('', l) for l in prevstates]
1062 1062 for change, l in iter:
1063 1063 cols = [fn, str(r)]
1064 1064 if opts['line_number']:
1065 1065 cols.append(str(l.linenum))
1066 1066 if opts['all']:
1067 1067 cols.append(change)
1068 1068 if opts['user']:
1069 1069 cols.append(ui.shortuser(get(r)[1]))
1070 1070 if opts.get('date'):
1071 1071 cols.append(datefunc(get(r)[2]))
1072 1072 if opts['files_with_matches']:
1073 1073 c = (fn, r)
1074 1074 if c in filerevmatches:
1075 1075 continue
1076 1076 filerevmatches[c] = 1
1077 1077 else:
1078 1078 cols.append(l.line)
1079 1079 ui.write(sep.join(cols), eol)
1080 1080 found = True
1081 1081 return found
1082 1082
1083 1083 fstate = {}
1084 1084 skip = {}
1085 1085 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1086 1086 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1087 1087 found = False
1088 1088 follow = opts.get('follow')
1089 1089 for st, rev, fns in changeiter:
1090 1090 if st == 'window':
1091 1091 matches.clear()
1092 1092 elif st == 'add':
1093 1093 ctx = repo.changectx(rev)
1094 1094 matches[rev] = {}
1095 1095 for fn in fns:
1096 1096 if fn in skip:
1097 1097 continue
1098 1098 try:
1099 1099 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1100 1100 fstate.setdefault(fn, [])
1101 1101 if follow:
1102 1102 copied = getfile(fn).renamed(ctx.filenode(fn))
1103 1103 if copied:
1104 1104 copies.setdefault(rev, {})[fn] = copied[0]
1105 1105 except revlog.LookupError:
1106 1106 pass
1107 1107 elif st == 'iter':
1108 1108 states = matches[rev].items()
1109 1109 states.sort()
1110 1110 for fn, m in states:
1111 1111 copy = copies.get(rev, {}).get(fn)
1112 1112 if fn in skip:
1113 1113 if copy:
1114 1114 skip[copy] = True
1115 1115 continue
1116 1116 if fn in prev or fstate[fn]:
1117 1117 r = display(fn, rev, m, fstate[fn])
1118 1118 found = found or r
1119 1119 if r and not opts['all']:
1120 1120 skip[fn] = True
1121 1121 if copy:
1122 1122 skip[copy] = True
1123 1123 fstate[fn] = m
1124 1124 if copy:
1125 1125 fstate[copy] = m
1126 1126 prev[fn] = rev
1127 1127
1128 1128 fstate = fstate.items()
1129 1129 fstate.sort()
1130 1130 for fn, state in fstate:
1131 1131 if fn in skip:
1132 1132 continue
1133 1133 if fn not in copies.get(prev[fn], {}):
1134 1134 found = display(fn, rev, {}, state) or found
1135 1135 return (not found and 1) or 0
1136 1136
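difflinestates above reduces two lists of per-line match states to a stream of ('+', line) / ('-', line) events, which is what drives the --all output. A standalone sketch with plain strings in place of linestate objects:

# Standalone sketch of difflinestates above, using plain strings instead
# of linestate objects.
import difflib

def difflinestates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag == 'insert':
            for i in xrange(blo, bhi):
                yield ('+', b[i])
        elif tag == 'delete':
            for i in xrange(alo, ahi):
                yield ('-', a[i])
        elif tag == 'replace':
            for i in xrange(alo, ahi):
                yield ('-', a[i])
            for i in xrange(blo, bhi):
                yield ('+', b[i])

old = ['foo = 1', 'bar = 2']
new = ['foo = 1', 'baz = 3']
for change, line in difflinestates(old, new):
    print("%s %s" % (change, line))
# - bar = 2
# + baz = 3
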
1137 1137 def heads(ui, repo, *branchrevs, **opts):
1138 1138 """show current repository heads or show branch heads
1139 1139
1140 1140 With no arguments, show all repository head changesets.
1141 1141
1142 1142 If branch or revision names are given, this will show the heads of
1143 1143 the specified branches or the branches those revisions are tagged
1144 1144 with.
1145 1145
1146 1146 Repository "heads" are changesets that don't have child
1147 1147 changesets. They are where development generally takes place and
1148 1148 are the usual targets for update and merge operations.
1149 1149
1150 1150 Branch heads are changesets that have a given branch tag, but have
1151 1151 no child changesets with that tag. They are usually where
1152 1152 development on the given branch takes place.
1153 1153 """
1154 1154 if opts['rev']:
1155 1155 start = repo.lookup(opts['rev'])
1156 1156 else:
1157 1157 start = None
1158 1158 if not branchrevs:
1159 1159 # Assume we're looking for repo-wide heads if no revs were specified.
1160 1160 heads = repo.heads(start)
1161 1161 else:
1162 1162 heads = []
1163 1163 visitedset = util.set()
1164 1164 for branchrev in branchrevs:
1165 1165 branch = repo.changectx(branchrev).branch()
1166 1166 if branch in visitedset:
1167 1167 continue
1168 1168 visitedset.add(branch)
1169 1169 bheads = repo.branchheads(branch, start)
1170 1170 if not bheads:
1171 1171 if branch != branchrev:
1172 1172 ui.warn(_("no changes on branch %s containing %s are "
1173 1173 "reachable from %s\n")
1174 1174 % (branch, branchrev, opts['rev']))
1175 1175 else:
1176 1176 ui.warn(_("no changes on branch %s are reachable from %s\n")
1177 1177 % (branch, opts['rev']))
1178 1178 heads.extend(bheads)
1179 1179 if not heads:
1180 1180 return 1
1181 1181 displayer = cmdutil.show_changeset(ui, repo, opts)
1182 1182 for n in heads:
1183 1183 displayer.show(changenode=n)
1184 1184
1185 1185 def help_(ui, name=None, with_version=False):
1186 1186 """show help for a command, extension, or list of commands
1187 1187
1188 1188 With no arguments, print a list of commands and short help.
1189 1189
1190 1190 Given a command name, print help for that command.
1191 1191
1192 1192 Given an extension name, print help for that extension, and the
1193 1193 commands it provides."""
1194 1194 option_lists = []
1195 1195
1196 1196 def addglobalopts(aliases):
1197 1197 if ui.verbose:
1198 1198 option_lists.append((_("global options:"), globalopts))
1199 1199 if name == 'shortlist':
1200 1200 option_lists.append((_('use "hg help" for the full list '
1201 1201 'of commands'), ()))
1202 1202 else:
1203 1203 if name == 'shortlist':
1204 1204 msg = _('use "hg help" for the full list of commands '
1205 1205 'or "hg -v" for details')
1206 1206 elif aliases:
1207 1207 msg = _('use "hg -v help%s" to show aliases and '
1208 1208 'global options') % (name and " " + name or "")
1209 1209 else:
1210 1210 msg = _('use "hg -v help %s" to show global options') % name
1211 1211 option_lists.append((msg, ()))
1212 1212
1213 1213 def helpcmd(name):
1214 1214 if with_version:
1215 1215 version_(ui)
1216 1216 ui.write('\n')
1217 1217 aliases, i = cmdutil.findcmd(ui, name, table)
1218 1218 # synopsis
1219 1219 ui.write("%s\n" % i[2])
1220 1220
1221 1221 # aliases
1222 1222 if not ui.quiet and len(aliases) > 1:
1223 1223 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1224 1224
1225 1225 # description
1226 1226 doc = i[0].__doc__
1227 1227 if not doc:
1228 1228 doc = _("(No help text available)")
1229 1229 if ui.quiet:
1230 1230 doc = doc.splitlines(0)[0]
1231 1231 ui.write("\n%s\n" % doc.rstrip())
1232 1232
1233 1233 if not ui.quiet:
1234 1234 # options
1235 1235 if i[1]:
1236 1236 option_lists.append((_("options:\n"), i[1]))
1237 1237
1238 1238 addglobalopts(False)
1239 1239
1240 1240 def helplist(header, select=None):
1241 1241 h = {}
1242 1242 cmds = {}
1243 1243 for c, e in table.items():
1244 1244 f = c.split("|", 1)[0]
1245 1245 if select and not select(f):
1246 1246 continue
1247 1247 if name == "shortlist" and not f.startswith("^"):
1248 1248 continue
1249 1249 f = f.lstrip("^")
1250 1250 if not ui.debugflag and f.startswith("debug"):
1251 1251 continue
1252 1252 doc = e[0].__doc__
1253 1253 if not doc:
1254 1254 doc = _("(No help text available)")
1255 1255 h[f] = doc.splitlines(0)[0].rstrip()
1256 1256 cmds[f] = c.lstrip("^")
1257 1257
1258 1258 if not h:
1259 1259 ui.status(_('no commands defined\n'))
1260 1260 return
1261 1261
1262 1262 ui.status(header)
1263 1263 fns = h.keys()
1264 1264 fns.sort()
1265 1265 m = max(map(len, fns))
1266 1266 for f in fns:
1267 1267 if ui.verbose:
1268 1268 commands = cmds[f].replace("|",", ")
1269 1269 ui.write(" %s:\n %s\n"%(commands, h[f]))
1270 1270 else:
1271 1271 ui.write(' %-*s %s\n' % (m, f, h[f]))
1272 1272
1273 1273 if not ui.quiet:
1274 1274 addglobalopts(True)
1275 1275
1276 1276 def helptopic(name):
1277 1277 v = None
1278 1278 for i in help.helptable:
1279 1279 l = i.split('|')
1280 1280 if name in l:
1281 1281 v = i
1282 1282 header = l[-1]
1283 1283 if not v:
1284 1284 raise cmdutil.UnknownCommand(name)
1285 1285
1286 1286 # description
1287 1287 doc = help.helptable[v]
1288 1288 if not doc:
1289 1289 doc = _("(No help text available)")
1290 1290 if callable(doc):
1291 1291 doc = doc()
1292 1292
1293 1293 ui.write("%s\n" % header)
1294 1294 ui.write("%s\n" % doc.rstrip())
1295 1295
1296 1296 def helpext(name):
1297 1297 try:
1298 1298 mod = extensions.find(name)
1299 1299 except KeyError:
1300 1300 raise cmdutil.UnknownCommand(name)
1301 1301
1302 1302 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1303 1303 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1304 1304 for d in doc[1:]:
1305 1305 ui.write(d, '\n')
1306 1306
1307 1307 ui.status('\n')
1308 1308
1309 1309 try:
1310 1310 ct = mod.cmdtable
1311 1311 except AttributeError:
1312 1312 ct = {}
1313 1313
1314 1314 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1315 1315 helplist(_('list of commands:\n\n'), modcmds.has_key)
1316 1316
1317 1317 if name and name != 'shortlist':
1318 1318 i = None
1319 1319 for f in (helpcmd, helptopic, helpext):
1320 1320 try:
1321 1321 f(name)
1322 1322 i = None
1323 1323 break
1324 1324 except cmdutil.UnknownCommand, inst:
1325 1325 i = inst
1326 1326 if i:
1327 1327 raise i
1328 1328
1329 1329 else:
1330 1330 # program name
1331 1331 if ui.verbose or with_version:
1332 1332 version_(ui)
1333 1333 else:
1334 1334 ui.status(_("Mercurial Distributed SCM\n"))
1335 1335 ui.status('\n')
1336 1336
1337 1337 # list of commands
1338 1338 if name == "shortlist":
1339 1339 header = _('basic commands:\n\n')
1340 1340 else:
1341 1341 header = _('list of commands:\n\n')
1342 1342
1343 1343 helplist(header)
1344 1344
1345 1345 # list all option lists
1346 1346 opt_output = []
1347 1347 for title, options in option_lists:
1348 1348 opt_output.append(("\n%s" % title, None))
1349 1349 for shortopt, longopt, default, desc in options:
1350 1350 if "DEPRECATED" in desc and not ui.verbose: continue
1351 1351 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1352 1352 longopt and " --%s" % longopt),
1353 1353 "%s%s" % (desc,
1354 1354 default
1355 1355 and _(" (default: %s)") % default
1356 1356 or "")))
1357 1357
1358 1358 if opt_output:
1359 1359 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1360 1360 for first, second in opt_output:
1361 1361 if second:
1362 1362 ui.write(" %-*s %s\n" % (opts_len, first, second))
1363 1363 else:
1364 1364 ui.write("%s\n" % first)
1365 1365
1366 1366 def identify(ui, repo, source=None,
1367 1367 rev=None, num=None, id=None, branch=None, tags=None):
1368 1368 """identify the working copy or specified revision
1369 1369
1370 1370 With no revision, print a summary of the current state of the repo.
1371 1371
1372 1372 With a path, do a lookup in another repository.
1373 1373
1374 1374 This summary identifies the repository state using one or two parent
1375 1375 hash identifiers, followed by a "+" if there are uncommitted changes
1376 1376 in the working directory, a list of tags for this revision and a branch
1377 1377 name for non-default branches.
1378 1378 """
1379 1379
1380 1380 if not repo and not source:
1381 1381 raise util.Abort(_("There is no Mercurial repository here "
1382 1382 "(.hg not found)"))
1383 1383
1384 1384 hexfunc = ui.debugflag and hex or short
1385 1385 default = not (num or id or branch or tags)
1386 1386 output = []
1387 1387
1388 1388 if source:
1389 1389 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1390 1390 srepo = hg.repository(ui, source)
1391 1391 if not rev and revs:
1392 1392 rev = revs[0]
1393 1393 if not rev:
1394 1394 rev = "tip"
1395 1395 if num or branch or tags:
1396 1396 raise util.Abort(
1397 1397 "can't query remote revision number, branch, or tags")
1398 1398 output = [hexfunc(srepo.lookup(rev))]
1399 1399 elif not rev:
1400 1400 ctx = repo.workingctx()
1401 1401 parents = ctx.parents()
1402 1402 changed = False
1403 1403 if default or id or num:
1404 1404 changed = ctx.files() + ctx.deleted()
1405 1405 if default or id:
1406 1406 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1407 1407 (changed) and "+" or "")]
1408 1408 if num:
1409 1409 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1410 1410 (changed) and "+" or ""))
1411 1411 else:
1412 1412 ctx = repo.changectx(rev)
1413 1413 if default or id:
1414 1414 output = [hexfunc(ctx.node())]
1415 1415 if num:
1416 1416 output.append(str(ctx.rev()))
1417 1417
1418 1418 if not source and default and not ui.quiet:
1419 1419 b = util.tolocal(ctx.branch())
1420 1420 if b != 'default':
1421 1421 output.append("(%s)" % b)
1422 1422
1423 1423 # multiple tags for a single parent separated by '/'
1424 1424 t = "/".join(ctx.tags())
1425 1425 if t:
1426 1426 output.append(t)
1427 1427
1428 1428 if branch:
1429 1429 output.append(util.tolocal(ctx.branch()))
1430 1430
1431 1431 if tags:
1432 1432 output.extend(ctx.tags())
1433 1433
1434 1434 ui.write("%s\n" % ' '.join(output))
1435 1435
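For the working-directory case above, the trailing "+" simply records whether the context reports any changed or deleted files, appended after the joined parent hashes. A standalone sketch of assembling that identifier (the hashes and file names are invented for illustration):

# Standalone sketch of the "<parents>+" identifier assembled above.
def identify_line(parent_hashes, changed_files):
    dirty = (changed_files and "+") or ""
    return "+".join(parent_hashes) + dirty

print(identify_line(["1e3f6b508bb7"], ["src/foo.py"]))        # 1e3f6b508bb7+
print(identify_line(["aa1111111111", "bb2222222222"], []))    # clean merge ctx
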
1436 1436 def import_(ui, repo, patch1, *patches, **opts):
1437 1437 """import an ordered set of patches
1438 1438
1439 1439 Import a list of patches and commit them individually.
1440 1440
1441 1441 If there are outstanding changes in the working directory, import
1442 1442 will abort unless given the -f flag.
1443 1443
1444 1444 You can import a patch straight from a mail message. Even patches
1445 1445 as attachments work (body part must be type text/plain or
1446 1446 text/x-patch to be used). From and Subject headers of email
1447 1447 message are used as default committer and commit message. All
1448 1448 text/plain body parts before first diff are added to commit
1449 1449 message.
1450 1450
1451 1451 If the imported patch was generated by hg export, user and description
1452 1452 from patch override values from message headers and body. Values
1453 1453 given on command line with -m and -u override these.
1454 1454
1455 1455 If --exact is specified, import will set the working directory
1456 1456 to the parent of each patch before applying it, and will abort
1457 1457 if the resulting changeset has a different ID than the one
1458 1458 recorded in the patch. This may happen due to character set
1459 1459 problems or other deficiencies in the text patch format.
1460 1460
1461 1461 To read a patch from standard input, use patch name "-".
1462 1462 See 'hg help dates' for a list of formats valid for -d/--date.
1463 1463 """
1464 1464 patches = (patch1,) + patches
1465 1465
1466 1466 date = opts.get('date')
1467 1467 if date:
1468 1468 opts['date'] = util.parsedate(date)
1469 1469
1470 1470 if opts.get('exact') or not opts['force']:
1471 1471 cmdutil.bail_if_changed(repo)
1472 1472
1473 1473 d = opts["base"]
1474 1474 strip = opts["strip"]
1475 1475 wlock = lock = None
1476 1476 try:
1477 1477 wlock = repo.wlock()
1478 1478 lock = repo.lock()
1479 1479 for p in patches:
1480 1480 pf = os.path.join(d, p)
1481 1481
1482 1482 if pf == '-':
1483 1483 ui.status(_("applying patch from stdin\n"))
1484 1484 data = patch.extract(ui, sys.stdin)
1485 1485 else:
1486 1486 ui.status(_("applying %s\n") % p)
1487 1487 if os.path.exists(pf):
1488 1488 data = patch.extract(ui, file(pf, 'rb'))
1489 1489 else:
1490 1490 data = patch.extract(ui, urllib.urlopen(pf))
1491 1491 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1492 1492
1493 1493 if tmpname is None:
1494 1494 raise util.Abort(_('no diffs found'))
1495 1495
1496 1496 try:
1497 1497 cmdline_message = cmdutil.logmessage(opts)
1498 1498 if cmdline_message:
1499 1499 # pickup the cmdline msg
1500 1500 message = cmdline_message
1501 1501 elif message:
1502 1502 # pickup the patch msg
1503 1503 message = message.strip()
1504 1504 else:
1505 1505 # launch the editor
1506 1506 message = None
1507 1507 ui.debug(_('message:\n%s\n') % message)
1508 1508
1509 1509 wp = repo.workingctx().parents()
1510 1510 if opts.get('exact'):
1511 1511 if not nodeid or not p1:
1512 1512 raise util.Abort(_('not a mercurial patch'))
1513 1513 p1 = repo.lookup(p1)
1514 1514 p2 = repo.lookup(p2 or hex(nullid))
1515 1515
1516 1516 if p1 != wp[0].node():
1517 1517 hg.clean(repo, p1)
1518 1518 repo.dirstate.setparents(p1, p2)
1519 1519 elif p2:
1520 1520 try:
1521 1521 p1 = repo.lookup(p1)
1522 1522 p2 = repo.lookup(p2)
1523 1523 if p1 == wp[0].node():
1524 1524 repo.dirstate.setparents(p1, p2)
1525 1525 except hg.RepoError:
1526 1526 pass
1527 1527 if opts.get('exact') or opts.get('import_branch'):
1528 1528 repo.dirstate.setbranch(branch or 'default')
1529 1529
1530 1530 files = {}
1531 1531 try:
1532 1532 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1533 1533 files=files)
1534 1534 finally:
1535 1535 files = patch.updatedir(ui, repo, files)
1536 1536 if not opts.get('no_commit'):
1537 1537 n = repo.commit(files, message, opts.get('user') or user,
1538 1538 opts.get('date') or date)
1539 1539 if opts.get('exact'):
1540 1540 if hex(n) != nodeid:
1541 1541 repo.rollback()
1542 1542 raise util.Abort(_('patch is damaged'
1543 1543 ' or loses information'))
1544 1544 # Force a dirstate write so that the next transaction
1545 1545 # backs up an up-to-date file.
1546 1546 repo.dirstate.write()
1547 1547 finally:
1548 1548 os.unlink(tmpname)
1549 1549 finally:
1550 1550 del lock, wlock
1551 1551
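In import_ above, the commit message is chosen with a fixed precedence: an explicit -m/-l message wins, then the message extracted from the patch, and finally None, which (per the "launch the editor" comment in the code) defers to an interactive editor at commit time. A minimal standalone restatement of that precedence; the helper name and arguments are mine, not part of the module:

def choose_message(cmdline_message, patch_message):
    # -m/--logfile beats the message found in the patch; None means
    # "let the commit step prompt for one"
    if cmdline_message:
        return cmdline_message
    if patch_message:
        return patch_message.strip()
    return None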
1552 1552 def incoming(ui, repo, source="default", **opts):
1553 1553 """show new changesets found in source
1554 1554
1555 1555 Show new changesets found in the specified path/URL or the default
1556 1556 pull location. These are the changesets that would be pulled if a pull
1557 1557 was requested.
1558 1558
1559 1559 For a remote repository, using --bundle avoids downloading the changesets
1560 1560 twice if the incoming command is followed by a pull.
1561 1561
1562 1562 See pull for valid source format details.
1563 1563 """
1564 1564 limit = cmdutil.loglimit(opts)
1565 1565 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1566 1566 cmdutil.setremoteconfig(ui, opts)
1567 1567
1568 1568 other = hg.repository(ui, source)
1569 1569 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1570 1570 if revs:
1571 1571 revs = [other.lookup(rev) for rev in revs]
1572 1572 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1573 1573 if not incoming:
1574 1574 try:
1575 1575 os.unlink(opts["bundle"])
1576 1576 except:
1577 1577 pass
1578 1578 ui.status(_("no changes found\n"))
1579 1579 return 1
1580 1580
1581 1581 cleanup = None
1582 1582 try:
1583 1583 fname = opts["bundle"]
1584 1584 if fname or not other.local():
1585 1585 # create a bundle (uncompressed if other repo is not local)
1586 1586 if revs is None:
1587 1587 cg = other.changegroup(incoming, "incoming")
1588 1588 else:
1589 1589 cg = other.changegroupsubset(incoming, revs, 'incoming')
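            # bundle format names: HG10BZ is a bzip2-compressed bundle,
            # HG10UN is uncompressed (used when the remote repo is not local)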
1590 1590 bundletype = other.local() and "HG10BZ" or "HG10UN"
1591 1591 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1592 1592 # keep written bundle?
1593 1593 if opts["bundle"]:
1594 1594 cleanup = None
1595 1595 if not other.local():
1596 1596 # use the created uncompressed bundlerepo
1597 1597 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1598 1598
1599 1599 o = other.changelog.nodesbetween(incoming, revs)[0]
1600 1600 if opts['newest_first']:
1601 1601 o.reverse()
1602 1602 displayer = cmdutil.show_changeset(ui, other, opts)
1603 1603 count = 0
1604 1604 for n in o:
1605 1605 if count >= limit:
1606 1606 break
1607 1607 parents = [p for p in other.changelog.parents(n) if p != nullid]
1608 1608 if opts['no_merges'] and len(parents) == 2:
1609 1609 continue
1610 1610 count += 1
1611 1611 displayer.show(changenode=n)
1612 1612 finally:
1613 1613 if hasattr(other, 'close'):
1614 1614 other.close()
1615 1615 if cleanup:
1616 1616 os.unlink(cleanup)
1617 1617
1618 1618 def init(ui, dest=".", **opts):
1619 1619 """create a new repository in the given directory
1620 1620
1621 1621 Initialize a new repository in the given directory. If the given
1622 1622 directory does not exist, it is created.
1623 1623
1624 1624 If no directory is given, the current directory is used.
1625 1625
1626 1626 It is possible to specify an ssh:// URL as the destination.
1627 1627 Look at the help text for the pull command for important details
1628 1628 about ssh:// URLs.
1629 1629 """
1630 1630 cmdutil.setremoteconfig(ui, opts)
1631 1631 hg.repository(ui, dest, create=1)
1632 1632
1633 1633 def locate(ui, repo, *pats, **opts):
1634 1634 """locate files matching specific patterns
1635 1635
1636 1636 Print all files under Mercurial control whose names match the
1637 1637 given patterns.
1638 1638
1639 1639 This command searches the entire repository by default. To search
1640 1640 just the current directory and its subdirectories, use
1641 1641 "--include .".
1642 1642
1643 1643 If no patterns are given to match, this command prints all file
1644 1644 names.
1645 1645
1646 1646 If you want to feed the output of this command into the "xargs"
1647 1647 command, use the "-0" option to both this command and "xargs".
1648 1648 This will avoid the problem of "xargs" treating single filenames
1649 1649 that contain white space as multiple filenames.
1650 1650 """
1651 1651 end = opts['print0'] and '\0' or '\n'
1652 1652 rev = opts['rev']
1653 1653 if rev:
1654 1654 node = repo.lookup(rev)
1655 1655 else:
1656 1656 node = None
1657 1657
1658 1658 ret = 1
1659 1659 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1660 1660 badmatch=util.always,
1661 1661 default='relglob'):
1662 1662 if src == 'b':
1663 1663 continue
1664 1664 if not node and abs not in repo.dirstate:
1665 1665 continue
1666 1666 if opts['fullpath']:
1667 1667 ui.write(os.path.join(repo.root, abs), end)
1668 1668 else:
1669 1669 ui.write(((pats and rel) or abs), end)
1670 1670 ret = 0
1671 1671
1672 1672 return ret
1673 1673
1674 1674 def log(ui, repo, *pats, **opts):
1675 1675 """show revision history of entire repository or files
1676 1676
1677 1677 Print the revision history of the specified files or the entire
1678 1678 project.
1679 1679
1680 1680 File history is shown without following rename or copy history of
1681 1681 files. Use -f/--follow with a file name to follow history across
1682 1682 renames and copies. --follow without a file name will only show
1683 1683 ancestors or descendants of the starting revision. --follow-first
1684 1684 only follows the first parent of merge revisions.
1685 1685
1686 1686 If no revision range is specified, the default is tip:0 unless
1687 1687 --follow is set, in which case the working directory parent is
1688 1688 used as the starting revision.
1689 1689
1690 1690 See 'hg help dates' for a list of formats valid for -d/--date.
1691 1691
1692 1692 By default this command outputs: changeset id and hash, tags,
1693 1693 non-trivial parents, user, date and time, and a summary for each
1694 1694 commit. When the -v/--verbose switch is used, the list of changed
1695 1695 files and the full commit message are shown.
1696 1696
1697 1697 NOTE: log -p may generate unexpected diff output for merge
1698 1698 changesets, as it will compare the merge changeset against its
1699 1699 first parent only. Also, the files: list will only reflect files
1700 1700 that are different from BOTH parents.
1701 1701
1702 1702 """
1703 1703
1704 1704 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1705 1705 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1706 1706
1707 1707 limit = cmdutil.loglimit(opts)
1708 1708 count = 0
1709 1709
1710 1710 if opts['copies'] and opts['rev']:
1711 1711 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1712 1712 else:
1713 1713 endrev = repo.changelog.count()
1714 1714 rcache = {}
1715 1715 ncache = {}
1716 1716 def getrenamed(fn, rev):
1717 1717 '''looks up all renames for a file (up to endrev) the first
1718 1718 time the file is given. It indexes on the changerev and only
1719 1719 parses the manifest if linkrev != changerev.
1720 1720 Returns rename info for fn at changerev rev.'''
1721 1721 if fn not in rcache:
1722 1722 rcache[fn] = {}
1723 1723 ncache[fn] = {}
1724 1724 fl = repo.file(fn)
1725 1725 for i in xrange(fl.count()):
1726 1726 node = fl.node(i)
1727 1727 lr = fl.linkrev(node)
1728 1728 renamed = fl.renamed(node)
1729 1729 rcache[fn][lr] = renamed
1730 1730 if renamed:
1731 1731 ncache[fn][node] = renamed
1732 1732 if lr >= endrev:
1733 1733 break
1734 1734 if rev in rcache[fn]:
1735 1735 return rcache[fn][rev]
1736 1736
1737 1737 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1738 1738 # filectx logic.
1739 1739
1740 1740 try:
1741 1741 return repo.changectx(rev).filectx(fn).renamed()
1742 1742 except revlog.LookupError:
1743 1743 pass
1744 1744 return None
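    # Illustrative cache shapes (sketch only, not part of the module):
    #   rcache = {filename: {linkrev: renamed-info}}   renamed-info is None/False
    #   ncache = {filename: {filenode: renamed-info}}  or (old name, old filenode)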
1745 1745
1746 1746 df = False
1747 1747 if opts["date"]:
1748 1748 df = util.matchdate(opts["date"])
1749 1749
1750 1750 only_branches = opts['only_branch']
1751 1751
1752 1752 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1753 1753 for st, rev, fns in changeiter:
1754 1754 if st == 'add':
1755 1755 changenode = repo.changelog.node(rev)
1756 1756 parents = [p for p in repo.changelog.parentrevs(rev)
1757 1757 if p != nullrev]
1758 1758 if opts['no_merges'] and len(parents) == 2:
1759 1759 continue
1760 1760 if opts['only_merges'] and len(parents) != 2:
1761 1761 continue
1762 1762
1763 1763 if only_branches:
1764 1764 revbranch = get(rev)[5]['branch']
1765 1765 if revbranch not in only_branches:
1766 1766 continue
1767 1767
1768 1768 if df:
1769 1769 changes = get(rev)
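                # changes[2] is the changeset's (unixtime, tzoffset) date pair,
                # matched here against the --date spec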
1770 1770 if not df(changes[2][0]):
1771 1771 continue
1772 1772
1773 1773 if opts['keyword']:
1774 1774 changes = get(rev)
1775 1775 miss = 0
1776 1776 for k in [kw.lower() for kw in opts['keyword']]:
1777 1777 if not (k in changes[1].lower() or
1778 1778 k in changes[4].lower() or
1779 1779 k in " ".join(changes[3]).lower()):
1780 1780 miss = 1
1781 1781 break
1782 1782 if miss:
1783 1783 continue
1784 1784
1785 1785 copies = []
1786 1786 if opts.get('copies') and rev:
1787 1787 for fn in get(rev)[3]:
1788 1788 rename = getrenamed(fn, rev)
1789 1789 if rename:
1790 1790 copies.append((fn, rename[0]))
1791 1791 displayer.show(rev, changenode, copies=copies)
1792 1792 elif st == 'iter':
1793 1793 if count == limit: break
1794 1794 if displayer.flush(rev):
1795 1795 count += 1
1796 1796
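The -k/--keyword filter in log above keeps a revision only when every keyword occurs in the committer name, the description, or one of the changed file names. A compact restatement of that rule as a sketch; the function and argument names are mine:

def matches_keywords(keywords, user, desc, files):
    """True only if every keyword occurs in user, desc, or a file name."""
    haystacks = (user.lower(), desc.lower(), " ".join(files).lower())
    return all(any(kw in hay for hay in haystacks)
               for kw in (k.lower() for k in keywords))

# e.g. matches_keywords(['acl', 'hook'], 'vadim', 'add acl hook', ['hgext/acl.py'])
# -> True, since both keywords appear in the description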
1797 1797 def manifest(ui, repo, node=None, rev=None):
1798 1798 """output the current or given revision of the project manifest
1799 1799
1800 1800 Print a list of version controlled files for the given revision.
1801 1801 If no revision is given, the parent of the working directory is used,
1802 1802 or tip if no revision is checked out.
1803 1803
1804 1804 The manifest is the list of files being version controlled; it records
1805 1805 each tracked file name together with its file revision and flags.
1806 1806
1807 1807 With -v flag, print file permissions, symlink and executable bits. With
1808 1808 --debug flag, print file revision hashes.
1809 1809 """
1810 1810
1811 1811 if rev and node:
1812 1812 raise util.Abort(_("please specify just one revision"))
1813 1813
1814 1814 if not node:
1815 1815 node = rev
1816 1816
1817 1817 m = repo.changectx(node).manifest()
1818 1818 files = m.keys()
1819 1819 files.sort()
1820 1820
1821 1821 for f in files:
1822 1822 if ui.debugflag:
1823 1823 ui.write("%40s " % hex(m[f]))
1824 1824 if ui.verbose:
1825 1825 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1826 1826 perm = m.execf(f) and "755" or "644"
1827 1827 ui.write("%3s %1s " % (perm, type))
1828 1828 ui.write("%s\n" % f)
1829 1829
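The and/or chains in manifest's verbose branch above are the pre-'x if c else y' conditional idiom. A sketch of the same flag logic in the newer form, reusing the m.execf/m.linkf helpers seen above (the helper name manifest_flags is mine):

def manifest_flags(m, f):
    # '*' marks executables, '@' marks symlinks, ' ' plain files;
    # the permission column mirrors execf: 755 for executables, else 644
    ftype = "*" if m.execf(f) else ("@" if m.linkf(f) else " ")
    perm = "755" if m.execf(f) else "644"
    return perm, ftype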
1830 1830 def merge(ui, repo, node=None, force=None, rev=None):
1831 1831 """merge working directory with another revision
1832 1832
1833 1833 Merge the contents of the current working directory and the
1834 1834 requested revision. Files that changed between either parent are
1835 1835 marked as changed for the next commit and a commit must be
1836 1836 performed before any further updates are allowed.
1837 1837
1838 1838 If no revision is specified, the working directory's parent is a
1839 1839 head revision, and the repository contains exactly one other head,
1840 1840 then that other head is merged with by default. Otherwise, an explicit
1841 1841 revision to merge with must be provided.
1842 1842 """
1843 1843
1844 1844 if rev and node:
1845 1845 raise util.Abort(_("please specify just one revision"))
1846 1846 if not node:
1847 1847 node = rev
1848 1848
1849 1849 if not node:
1850 1850 heads = repo.heads()
1851 1851 if len(heads) > 2:
1852 1852 raise util.Abort(_('repo has %d heads - '
1853 1853 'please merge with an explicit rev') %
1854 1854 len(heads))
1855 1855 parent = repo.dirstate.parents()[0]
1856 1856 if len(heads) == 1:
1857 1857 msg = _('there is nothing to merge')
1858 1858 if parent != repo.lookup(repo.workingctx().branch()):
1859 1859 msg = _('%s - use "hg update" instead') % msg
1860 1860 raise util.Abort(msg)
1861 1861
1862 1862 if parent not in heads:
1863 1863 raise util.Abort(_('working dir not at a head rev - '
1864 1864 'use "hg update" or merge with an explicit rev'))
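        # exactly two heads here and parent is one of them: pick the other one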
1865 1865 node = parent == heads[0] and heads[-1] or heads[0]
1866 1866 return hg.merge(repo, node, force=force)
1867 1867
1868 1868 def outgoing(ui, repo, dest=None, **opts):
1869 1869 """show changesets not found in destination
1870 1870
1871 1871 Show changesets not found in the specified destination repository or
1872 1872 the default push location. These are the changesets that would be pushed
1873 1873 if a push was requested.
1874 1874
1875 1875 See pull for valid destination format details.
1876 1876 """
1877 1877 limit = cmdutil.loglimit(opts)
1878 1878 dest, revs, checkout = hg.parseurl(
1879 1879 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1880 1880 cmdutil.setremoteconfig(ui, opts)
1881 1881 if revs:
1882 1882 revs = [repo.lookup(rev) for rev in revs]
1883 1883
1884 1884 other = hg.repository(ui, dest)
1885 1885 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1886 1886 o = repo.findoutgoing(other, force=opts['force'])
1887 1887 if not o:
1888 1888 ui.status(_("no changes found\n"))
1889 1889 return 1
1890 1890 o = repo.changelog.nodesbetween(o, revs)[0]
1891 1891 if opts['newest_first']:
1892 1892 o.reverse()
1893 1893 displayer = cmdutil.show_changeset(ui, repo, opts)
1894 1894 count = 0
1895 1895 for n in o:
1896 1896 if count >= limit:
1897 1897 break
1898 1898 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1899 1899 if opts['no_merges'] and len(parents) == 2:
1900 1900 continue
1901 1901 count += 1
1902 1902 displayer.show(changenode=n)
1903 1903
1904 1904 def parents(ui, repo, file_=None, **opts):
1905 1905 """show the parents of the working dir or revision
1906 1906
1907 1907 Print the working directory's parent revisions. If a
1908 1908 revision is given via --rev, the parent of that revision
1909 1909 will be printed. If a file argument is given, the revision in
1910 1910 which the file was last changed (before the working directory
1911 1911 revision or the argument to --rev if given) is printed.
1912 1912 """
1913 1913 rev = opts.get('rev')
1914 1914 if rev:
1915 1915 ctx = repo.changectx(rev)
1916 1916 else:
1917 1917 ctx = repo.workingctx()
1918 1918
1919 1919 if file_:
1920 1920 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1921 1921 if anypats or len(files) != 1:
1922 1922 raise util.Abort(_('can only specify an explicit file name'))
1923 1923 file_ = files[0]
1924 1924 filenodes = []
1925 1925 for cp in ctx.parents():
1926 1926 if not cp:
1927 1927 continue
1928 1928 try:
1929 1929 filenodes.append(cp.filenode(file_))
1930 1930 except revlog.LookupError:
1931 1931 pass
1932 1932 if not filenodes:
1933 1933 raise util.Abort(_("'%s' not found in manifest!") % file_)
1934 1934 fl = repo.file(file_)
1935 1935 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1936 1936 else:
1937 1937 p = [cp.node() for cp in ctx.parents()]
1938 1938
1939 1939 displayer = cmdutil.show_changeset(ui, repo, opts)
1940 1940 for n in p:
1941 1941 if n != nullid:
1942 1942 displayer.show(changenode=n)
1943 1943
1944 1944 def paths(ui, repo, search=None):
1945 1945 """show definition of symbolic path names
1946 1946
1947 1947 Show definition of symbolic path name NAME. If no name is given, show
1948 1948 definition of available names.
1949 1949
1950 1950 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1951 1951 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1952 1952 """
1953 1953 if search:
1954 1954 for name, path in ui.configitems("paths"):
1955 1955 if name == search:
1956 1956 ui.write("%s\n" % path)
1957 1957 return
1958 1958 ui.warn(_("not found!\n"))
1959 1959 return 1
1960 1960 else:
1961 1961 for name, path in ui.configitems("paths"):
1962 1962 ui.write("%s = %s\n" % (name, path))
1963 1963
1964 1964 def postincoming(ui, repo, modheads, optupdate, checkout):
1965 1965 if modheads == 0:
1966 1966 return
1967 1967 if optupdate:
1968 1968 if modheads <= 1 or checkout:
1969 1969 return hg.update(repo, checkout)
1970 1970 else:
1971 1971 ui.status(_("not updating, since new heads added\n"))
1972 1972 if modheads > 1:
1973 1973 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1974 1974 else:
1975 1975 ui.status(_("(run 'hg update' to get a working copy)\n"))
1976 1976
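postincoming above centralizes the "update or just hint" decision shared by pull and unbundle: update the working directory when something came in and at most one head changed (or an explicit checkout was requested), otherwise print the heads/merge or update hint. A condensed restatement of just the update decision; the function name is mine:

def should_update_after_incoming(modheads, optupdate, checkout):
    # mirrors the branch structure of postincoming() above
    if modheads == 0:
        return False                # nothing was added
    return bool(optupdate) and (modheads == 1 or bool(checkout))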
1977 1977 def pull(ui, repo, source="default", **opts):
1978 1978 """pull changes from the specified source
1979 1979
1980 1980 Pull changes from a remote repository to a local one.
1981 1981
1982 1982 This finds all changes from the repository at the specified path
1983 1983 or URL and adds them to the local repository. By default, this
1984 1984 does not update the copy of the project in the working directory.
1985 1985
1986 1986 Valid URLs are of the form:
1987 1987
1988 1988 local/filesystem/path (or file://local/filesystem/path)
1989 1989 http://[user@]host[:port]/[path]
1990 1990 https://[user@]host[:port]/[path]
1991 1991 ssh://[user@]host[:port]/[path]
1992 1992 static-http://host[:port]/[path]
1993 1993
1994 1994 Paths in the local filesystem can either point to Mercurial
1995 1995 repositories or to bundle files (as created by 'hg bundle' or
1996 1996 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1997 1997 allows access to a Mercurial repository where you simply use a web
1998 1998 server to publish the .hg directory as static content.
1999 1999
2000 2000 An optional identifier after # indicates a particular branch, tag,
2001 2001 or changeset to pull.
2002 2002
2003 2003 Some notes about using SSH with Mercurial:
2004 2004 - SSH requires an accessible shell account on the destination machine
2005 2005 and a copy of hg in the remote path, or one specified with --remotecmd.
2006 2006 - path is relative to the remote user's home directory by default.
2007 2007 Use an extra slash at the start of a path to specify an absolute path:
2008 2008 ssh://example.com//tmp/repository
2009 2009 - Mercurial doesn't use its own compression via SSH; the right thing
2010 2010 to do is to configure it in your ~/.ssh/config, e.g.:
2011 2011 Host *.mylocalnetwork.example.com
2012 2012 Compression no
2013 2013 Host *
2014 2014 Compression yes
2015 2015 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2016 2016 with the --ssh command line option.
2017 2017 """
2018 2018 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
2019 2019 cmdutil.setremoteconfig(ui, opts)
2020 2020
2021 2021 other = hg.repository(ui, source)
2022 2022 ui.status(_('pulling from %s\n') % util.hidepassword(source))
2023 2023 if revs:
2024 2024 try:
2025 2025 revs = [other.lookup(rev) for rev in revs]
2026 2026 except repo.NoCapability:
2027 2027 error = _("Other repository doesn't support revision lookup, "
2028 2028 "so a rev cannot be specified.")
2029 2029 raise util.Abort(error)
2030 2030
2031 2031 modheads = repo.pull(other, heads=revs, force=opts['force'])
2032 2032 return postincoming(ui, repo, modheads, opts['update'], checkout)
2033 2033
2034 2034 def push(ui, repo, dest=None, **opts):
2035 2035 """push changes to the specified destination
2036 2036
2037 2037 Push changes from the local repository to the given destination.
2038 2038
2039 2039 This is the symmetrical operation for pull. It helps to move
2040 2040 changes from the current repository to a different one. If the
2041 2041 destination is local this is identical to a pull in that directory
2042 2042 from the current one.
2043 2043
2044 2044 By default, push will refuse to run if it detects the result would
2045 2045 increase the number of remote heads. This generally indicates that
2046 2046 the client has forgotten to sync and merge before pushing.
2047 2047
2048 2048 Valid URLs are of the form:
2049 2049
2050 2050 local/filesystem/path (or file://local/filesystem/path)
2051 2051 ssh://[user@]host[:port]/[path]
2052 2052 http://[user@]host[:port]/[path]
2053 2053 https://[user@]host[:port]/[path]
2054 2054
2055 2055 An optional identifier after # indicates a particular branch, tag,
2056 2056 or changeset to push.
2057 2057
2058 2058 Look at the help text for the pull command for important details
2059 2059 about ssh:// URLs.
2060 2060
2061 2061 Pushing to http:// and https:// URLs is only possible if this
2062 2062 feature is explicitly enabled on the remote Mercurial server.
2063 2063 """
2064 2064 dest, revs, checkout = hg.parseurl(
2065 2065 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2066 2066 cmdutil.setremoteconfig(ui, opts)
2067 2067
2068 2068 other = hg.repository(ui, dest)
2069 2069 ui.status('pushing to %s\n' % util.hidepassword(dest))
2070 2070 if revs:
2071 2071 revs = [repo.lookup(rev) for rev in revs]
2072 2072 r = repo.push(other, opts['force'], revs=revs)
2073 2073 return r == 0
2074 2074
2075 2075 def rawcommit(ui, repo, *pats, **opts):
2076 2076 """raw commit interface (DEPRECATED)
2077 2077
2078 2078 (DEPRECATED)
2079 2079 Lowlevel commit, for use in helper scripts.
2080 2080
2081 2081 This command is not intended to be used by normal users, as it is
2082 2082 primarily useful for importing from other SCMs.
2083 2083
2084 2084 This command is now deprecated and will be removed in a future
2085 2085 release; please use debugsetparents and commit instead.
2086 2086 """
2087 2087
2088 2088 ui.warn(_("(the rawcommit command is deprecated)\n"))
2089 2089
2090 2090 message = cmdutil.logmessage(opts)
2091 2091
2092 2092 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2093 2093 if opts['files']:
2094 2094 files += open(opts['files']).read().splitlines()
2095 2095
2096 2096 parents = [repo.lookup(p) for p in opts['parent']]
2097 2097
2098 2098 try:
2099 2099 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2100 2100 except ValueError, inst:
2101 2101 raise util.Abort(str(inst))
2102 2102
2103 2103 def recover(ui, repo):
2104 2104 """roll back an interrupted transaction
2105 2105
2106 2106 Recover from an interrupted commit or pull.
2107 2107
2108 2108 This command tries to fix the repository status after an interrupted
2109 2109 operation. It should only be necessary when Mercurial suggests it.
2110 2110 """
2111 2111 if repo.recover():
2112 2112 return hg.verify(repo)
2113 2113 return 1
2114 2114
2115 2115 def remove(ui, repo, *pats, **opts):
2116 2116 """remove the specified files on the next commit
2117 2117
2118 2118 Schedule the indicated files for removal from the repository.
2119 2119
2120 2120 This only removes files from the current branch, not from the
2121 2121 entire project history. If the files still exist in the working
2122 2122 directory, they will be deleted from it. If invoked with --after,
2123 2123 files are marked as removed, but not actually unlinked unless --force
2124 2124 is also given. Without exact file names, --after will only mark
2125 2125 files as removed if they are no longer in the working directory.
2126 2126
2127 2127 This command schedules the files to be removed at the next commit.
2128 2128 To undo a remove before that, see hg revert.
2129 2129
2130 2130 Modified files and added files are not removed by default. To
2131 2131 remove them, use the -f/--force option.
2132 2132 """
2133 2133 if not opts['after'] and not pats:
2134 2134 raise util.Abort(_('no files specified'))
2135 2135 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2136 2136 exact = dict.fromkeys(files)
2137 2137 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2138 2138 modified, added, removed, deleted, unknown = mardu
2139 2139 remove, forget = [], []
2140 2140 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2141 2141 reason = None
2142 2142 if abs in modified and not opts['force']:
2143 2143 reason = _('is modified (use -f to force removal)')
2144 2144 elif abs in added:
2145 2145 if opts['force']:
2146 2146 forget.append(abs)
2147 2147 continue
2148 2148 reason = _('has been marked for add (use -f to force removal)')
2149 2149 exact = 1 # force the message
2150 2150 elif abs not in repo.dirstate:
2151 2151 reason = _('is not managed')
2152 2152 elif opts['after'] and not exact and abs not in deleted:
2153 2153 continue
2154 2154 elif abs in removed:
2155 2155 continue
2156 2156 if reason:
2157 2157 if exact:
2158 2158 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2159 2159 else:
2160 2160 if ui.verbose or not exact:
2161 2161 ui.status(_('removing %s\n') % rel)
2162 2162 remove.append(abs)
2163 2163 repo.forget(forget)
2164 2164 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2165 2165
2166 2166 def rename(ui, repo, *pats, **opts):
2167 2167 """rename files; equivalent of copy + remove
2168 2168
2169 2169 Mark dest as copies of sources; mark sources for deletion. If
2170 2170 dest is a directory, copies are put in that directory. If dest is
2171 2171 a file, there can only be one source.
2172 2172
2173 2173 By default, this command copies the contents of files as they
2174 2174 stand in the working directory. If invoked with --after, the
2175 2175 operation is recorded, but no copying is performed.
2176 2176
2177 2177 This command takes effect in the next commit. To undo a rename
2178 2178 before that, see hg revert.
2179 2179 """
2180 2180 wlock = repo.wlock(False)
2181 2181 try:
2182 2182 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2183 2183 finally:
2184 2184 del wlock
2185 2185
2186 2186 def revert(ui, repo, *pats, **opts):
2187 2187 """restore individual files or dirs to an earlier state
2188 2188
2189 2189 (use update -r to check out earlier revisions; revert does not
2190 2190 change the working dir parents)
2191 2191
2192 2192 With no revision specified, revert the named files or directories
2193 2193 to the contents they had in the parent of the working directory.
2194 2194 This restores the contents of the affected files to an unmodified
2195 2195 state and unschedules adds, removes, copies, and renames. If the
2196 2196 working directory has two parents, you must explicitly specify the
2197 2197 revision to revert to.
2198 2198
2199 2199 Using the -r option, revert the given files or directories to their
2200 2200 contents as of a specific revision. This can be helpful to "roll
2201 2201 back" some or all of an earlier change.
2202 2202 See 'hg help dates' for a list of formats valid for -d/--date.
2203 2203
2204 2204 Revert modifies the working directory. It does not commit any
2205 2205 changes, or change the parent of the working directory. If you
2206 2206 revert to a revision other than the parent of the working
2207 2207 directory, the reverted files will thus appear modified
2208 2208 afterwards.
2209 2209
2210 2210 If a file has been deleted, it is restored. If the executable
2211 2211 mode of a file was changed, it is reset.
2212 2212
2213 2213 If names are given, all files matching the names are reverted.
2214 2214 If no arguments are given, no files are reverted.
2215 2215
2216 2216 Modified files are saved with a .orig suffix before reverting.
2217 2217 To disable these backups, use --no-backup.
2218 2218 """
2219 2219
2220 2220 if opts["date"]:
2221 2221 if opts["rev"]:
2222 2222 raise util.Abort(_("you can't specify a revision and a date"))
2223 2223 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2224 2224
2225 2225 if not pats and not opts['all']:
2226 2226 raise util.Abort(_('no files or directories specified; '
2227 2227 'use --all to revert the whole repo'))
2228 2228
2229 2229 parent, p2 = repo.dirstate.parents()
2230 2230 if not opts['rev'] and p2 != nullid:
2231 2231 raise util.Abort(_('uncommitted merge - please provide a '
2232 2232 'specific revision'))
2233 2233 ctx = repo.changectx(opts['rev'])
2234 2234 node = ctx.node()
2235 2235 mf = ctx.manifest()
2236 2236 if node == parent:
2237 2237 pmf = mf
2238 2238 else:
2239 2239 pmf = None
2240 2240
2241 2241 # need all matching names in dirstate and manifest of target rev,
2242 2242 # so have to walk both. do not print errors if files exist in one
2243 2243 # but not other.
2244 2244
2245 2245 names = {}
2246 2246
2247 2247 wlock = repo.wlock()
2248 2248 try:
2249 2249 # walk dirstate.
2250 2250 files = []
2251 2251 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2252 2252 badmatch=mf.has_key):
2253 2253 names[abs] = (rel, exact)
2254 2254 if src != 'b':
2255 2255 files.append(abs)
2256 2256
2257 2257 # walk target manifest.
2258 2258
2259 2259 def badmatch(path):
2260 2260 if path in names:
2261 2261 return True
2262 2262 path_ = path + '/'
2263 2263 for f in names:
2264 2264 if f.startswith(path_):
2265 2265 return True
2266 2266 return False
2267 2267
2268 2268 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2269 2269 badmatch=badmatch):
2270 2270 if abs in names or src == 'b':
2271 2271 continue
2272 2272 names[abs] = (rel, exact)
2273 2273
2274 2274 changes = repo.status(files=files, match=names.has_key)[:4]
2275 2275 modified, added, removed, deleted = map(dict.fromkeys, changes)
2276 2276
2277 2277 # if f is a rename, also revert the source
2278 2278 cwd = repo.getcwd()
2279 2279 for f in added:
2280 2280 src = repo.dirstate.copied(f)
2281 2281 if src and src not in names and repo.dirstate[src] == 'r':
2282 2282 removed[src] = None
2283 2283 names[src] = (repo.pathto(src, cwd), True)
2284 2284
2285 2285 def removeforget(abs):
2286 2286 if repo.dirstate[abs] == 'a':
2287 2287 return _('forgetting %s\n')
2288 2288 return _('removing %s\n')
2289 2289
2290 2290 revert = ([], _('reverting %s\n'))
2291 2291 add = ([], _('adding %s\n'))
2292 2292 remove = ([], removeforget)
2293 2293 undelete = ([], _('undeleting %s\n'))
2294 2294
2295 2295 disptable = (
2296 2296 # dispatch table:
2297 2297 # file state
2298 2298 # action if in target manifest
2299 2299 # action if not in target manifest
2300 2300 # make backup if in target manifest
2301 2301 # make backup if not in target manifest
2302 2302 (modified, revert, remove, True, True),
2303 2303 (added, revert, remove, True, False),
2304 2304 (removed, undelete, None, False, False),
2305 2305 (deleted, revert, remove, False, False),
2306 2306 )
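        # read row by row, the table above says:
        #   modified: revert (backup) if in target, else remove (backup)
        #   added:    revert (backup) if in target, else remove (no backup)
        #   removed:  undelete if in target, else nothing to do
        #   deleted:  revert if in target, else remove (never a backup)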
2307 2307
2308 2308 entries = names.items()
2309 2309 entries.sort()
2310 2310
2311 2311 for abs, (rel, exact) in entries:
2312 2312 mfentry = mf.get(abs)
2313 2313 target = repo.wjoin(abs)
2314 2314 def handle(xlist, dobackup):
2315 2315 xlist[0].append(abs)
2316 2316 if dobackup and not opts['no_backup'] and util.lexists(target):
2317 2317 bakname = "%s.orig" % rel
2318 2318 ui.note(_('saving current version of %s as %s\n') %
2319 2319 (rel, bakname))
2320 2320 if not opts.get('dry_run'):
2321 2321 util.copyfile(target, bakname)
2322 2322 if ui.verbose or not exact:
2323 2323 msg = xlist[1]
2324 2324 if not isinstance(msg, basestring):
2325 2325 msg = msg(abs)
2326 2326 ui.status(msg % rel)
2327 2327 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2328 2328 if abs not in table: continue
2329 2329 # file has changed in dirstate
2330 2330 if mfentry:
2331 2331 handle(hitlist, backuphit)
2332 2332 elif misslist is not None:
2333 2333 handle(misslist, backupmiss)
2334 2334 break
2335 2335 else:
2336 2336 if abs not in repo.dirstate:
2337 2337 if mfentry:
2338 2338 handle(add, True)
2339 2339 elif exact:
2340 2340 ui.warn(_('file not managed: %s\n') % rel)
2341 2341 continue
2342 2342 # file has not changed in dirstate
2343 2343 if node == parent:
2344 2344 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2345 2345 continue
2346 2346 if pmf is None:
2347 2347 # only need parent manifest in this unlikely case,
2348 2348 # so do not read by default
2349 2349 pmf = repo.changectx(parent).manifest()
2350 2350 if abs in pmf:
2351 2351 if mfentry:
2352 2352 # if version of file is same in parent and target
2353 2353 # manifests, do nothing
2354 2354 if (pmf[abs] != mfentry or
2355 2355 pmf.flags(abs) != mf.flags(abs)):
2356 2356 handle(revert, False)
2357 2357 else:
2358 2358 handle(remove, False)
2359 2359
2360 2360 if not opts.get('dry_run'):
2361 2361 def checkout(f):
2362 2362 fc = ctx[f]
2363 2363 repo.wwrite(f, fc.data(), fc.fileflags())
2364 2364
2365 2365 audit_path = util.path_auditor(repo.root)
2366 2366 for f in remove[0]:
2367 2367 if repo.dirstate[f] == 'a':
2368 2368 repo.dirstate.forget(f)
2369 2369 continue
2370 2370 audit_path(f)
2371 2371 try:
2372 2372 util.unlink(repo.wjoin(f))
2373 2373 except OSError:
2374 2374 pass
2375 2375 repo.dirstate.remove(f)
2376 2376
2377 2377 for f in revert[0]:
2378 2378 checkout(f)
2379 2379
2380 2380 for f in add[0]:
2381 2381 checkout(f)
2382 2382 repo.dirstate.add(f)
2383 2383
2384 2384 normal = repo.dirstate.normallookup
2385 2385 if node == parent and p2 == nullid:
2386 2386 normal = repo.dirstate.normal
2387 2387 for f in undelete[0]:
2388 2388 checkout(f)
2389 2389 normal(f)
2390 2390
2391 2391 finally:
2392 2392 del wlock
2393 2393
2394 2394 def rollback(ui, repo):
2395 2395 """roll back the last transaction
2396 2396
2397 2397 This command should be used with care. There is only one level of
2398 2398 rollback, and there is no way to undo a rollback. It will also
2399 2399 restore the dirstate at the time of the last transaction, losing
2400 2400 any dirstate changes since that time.
2401 2401
2402 2402 Transactions are used to encapsulate the effects of all commands
2403 2403 that create new changesets or propagate existing changesets into a
2404 2404 repository. For example, the following commands are transactional,
2405 2405 and their effects can be rolled back:
2406 2406
2407 2407 commit
2408 2408 import
2409 2409 pull
2410 2410 push (with this repository as destination)
2411 2411 unbundle
2412 2412
2413 2413 This command is not intended for use on public repositories. Once
2414 2414 changes are visible for pull by other users, rolling a transaction
2415 2415 back locally is ineffective (someone else may already have pulled
2416 2416 the changes). Furthermore, a race is possible with readers of the
2417 2417 repository; for example an in-progress pull from the repository
2418 2418 may fail if a rollback is performed.
2419 2419 """
2420 2420 repo.rollback()
2421 2421
2422 2422 def root(ui, repo):
2423 2423 """print the root (top) of the current working dir
2424 2424
2425 2425 Print the root directory of the current repository.
2426 2426 """
2427 2427 ui.write(repo.root + "\n")
2428 2428
2429 2429 def serve(ui, repo, **opts):
2430 2430 """export the repository via HTTP
2431 2431
2432 2432 Start a local HTTP repository browser and pull server.
2433 2433
2434 2434 By default, the server logs accesses to stdout and errors to
2435 2435 stderr. Use the "-A" and "-E" options to log to files.
2436 2436 """
2437 2437
2438 2438 if opts["stdio"]:
2439 2439 if repo is None:
2440 2440 raise hg.RepoError(_("There is no Mercurial repository here"
2441 2441 " (.hg not found)"))
2442 2442 s = sshserver.sshserver(ui, repo)
2443 2443 s.serve_forever()
2444 2444
2445 2445 parentui = ui.parentui or ui
2446 2446 optlist = ("name templates style address port prefix ipv6"
2447 2447 " accesslog errorlog webdir_conf certificate")
2448 2448 for o in optlist.split():
2449 2449 if opts[o]:
2450 2450 parentui.setconfig("web", o, str(opts[o]))
2451 2451 if (repo is not None) and (repo.ui != parentui):
2452 2452 repo.ui.setconfig("web", o, str(opts[o]))
2453 2453
2454 2454 if repo is None and not ui.config("web", "webdir_conf"):
2455 2455 raise hg.RepoError(_("There is no Mercurial repository here"
2456 2456 " (.hg not found)"))
2457 2457
2458 2458 class service:
2459 2459 def init(self):
2460 2460 util.set_signal_handler()
2461 2461 try:
2462 2462 self.httpd = hgweb.server.create_server(parentui, repo)
2463 2463 except socket.error, inst:
2464 2464 raise util.Abort(_('cannot start server: ') + inst.args[1])
2465 2465
2466 2466 if not ui.verbose: return
2467 2467
2468 2468 if self.httpd.prefix:
2469 2469 prefix = self.httpd.prefix.strip('/') + '/'
2470 2470 else:
2471 2471 prefix = ''
2472 2472
2473 2473 if self.httpd.port != 80:
2474 2474 ui.status(_('listening at http://%s:%d/%s\n') %
2475 2475 (self.httpd.addr, self.httpd.port, prefix))
2476 2476 else:
2477 2477 ui.status(_('listening at http://%s/%s\n') %
2478 2478 (self.httpd.addr, prefix))
2479 2479
2480 2480 def run(self):
2481 2481 self.httpd.serve_forever()
2482 2482
2483 2483 service = service()
2484 2484
2485 2485 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2486 2486
2487 2487 def status(ui, repo, *pats, **opts):
2488 2488 """show changed files in the working directory
2489 2489
2490 2490 Show status of files in the repository. If names are given, only
2491 2491 files that match are shown. Files that are clean or ignored or the
2492 2492 source of a copy/move operation are not listed unless -c (clean),
2493 2493 -i (ignored), -C (copies) or -A is given. Unless options described
2494 2494 with "show only ..." are given, the options -mardu are used.
2495 2495
2496 2496 Option -q/--quiet hides untracked (unknown and ignored) files
2497 2497 unless explicitly requested with -u/--unknown or -i/--ignored.
2498 2498
2499 2499 NOTE: status may appear to disagree with diff if permissions have
2500 2500 changed or a merge has occurred. The standard diff format does not
2501 2501 report permission changes and diff only reports changes relative
2502 2502 to one merge parent.
2503 2503
2504 2504 If one revision is given, it is used as the base revision.
2505 2505 If two revisions are given, the difference between them is shown.
2506 2506
2507 2507 The codes used to show the status of files are:
2508 2508 M = modified
2509 2509 A = added
2510 2510 R = removed
2511 2511 C = clean
2512 2512 ! = deleted, but still tracked
2513 2513 ? = not tracked
2514 2514 I = ignored
2515 2515 = the previous added file was copied from here
2516 2516 """
2517 2517
2518 2518 all = opts['all']
2519 2519 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2520 2520
2521 2521 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2522 2522 cwd = (pats and repo.getcwd()) or ''
2523 2523 modified, added, removed, deleted, unknown, ignored, clean = [
2524 2524 n for n in repo.status(node1=node1, node2=node2, files=files,
2525 2525 match=matchfn,
2526 2526 list_ignored=opts['ignored']
2527 2527 or all and not ui.quiet,
2528 2528 list_clean=opts['clean'] or all,
2529 2529 list_unknown=opts['unknown']
2530 2530 or not (ui.quiet or
2531 2531 opts['modified'] or
2532 2532 opts['added'] or
2533 2533 opts['removed'] or
2534 2534 opts['deleted'] or
2535 2535 opts['ignored']))]
2536 2536
2537 2537 changetypes = (('modified', 'M', modified),
2538 2538 ('added', 'A', added),
2539 2539 ('removed', 'R', removed),
2540 2540 ('deleted', '!', deleted),
2541 2541 ('unknown', '?', unknown),
2542 2542 ('ignored', 'I', ignored))
2543 2543
2544 2544 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2545 2545
2546 2546 end = opts['print0'] and '\0' or '\n'
2547 2547
2548 2548 for opt, char, changes in ([ct for ct in explicit_changetypes
2549 2549 if all or opts[ct[0]]]
2550 2550 or changetypes):
2551 2551
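        # the doubled '%%s' survives this substitution as a literal '%s',
        # which is filled with the file path in the ui.write() call below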
2552 2552 if opts['no_status']:
2553 2553 format = "%%s%s" % end
2554 2554 else:
2555 2555 format = "%s %%s%s" % (char, end)
2556 2556
2557 2557 for f in changes:
2558 2558 ui.write(format % repo.pathto(f, cwd))
2559 2559 if ((all or opts.get('copies')) and not opts.get('no_status')):
2560 2560 copied = repo.dirstate.copied(f)
2561 2561 if copied:
2562 2562 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2563 2563
2564 2564 def tag(ui, repo, name, rev_=None, **opts):
2565 2565 """add a tag for the current or given revision
2566 2566
2567 2567 Name a particular revision using <name>.
2568 2568
2569 2569 Tags are used to name particular revisions of the repository and are
2570 2570 very useful to compare different revisions, to go back to significant
2571 2571 earlier versions or to mark branch points as releases, etc.
2572 2572
2573 2573 If no revision is given, the parent of the working directory is used,
2574 2574 or tip if no revision is checked out.
2575 2575
2576 2576 To facilitate version control, distribution, and merging of tags,
2577 2577 they are stored as a file named ".hgtags" which is managed
2578 2578 similarly to other project files and can be hand-edited if
2579 2579 necessary. The file '.hg/localtags' is used for local tags (not
2580 2580 shared among repositories).
2581 2581
2582 2582 See 'hg help dates' for a list of formats valid for -d/--date.
2583 2583 """
2584 2584 if name in ['tip', '.', 'null']:
2585 2585 raise util.Abort(_("the name '%s' is reserved") % name)
2586 2586 if rev_ is not None:
2587 2587 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2588 2588 "please use 'hg tag [-r REV] NAME' instead\n"))
2589 2589 if opts['rev']:
2590 2590 raise util.Abort(_("use only one form to specify the revision"))
2591 2591 if opts['rev'] and opts['remove']:
2592 2592 raise util.Abort(_("--rev and --remove are incompatible"))
2593 2593 if opts['rev']:
2594 2594 rev_ = opts['rev']
2595 2595 message = opts['message']
2596 2596 if opts['remove']:
2597 2597 tagtype = repo.tagtype(name)
2598 2598
2599 2599 if not tagtype:
2600 2600 raise util.Abort(_('tag %s does not exist') % name)
2601 2601 if opts['local'] and tagtype == 'global':
2602 2602 raise util.Abort(_('%s tag is global') % name)
2603 2603 if not opts['local'] and tagtype == 'local':
2604 2604 raise util.Abort(_('%s tag is local') % name)
2605 2605
2606 2606 rev_ = nullid
2607 2607 if not message:
2608 2608 message = _('Removed tag %s') % name
2609 2609 elif name in repo.tags() and not opts['force']:
2610 2610 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2611 2611 % name)
2612 2612 if not rev_ and repo.dirstate.parents()[1] != nullid:
2613 2613 raise util.Abort(_('uncommitted merge - please provide a '
2614 2614 'specific revision'))
2615 2615 r = repo.changectx(rev_).node()
2616 2616
2617 2617 if not message:
2618 2618 message = _('Added tag %s for changeset %s') % (name, short(r))
2619 2619
2620 2620 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2621 2621
2622 2622 def tags(ui, repo):
2623 2623 """list repository tags
2624 2624
2625 2625 List the repository tags.
2626 2626
2627 2627 This lists both regular and local tags. When the -v/--verbose switch
2628 2628 is used, a third column "local" is printed for local tags.
2629 2629 """
2630 2630
2631 2631 l = repo.tagslist()
2632 2632 l.reverse()
2633 2633 hexfunc = ui.debugflag and hex or short
2634 2634 tagtype = ""
2635 2635
2636 2636 for t, n in l:
2637 2637 if ui.quiet:
2638 2638 ui.write("%s\n" % t)
2639 2639 continue
2640 2640
2641 2641 try:
2642 2642 hn = hexfunc(n)
2643 2643 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2644 2644 except revlog.LookupError:
2645 2645 r = " ?:%s" % hn
2646 2646 else:
2647 2647 spaces = " " * (30 - util.locallen(t))
2648 2648 if ui.verbose:
2649 2649 if repo.tagtype(t) == 'local':
2650 2650 tagtype = " local"
2651 2651 else:
2652 2652 tagtype = ""
2653 2653 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2654 2654
2655 2655 def tip(ui, repo, **opts):
2656 2656 """show the tip revision
2657 2657
2658 2658 Show the tip revision.
2659 2659 """
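    # nullrev is -1, so this displays revision changelog.count() - 1, the tip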
2660 2660 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2661 2661
2662 2662 def unbundle(ui, repo, fname1, *fnames, **opts):
2663 2663 """apply one or more changegroup files
2664 2664
2665 2665 Apply one or more compressed changegroup files generated by the
2666 2666 bundle command.
2667 2667 """
2668 2668 fnames = (fname1,) + fnames
2669 2669
2670 2670 lock = None
2671 2671 try:
2672 2672 lock = repo.lock()
2673 2673 for fname in fnames:
2674 2674 if os.path.exists(fname):
2675 2675 f = open(fname, "rb")
2676 2676 else:
2677 2677 f = urllib.urlopen(fname)
2678 2678 gen = changegroup.readbundle(f, fname)
2679 2679 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2680 2680 finally:
2681 2681 del lock
2682 2682
2683 2683 return postincoming(ui, repo, modheads, opts['update'], None)
2684 2684
2685 2685 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2686 2686 """update working directory
2687 2687
2688 2688 Update the working directory to the specified revision, or the
2689 2689 tip of the current branch if none is specified.
2690 2690 See 'hg help dates' for a list of formats valid for -d/--date.
2691 2691
2692 2692 If there are no outstanding changes in the working directory and
2693 2693 there is a linear relationship between the current version and the
2694 2694 requested version, the result is the requested version.
2695 2695
2696 2696 To merge the working directory with another revision, use the
2697 2697 merge command.
2698 2698
2699 2699 By default, update will refuse to run if doing so would require
2700 2700 discarding local changes.
2701 2701 """
2702 2702 if rev and node:
2703 2703 raise util.Abort(_("please specify just one revision"))
2704 2704
2705 2705 if not rev:
2706 2706 rev = node
2707 2707
2708 2708 if date:
2709 2709 if rev:
2710 2710 raise util.Abort(_("you can't specify a revision and a date"))
2711 2711 rev = cmdutil.finddate(ui, repo, date)
2712 2712
2713 2713 if clean:
2714 2714 return hg.clean(repo, rev)
2715 2715 else:
2716 2716 return hg.update(repo, rev)
2717 2717
2718 2718 def verify(ui, repo):
2719 2719 """verify the integrity of the repository
2720 2720
2721 2721 Verify the integrity of the current repository.
2722 2722
2723 2723 This will perform an extensive check of the repository's
2724 2724 integrity, validating the hashes and checksums of each entry in
2725 2725 the changelog, manifest, and tracked files, as well as the
2726 2726 integrity of their crosslinks and indices.
2727 2727 """
2728 2728 return hg.verify(repo)
2729 2729
2730 2730 def version_(ui):
2731 2731 """output version and copyright information"""
2732 2732 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2733 2733 % version.get_version())
2734 2734 ui.status(_(
2735 2735 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2736 2736 "This is free software; see the source for copying conditions. "
2737 2737 "There is NO\nwarranty; "
2738 2738 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2739 2739 ))
2740 2740
2741 2741 # Command options and aliases are listed here, alphabetically
2742 2742
2743 2743 globalopts = [
2744 2744 ('R', 'repository', '',
2745 2745 _('repository root directory or symbolic path name')),
2746 2746 ('', 'cwd', '', _('change working directory')),
2747 2747 ('y', 'noninteractive', None,
2748 2748 _('do not prompt, assume \'yes\' for any required answers')),
2749 2749 ('q', 'quiet', None, _('suppress output')),
2750 2750 ('v', 'verbose', None, _('enable additional output')),
2751 2751 ('', 'config', [], _('set/override config option')),
2752 2752 ('', 'debug', None, _('enable debugging output')),
2753 2753 ('', 'debugger', None, _('start debugger')),
2754 2754 ('', 'encoding', util._encoding, _('set the charset encoding')),
2755 2755 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2756 2756 ('', 'lsprof', None, _('print improved command execution profile')),
2757 2757 ('', 'traceback', None, _('print traceback on exception')),
2758 2758 ('', 'time', None, _('time how long the command takes')),
2759 2759 ('', 'profile', None, _('print command execution profile')),
2760 2760 ('', 'version', None, _('output version information and exit')),
2761 2761 ('h', 'help', None, _('display help and exit')),
2762 2762 ]
2763 2763
2764 2764 dryrunopts = [('n', 'dry-run', None,
2765 2765 _('do not perform actions, just print output'))]
2766 2766
2767 2767 remoteopts = [
2768 2768 ('e', 'ssh', '', _('specify ssh command to use')),
2769 2769 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2770 2770 ]
2771 2771
2772 2772 walkopts = [
2773 2773 ('I', 'include', [], _('include names matching the given patterns')),
2774 2774 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2775 2775 ]
2776 2776
2777 2777 commitopts = [
2778 2778 ('m', 'message', '', _('use <text> as commit message')),
2779 2779 ('l', 'logfile', '', _('read commit message from <file>')),
2780 2780 ]
2781 2781
2782 2782 commitopts2 = [
2783 2783 ('d', 'date', '', _('record datecode as commit date')),
2784 2784 ('u', 'user', '', _('record user as committer')),
2785 2785 ]
2786 2786
2787 2787 templateopts = [
2788 2788 ('', 'style', '', _('display using template map file')),
2789 2789 ('', 'template', '', _('display with template')),
2790 2790 ]
2791 2791
2792 2792 logopts = [
2793 2793 ('p', 'patch', None, _('show patch')),
2794 2794 ('l', 'limit', '', _('limit number of changes displayed')),
2795 2795 ('M', 'no-merges', None, _('do not show merges')),
2796 2796 ] + templateopts
2797 2797
2798 2798 table = {
2799 2799 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2800 2800 "addremove":
2801 2801 (addremove,
2802 2802 [('s', 'similarity', '',
2803 2803 _('guess renamed files by similarity (0<=s<=100)')),
2804 2804 ] + walkopts + dryrunopts,
2805 2805 _('hg addremove [OPTION]... [FILE]...')),
2806 2806 "^annotate|blame":
2807 2807 (annotate,
2808 2808 [('r', 'rev', '', _('annotate the specified revision')),
2809 2809 ('f', 'follow', None, _('follow file copies and renames')),
2810 2810 ('a', 'text', None, _('treat all files as text')),
2811 2811 ('u', 'user', None, _('list the author (long with -v)')),
2812 2812 ('d', 'date', None, _('list the date (short with -q)')),
2813 2813 ('n', 'number', None, _('list the revision number (default)')),
2814 2814 ('c', 'changeset', None, _('list the changeset')),
2815 2815 ('l', 'line-number', None,
2816 2816 _('show line number at the first appearance'))
2817 2817 ] + walkopts,
2818 2818 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2819 2819 "archive":
2820 2820 (archive,
2821 2821 [('', 'no-decode', None, _('do not pass files through decoders')),
2822 2822 ('p', 'prefix', '', _('directory prefix for files in archive')),
2823 2823 ('r', 'rev', '', _('revision to distribute')),
2824 2824 ('t', 'type', '', _('type of distribution to create')),
2825 2825 ] + walkopts,
2826 2826 _('hg archive [OPTION]... DEST')),
2827 2827 "backout":
2828 2828 (backout,
2829 2829 [('', 'merge', None,
2830 2830 _('merge with old dirstate parent after backout')),
2831 2831 ('', 'parent', '', _('parent to choose when backing out merge')),
2832 2832 ('r', 'rev', '', _('revision to backout')),
2833 2833 ] + walkopts + commitopts + commitopts2,
2834 2834 _('hg backout [OPTION]... [-r] REV')),
2835 2835 "bisect":
2836 2836 (bisect,
2837 2837 [('r', 'reset', False, _('reset bisect state')),
2838 2838 ('g', 'good', False, _('mark changeset good')),
2839 2839 ('b', 'bad', False, _('mark changeset bad')),
2840 2840 ('s', 'skip', False, _('skip testing changeset')),
2841 2841 ('U', 'noupdate', False, _('do not update to target'))],
2842 2842 _("hg bisect [-gbsr] [REV]")),
2843 2843 "branch":
2844 2844 (branch,
2845 2845 [('f', 'force', None,
2846 2846 _('set branch name even if it shadows an existing branch'))],
2847 2847 _('hg branch [-f] [NAME]')),
2848 2848 "branches":
2849 2849 (branches,
2850 2850 [('a', 'active', False,
2851 2851 _('show only branches that have unmerged heads'))],
2852 2852 _('hg branches [-a]')),
2853 2853 "bundle":
2854 2854 (bundle,
2855 2855 [('f', 'force', None,
2856 2856 _('run even when remote repository is unrelated')),
2857 2857 ('r', 'rev', [],
2858 2858 _('a changeset you would like to bundle')),
2859 2859 ('', 'base', [],
2860 2860 _('a base changeset to specify instead of a destination')),
2861 2861 ('a', 'all', None,
2862 2862 _('bundle all changesets in the repository')),
2863 2863 ] + remoteopts,
2864 2864 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
2865 2865 "cat":
2866 2866 (cat,
2867 2867 [('o', 'output', '', _('print output to file with formatted name')),
2868 2868 ('r', 'rev', '', _('print the given revision')),
2869 2869 ('', 'decode', None, _('apply any matching decode filter')),
2870 2870 ] + walkopts,
2871 2871 _('hg cat [OPTION]... FILE...')),
2872 2872 "^clone":
2873 2873 (clone,
2874 2874 [('U', 'noupdate', None, _('do not update the new working directory')),
2875 2875 ('r', 'rev', [],
2876 2876 _('a changeset you would like to have after cloning')),
2877 2877 ('', 'pull', None, _('use pull protocol to copy metadata')),
2878 2878 ('', 'uncompressed', None,
2879 2879 _('use uncompressed transfer (fast over LAN)')),
2880 2880 ] + remoteopts,
2881 2881 _('hg clone [OPTION]... SOURCE [DEST]')),
2882 2882 "^commit|ci":
2883 2883 (commit,
2884 2884 [('A', 'addremove', None,
2885 2885 _('mark new/missing files as added/removed before committing')),
2886 2886 ] + walkopts + commitopts + commitopts2,
2887 2887 _('hg commit [OPTION]... [FILE]...')),
2888 2888 "copy|cp":
2889 2889 (copy,
2890 2890 [('A', 'after', None, _('record a copy that has already occurred')),
2891 2891 ('f', 'force', None,
2892 2892 _('forcibly copy over an existing managed file')),
2893 2893 ] + walkopts + dryrunopts,
2894 2894 _('hg copy [OPTION]... [SOURCE]... DEST')),
2895 2895 "debugancestor": (debugancestor, [],
2896 2896 _('hg debugancestor [INDEX] REV1 REV2')),
2897 2897 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2898 2898 "debugcomplete":
2899 2899 (debugcomplete,
2900 2900 [('o', 'options', None, _('show the command options'))],
2901 2901 _('hg debugcomplete [-o] CMD')),
2902 2902 "debugdate":
2903 2903 (debugdate,
2904 2904 [('e', 'extended', None, _('try extended date formats'))],
2905 2905 _('hg debugdate [-e] DATE [RANGE]')),
2906 2906 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2907 2907 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2908 2908 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2909 2909 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2910 2910 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2911 2911 "debugrawcommit|rawcommit":
2912 2912 (rawcommit,
2913 2913 [('p', 'parent', [], _('parent')),
2914 2914 ('F', 'files', '', _('file list'))
2915 2915 ] + commitopts + commitopts2,
2916 2916 _('hg debugrawcommit [OPTION]... [FILE]...')),
2917 2917 "debugrebuildstate":
2918 2918 (debugrebuildstate,
2919 2919 [('r', 'rev', '', _('revision to rebuild to'))],
2920 2920 _('hg debugrebuildstate [-r REV] [REV]')),
2921 2921 "debugrename":
2922 2922 (debugrename,
2923 2923 [('r', 'rev', '', _('revision to debug'))],
2924 2924 _('hg debugrename [-r REV] FILE')),
2925 2925 "debugsetparents":
2926 2926 (debugsetparents,
2927 2927 [],
2928 2928 _('hg debugsetparents REV1 [REV2]')),
2929 2929 "debugstate": (debugstate, [], _('hg debugstate')),
2930 2930 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2931 2931 "^diff":
2932 2932 (diff,
2933 2933 [('r', 'rev', [], _('revision')),
2934 2934 ('a', 'text', None, _('treat all files as text')),
2935 2935 ('p', 'show-function', None,
2936 2936 _('show which function each change is in')),
2937 2937 ('g', 'git', None, _('use git extended diff format')),
2938 2938 ('', 'nodates', None, _("don't include dates in diff headers")),
2939 2939 ('w', 'ignore-all-space', None,
2940 2940 _('ignore white space when comparing lines')),
2941 2941 ('b', 'ignore-space-change', None,
2942 2942 _('ignore changes in the amount of white space')),
2943 2943 ('B', 'ignore-blank-lines', None,
2944 2944 _('ignore changes whose lines are all blank')),
2945 2945 ('U', 'unified', 3,
2946 2946 _('number of lines of context to show'))
2947 2947 ] + walkopts,
2948 2948 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2949 2949 "^export":
2950 2950 (export,
2951 2951 [('o', 'output', '', _('print output to file with formatted name')),
2952 2952 ('a', 'text', None, _('treat all files as text')),
2953 2953 ('g', 'git', None, _('use git extended diff format')),
2954 2954 ('', 'nodates', None, _("don't include dates in diff headers")),
2955 2955 ('', 'switch-parent', None, _('diff against the second parent'))],
2956 2956 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2957 2957 "grep":
2958 2958 (grep,
2959 2959 [('0', 'print0', None, _('end fields with NUL')),
2960 2960 ('', 'all', None, _('print all revisions that match')),
2961 2961 ('f', 'follow', None,
2962 2962 _('follow changeset history, or file history across copies and renames')),
2963 2963 ('i', 'ignore-case', None, _('ignore case when matching')),
2964 2964 ('l', 'files-with-matches', None,
2965 2965 _('print only filenames and revs that match')),
2966 2966 ('n', 'line-number', None, _('print matching line numbers')),
2967 2967 ('r', 'rev', [], _('search in given revision range')),
2968 2968 ('u', 'user', None, _('list the author (long with -v)')),
2969 2969 ('d', 'date', None, _('list the date (short with -q)')),
2970 2970 ] + walkopts,
2971 2971 _('hg grep [OPTION]... PATTERN [FILE]...')),
2972 2972 "heads":
2973 2973 (heads,
2974 2974 [('r', 'rev', '', _('show only heads which are descendants of rev')),
2975 2975 ] + templateopts,
2976 2976 _('hg heads [-r REV] [REV]...')),
2977 2977 "help": (help_, [], _('hg help [COMMAND]')),
2978 2978 "identify|id":
2979 2979 (identify,
2980 2980 [('r', 'rev', '', _('identify the specified rev')),
2981 2981 ('n', 'num', None, _('show local revision number')),
2982 2982 ('i', 'id', None, _('show global revision id')),
2983 2983 ('b', 'branch', None, _('show branch')),
2984 2984 ('t', 'tags', None, _('show tags'))],
2985 2985 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2986 2986 "import|patch":
2987 2987 (import_,
2988 2988 [('p', 'strip', 1,
2989 2989 _('directory strip option for patch. This has the same\n'
2990 2990 'meaning as the corresponding patch option')),
2991 2991 ('b', 'base', '', _('base path')),
2992 2992 ('f', 'force', None,
2993 2993 _('skip check for outstanding uncommitted changes')),
2994 2994 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2995 2995 ('', 'exact', None,
2996 2996 _('apply patch to the nodes from which it was generated')),
2997 2997 ('', 'import-branch', None,
2998 2998 _('Use any branch information in patch (implied by --exact)'))] +
2999 2999 commitopts + commitopts2,
3000 3000 _('hg import [OPTION]... PATCH...')),
3001 3001 "incoming|in":
3002 3002 (incoming,
3003 3003 [('f', 'force', None,
3004 3004 _('run even when remote repository is unrelated')),
3005 3005 ('n', 'newest-first', None, _('show newest record first')),
3006 3006 ('', 'bundle', '', _('file to store the bundles into')),
3007 3007 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3008 3008 ] + logopts + remoteopts,
3009 3009 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3010 3010 ' [--bundle FILENAME] [SOURCE]')),
3011 3011 "^init":
3012 3012 (init,
3013 3013 remoteopts,
3014 3014 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3015 3015 "locate":
3016 3016 (locate,
3017 3017 [('r', 'rev', '', _('search the repository as it stood at rev')),
3018 3018 ('0', 'print0', None,
3019 3019 _('end filenames with NUL, for use with xargs')),
3020 3020 ('f', 'fullpath', None,
3021 3021 _('print complete paths from the filesystem root')),
3022 3022 ] + walkopts,
3023 3023 _('hg locate [OPTION]... [PATTERN]...')),
3024 3024 "^log|history":
3025 3025 (log,
3026 3026 [('f', 'follow', None,
3027 3027 _('follow changeset history, or file history across copies and renames')),
3028 3028 ('', 'follow-first', None,
3029 3029 _('only follow the first parent of merge changesets')),
3030 3030 ('d', 'date', '', _('show revs matching date spec')),
3031 3031 ('C', 'copies', None, _('show copied files')),
3032 3032 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3033 3033 ('r', 'rev', [], _('show the specified revision or range')),
3034 3034 ('', 'removed', None, _('include revs where files were removed')),
3035 3035 ('m', 'only-merges', None, _('show only merges')),
3036 3036 ('b', 'only-branch', [],
3037 3037 _('show only changesets within the given named branch')),
3038 3038 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3039 3039 ] + logopts + walkopts,
3040 3040 _('hg log [OPTION]... [FILE]')),
3041 3041 "manifest":
3042 3042 (manifest,
3043 3043 [('r', 'rev', '', _('revision to display'))],
3044 3044 _('hg manifest [-r REV]')),
3045 3045 "^merge":
3046 3046 (merge,
3047 3047 [('f', 'force', None, _('force a merge with outstanding changes')),
3048 3048 ('r', 'rev', '', _('revision to merge')),
3049 3049 ],
3050 3050 _('hg merge [-f] [[-r] REV]')),
3051 3051 "outgoing|out":
3052 3052 (outgoing,
3053 3053 [('f', 'force', None,
3054 3054 _('run even when remote repository is unrelated')),
3055 3055 ('r', 'rev', [], _('a specific revision you would like to push')),
3056 3056 ('n', 'newest-first', None, _('show newest record first')),
3057 3057 ] + logopts + remoteopts,
3058 3058 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3059 3059 "^parents":
3060 3060 (parents,
3061 3061 [('r', 'rev', '', _('show parents from the specified rev')),
3062 3062 ] + templateopts,
3063 3063 _('hg parents [-r REV] [FILE]')),
3064 3064 "paths": (paths, [], _('hg paths [NAME]')),
3065 3065 "^pull":
3066 3066 (pull,
3067 3067 [('u', 'update', None,
3068 3068 _('update to new tip if changesets were pulled')),
3069 3069 ('f', 'force', None,
3070 3070 _('run even when remote repository is unrelated')),
3071 3071 ('r', 'rev', [],
3072 3072 _('a specific revision up to which you would like to pull')),
3073 3073 ] + remoteopts,
3074 3074 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3075 3075 "^push":
3076 3076 (push,
3077 3077 [('f', 'force', None, _('force push')),
3078 3078 ('r', 'rev', [], _('a specific revision you would like to push')),
3079 3079 ] + remoteopts,
3080 3080 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3081 3081 "recover": (recover, [], _('hg recover')),
3082 3082 "^remove|rm":
3083 3083 (remove,
3084 3084 [('A', 'after', None, _('record remove without deleting')),
3085 3085 ('f', 'force', None, _('remove file even if modified')),
3086 3086 ] + walkopts,
3087 3087 _('hg remove [OPTION]... FILE...')),
3088 3088 "rename|mv":
3089 3089 (rename,
3090 3090 [('A', 'after', None, _('record a rename that has already occurred')),
3091 3091 ('f', 'force', None,
3092 3092 _('forcibly copy over an existing managed file')),
3093 3093 ] + walkopts + dryrunopts,
3094 3094 _('hg rename [OPTION]... SOURCE... DEST')),
3095 3095 "revert":
3096 3096 (revert,
3097 3097 [('a', 'all', None, _('revert all changes when no arguments given')),
3098 3098 ('d', 'date', '', _('tipmost revision matching date')),
3099 3099 ('r', 'rev', '', _('revision to revert to')),
3100 3100 ('', 'no-backup', None, _('do not save backup copies of files')),
3101 3101 ] + walkopts + dryrunopts,
3102 3102 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3103 3103 "rollback": (rollback, [], _('hg rollback')),
3104 3104 "root": (root, [], _('hg root')),
3105 3105 "^serve":
3106 3106 (serve,
3107 3107 [('A', 'accesslog', '', _('name of access log file to write to')),
3108 3108 ('d', 'daemon', None, _('run server in background')),
3109 3109 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3110 3110 ('E', 'errorlog', '', _('name of error log file to write to')),
3111 3111 ('p', 'port', 0, _('port to use (default: 8000)')),
3112 3112 ('a', 'address', '', _('address to use')),
3113 3113 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3114 3114 ('n', 'name', '',
3115 3115 _('name to show in web pages (default: working dir)')),
3116 3116 ('', 'webdir-conf', '', _('name of the webdir config file'
3117 3117 ' (serve more than one repo)')),
3118 3118 ('', 'pid-file', '', _('name of file to write process ID to')),
3119 3119 ('', 'stdio', None, _('for remote clients')),
3120 3120 ('t', 'templates', '', _('web templates to use')),
3121 3121 ('', 'style', '', _('template style to use')),
3122 3122 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3123 3123 ('', 'certificate', '', _('SSL certificate file'))],
3124 3124 _('hg serve [OPTION]...')),
3125 3125 "showconfig|debugconfig":
3126 3126 (showconfig,
3127 3127 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3128 3128 _('hg showconfig [-u] [NAME]...')),
3129 3129 "^status|st":
3130 3130 (status,
3131 3131 [('A', 'all', None, _('show status of all files')),
3132 3132 ('m', 'modified', None, _('show only modified files')),
3133 3133 ('a', 'added', None, _('show only added files')),
3134 3134 ('r', 'removed', None, _('show only removed files')),
3135 3135 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3136 3136 ('c', 'clean', None, _('show only files without changes')),
3137 3137 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3138 3138 ('i', 'ignored', None, _('show only ignored files')),
3139 3139 ('n', 'no-status', None, _('hide status prefix')),
3140 3140 ('C', 'copies', None, _('show source of copied files')),
3141 3141 ('0', 'print0', None,
3142 3142 _('end filenames with NUL, for use with xargs')),
3143 3143 ('', 'rev', [], _('show difference from revision')),
3144 3144 ] + walkopts,
3145 3145 _('hg status [OPTION]... [FILE]...')),
3146 3146 "tag":
3147 3147 (tag,
3148 3148 [('f', 'force', None, _('replace existing tag')),
3149 3149 ('l', 'local', None, _('make the tag local')),
3150 3150 ('r', 'rev', '', _('revision to tag')),
3151 3151 ('', 'remove', None, _('remove a tag')),
3152 3152 # -l/--local is already there, commitopts cannot be used
3153 3153 ('m', 'message', '', _('use <text> as commit message')),
3154 3154 ] + commitopts2,
3155 3155 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3156 3156 "tags": (tags, [], _('hg tags')),
3157 3157 "tip":
3158 3158 (tip,
3159 3159 [('p', 'patch', None, _('show patch')),
3160 3160 ] + templateopts,
3161 3161 _('hg tip [-p]')),
3162 3162 "unbundle":
3163 3163 (unbundle,
3164 3164 [('u', 'update', None,
3165 3165 _('update to new tip if changesets were unbundled'))],
3166 3166 _('hg unbundle [-u] FILE...')),
3167 3167 "^update|up|checkout|co":
3168 3168 (update,
3169 3169 [('C', 'clean', None, _('overwrite locally modified files')),
3170 3170 ('d', 'date', '', _('tipmost revision matching date')),
3171 3171 ('r', 'rev', '', _('revision'))],
3172 3172 _('hg update [-C] [-d DATE] [[-r] REV]')),
3173 3173 "verify": (verify, [], _('hg verify')),
3174 3174 "version": (version_, [], _('hg version')),
3175 3175 }
3176 3176
3177 3177 norepo = ("clone init version help debugcomplete debugdata"
3178 3178 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3179 3179 optionalrepo = ("identify paths serve showconfig debugancestor")
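
Each entry in the command table above maps a command name (aliases separated by "|", a leading "^" marking commands shown in the short help list) to a tuple of (function, option list, synopsis), and every option is itself a (short flag, long flag, default, help text) tuple. As a rough, purely illustrative sketch, an extension could register a command with the same layout through the cmdtable mechanism picked up by dispatch further below; the command name, its option and its behaviour are hypothetical:

    def hello(ui, repo, *names, **opts):
        """print a greeting for each NAME (illustrative only)"""
        for i in xrange(opts.get('count', 1)):
            for name in names or ['world']:
                ui.write("hello %s\n" % name)

    cmdtable = {
        "hello": (hello,
                  [('c', 'count', 1, 'how many greetings to print (hypothetical)')],
                  'hg hello [-c COUNT] [NAME]...'),
    }
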
@@ -1,620 +1,620 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import nullid, nullrev, short
9 9 from i18n import _
10 10 import ancestor, bdiff, repo, revlog, util, os, errno
11 11
12 12 class changectx(object):
13 13 """A changecontext object makes access to data related to a particular
14 14 changeset convenient."""
15 15 def __init__(self, repo, changeid=None):
16 16 """changeid is a revision number, node, or tag"""
17 17 self._repo = repo
18 18
19 19 if not changeid and changeid != 0:
20 20 p1, p2 = self._repo.dirstate.parents()
21 21 self._rev = self._repo.changelog.rev(p1)
22 22 if self._rev == -1:
23 23 changeid = 'tip'
24 24 else:
25 25 self._node = p1
26 26 return
27 27
28 28 self._node = self._repo.lookup(changeid)
29 29 self._rev = self._repo.changelog.rev(self._node)
30 30
31 31 def __str__(self):
32 32 return short(self.node())
33 33
34 34 def __repr__(self):
35 35 return "<changectx %s>" % str(self)
36 36
37 37 def __eq__(self, other):
38 38 try:
39 39 return self._rev == other._rev
40 40 except AttributeError:
41 41 return False
42 42
43 43 def __ne__(self, other):
44 44 return not (self == other)
45 45
46 46 def __nonzero__(self):
47 47 return self._rev != nullrev
48 48
49 49 def __getattr__(self, name):
50 50 if name == '_changeset':
51 51 self._changeset = self._repo.changelog.read(self.node())
52 52 return self._changeset
53 53 elif name == '_manifest':
54 54 self._manifest = self._repo.manifest.read(self._changeset[0])
55 55 return self._manifest
56 56 elif name == '_manifestdelta':
57 57 md = self._repo.manifest.readdelta(self._changeset[0])
58 58 self._manifestdelta = md
59 59 return self._manifestdelta
60 60 else:
61 61 raise AttributeError, name
62 62
63 63 def __contains__(self, key):
64 64 return key in self._manifest
65 65
66 66 def __getitem__(self, key):
67 67 return self.filectx(key)
68 68
69 69 def __iter__(self):
70 70 a = self._manifest.keys()
71 71 a.sort()
72 72 for f in a:
73 73 yield f
74 74
75 75 def changeset(self): return self._changeset
76 76 def manifest(self): return self._manifest
77 77
78 78 def rev(self): return self._rev
79 79 def node(self): return self._node
80 80 def user(self): return self._changeset[1]
81 81 def date(self): return self._changeset[2]
82 82 def files(self): return self._changeset[3]
83 83 def description(self): return self._changeset[4]
84 84 def branch(self): return self._changeset[5].get("branch")
85 85 def extra(self): return self._changeset[5]
86 86 def tags(self): return self._repo.nodetags(self._node)
87 87
88 88 def parents(self):
89 89 """return contexts for each parent changeset"""
90 90 p = self._repo.changelog.parents(self._node)
91 91 return [changectx(self._repo, x) for x in p]
92 92
93 93 def children(self):
94 94 """return contexts for each child changeset"""
95 95 c = self._repo.changelog.children(self._node)
96 96 return [changectx(self._repo, x) for x in c]
97 97
98 98 def _fileinfo(self, path):
99 99 if '_manifest' in self.__dict__:
100 100 try:
101 101 return self._manifest[path], self._manifest.flags(path)
102 102 except KeyError:
103 103 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
104 104 if '_manifestdelta' in self.__dict__ or path in self.files():
105 105 if path in self._manifestdelta:
106 106 return self._manifestdelta[path], self._manifestdelta.flags(path)
107 107 node, flag = self._repo.manifest.find(self._changeset[0], path)
108 108 if not node:
109 109 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
110 110
111 111 return node, flag
112 112
113 113 def filenode(self, path):
114 114 return self._fileinfo(path)[0]
115 115
116 116 def fileflags(self, path):
117 117 try:
118 118 return self._fileinfo(path)[1]
119 119 except revlog.LookupError:
120 120 return ''
121 121
122 122 def filectx(self, path, fileid=None, filelog=None):
123 123 """get a file context from this changeset"""
124 124 if fileid is None:
125 125 fileid = self.filenode(path)
126 126 return filectx(self._repo, path, fileid=fileid,
127 127 changectx=self, filelog=filelog)
128 128
129 129 def filectxs(self):
130 130 """generate a file context for each file in this changeset's
131 131 manifest"""
132 132 mf = self.manifest()
133 133 m = mf.keys()
134 134 m.sort()
135 135 for f in m:
136 136 yield self.filectx(f, fileid=mf[f])
137 137
138 138 def ancestor(self, c2):
139 139 """
140 140 return the ancestor context of self and c2
141 141 """
142 142 n = self._repo.changelog.ancestor(self._node, c2._node)
143 143 return changectx(self._repo, n)
144 144
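
# A brief usage sketch for the changectx API above; "repo" is assumed to be
# an already-open repository object (e.g. from hg.repository), and the helper
# name is purely illustrative.

def _example_show_tip(repo):
    ctx = changectx(repo, 'tip')      # changeid may be a rev number, node or tag
    print "revision %d (%s) by %s" % (ctx.rev(), str(ctx), ctx.user())
    print "parent revisions:", [p.rev() for p in ctx.parents()]
    if '.hgignore' in ctx:            # membership is checked against the manifest
        print "%d bytes in .hgignore" % len(ctx.filectx('.hgignore').data())
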
145 145 class filectx(object):
146 146 """A filecontext object makes access to data related to a particular
147 147 filerevision convenient."""
148 148 def __init__(self, repo, path, changeid=None, fileid=None,
149 149 filelog=None, changectx=None):
150 150 """changeid can be a changeset revision, node, or tag.
151 151 fileid can be a file revision or node."""
152 152 self._repo = repo
153 153 self._path = path
154 154
155 155 assert (changeid is not None
156 156 or fileid is not None
157 157 or changectx is not None)
158 158
159 159 if filelog:
160 160 self._filelog = filelog
161 161
162 162 if changeid is not None:
163 163 self._changeid = changeid
164 164 if changectx is not None:
165 165 self._changectx = changectx
166 166 if fileid is not None:
167 167 self._fileid = fileid
168 168
169 169 def __getattr__(self, name):
170 170 if name == '_changectx':
171 171 self._changectx = changectx(self._repo, self._changeid)
172 172 return self._changectx
173 173 elif name == '_filelog':
174 174 self._filelog = self._repo.file(self._path)
175 175 return self._filelog
176 176 elif name == '_changeid':
177 177 if '_changectx' in self.__dict__:
178 178 self._changeid = self._changectx.rev()
179 179 else:
180 180 self._changeid = self._filelog.linkrev(self._filenode)
181 181 return self._changeid
182 182 elif name == '_filenode':
183 183 if '_fileid' in self.__dict__:
184 184 self._filenode = self._filelog.lookup(self._fileid)
185 185 else:
186 186 self._filenode = self._changectx.filenode(self._path)
187 187 return self._filenode
188 188 elif name == '_filerev':
189 189 self._filerev = self._filelog.rev(self._filenode)
190 190 return self._filerev
191 191 else:
192 192 raise AttributeError, name
193 193
194 194 def __nonzero__(self):
195 195 try:
196 196 n = self._filenode
197 197 return True
198 198 except revlog.LookupError:
199 199 # file is missing
200 200 return False
201 201
202 202 def __str__(self):
203 203 return "%s@%s" % (self.path(), short(self.node()))
204 204
205 205 def __repr__(self):
206 206 return "<filectx %s>" % str(self)
207 207
208 208 def __eq__(self, other):
209 209 try:
210 210 return (self._path == other._path
211 211 and self._fileid == other._fileid)
212 212 except AttributeError:
213 213 return False
214 214
215 215 def __ne__(self, other):
216 216 return not (self == other)
217 217
218 218 def filectx(self, fileid):
219 219 '''opens an arbitrary revision of the file without
220 220 opening a new filelog'''
221 221 return filectx(self._repo, self._path, fileid=fileid,
222 222 filelog=self._filelog)
223 223
224 224 def filerev(self): return self._filerev
225 225 def filenode(self): return self._filenode
226 226 def fileflags(self): return self._changectx.fileflags(self._path)
227 227 def isexec(self): return 'x' in self.fileflags()
228 228 def islink(self): return 'l' in self.fileflags()
229 229 def filelog(self): return self._filelog
230 230
231 231 def rev(self):
232 232 if '_changectx' in self.__dict__:
233 233 return self._changectx.rev()
234 234 if '_changeid' in self.__dict__:
235 235 return self._changectx.rev()
236 236 return self._filelog.linkrev(self._filenode)
237 237
238 238 def linkrev(self): return self._filelog.linkrev(self._filenode)
239 239 def node(self): return self._changectx.node()
240 240 def user(self): return self._changectx.user()
241 241 def date(self): return self._changectx.date()
242 242 def files(self): return self._changectx.files()
243 243 def description(self): return self._changectx.description()
244 244 def branch(self): return self._changectx.branch()
245 245 def manifest(self): return self._changectx.manifest()
246 246 def changectx(self): return self._changectx
247 247
248 248 def data(self): return self._filelog.read(self._filenode)
249 249 def path(self): return self._path
250 250 def size(self): return self._filelog.size(self._filerev)
251 251
252 252 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
253 253
254 254 def renamed(self):
255 255 """check if file was actually renamed in this changeset revision
256 256
257 257         If a rename is logged in the file revision, we report the copy
258 258         for the changeset only if the file revision's linkrev points back
259 259         to that changeset or both parents contain different file revisions.
260 260 """
261 261
262 262 renamed = self._filelog.renamed(self._filenode)
263 263 if not renamed:
264 264 return renamed
265 265
266 266 if self.rev() == self.linkrev():
267 267 return renamed
268 268
269 269 name = self.path()
270 270 fnode = self._filenode
271 271 for p in self._changectx.parents():
272 272 try:
273 273 if fnode == p.filenode(name):
274 274 return None
275 275 except revlog.LookupError:
276 276 pass
277 277 return renamed
278 278
279 279 def parents(self):
280 280 p = self._path
281 281 fl = self._filelog
282 282 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
283 283
284 284 r = self._filelog.renamed(self._filenode)
285 285 if r:
286 286 pl[0] = (r[0], r[1], None)
287 287
288 288 return [filectx(self._repo, p, fileid=n, filelog=l)
289 289 for p,n,l in pl if n != nullid]
290 290
291 291 def children(self):
292 292 # hard for renames
293 293 c = self._filelog.children(self._filenode)
294 294 return [filectx(self._repo, self._path, fileid=x,
295 295 filelog=self._filelog) for x in c]
296 296
297 297 def annotate(self, follow=False, linenumber=None):
298 298 '''returns a list of tuples of (ctx, line) for each line
299 299 in the file, where ctx is the filectx of the node where
300 300 that line was last changed.
301 301         If the "linenumber" parameter is not None, tuples of
302 302         ((ctx, linenumber), line) are returned instead for each line.
303 303         In such tuples, linenumber is the line's number at its first
304 304         appearance in the managed file.
305 305         To reduce annotation cost,
306 306         a fixed value (False) is returned as the linenumber
307 307         if the "linenumber" parameter is False.'''
308 308
309 309 def decorate_compat(text, rev):
310 310 return ([rev] * len(text.splitlines()), text)
311 311
312 312 def without_linenumber(text, rev):
313 313 return ([(rev, False)] * len(text.splitlines()), text)
314 314
315 315 def with_linenumber(text, rev):
316 316 size = len(text.splitlines())
317 317 return ([(rev, i) for i in xrange(1, size + 1)], text)
318 318
319 319 decorate = (((linenumber is None) and decorate_compat) or
320 320 (linenumber and with_linenumber) or
321 321 without_linenumber)
322 322
323 323 def pair(parent, child):
324 324 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
325 325 child[0][b1:b2] = parent[0][a1:a2]
326 326 return child
327 327
328 328 getlog = util.cachefunc(lambda x: self._repo.file(x))
329 329 def getctx(path, fileid):
330 330 log = path == self._path and self._filelog or getlog(path)
331 331 return filectx(self._repo, path, fileid=fileid, filelog=log)
332 332 getctx = util.cachefunc(getctx)
333 333
334 334 def parents(f):
335 335 # we want to reuse filectx objects as much as possible
336 336 p = f._path
337 337 if f._filerev is None: # working dir
338 338 pl = [(n.path(), n.filerev()) for n in f.parents()]
339 339 else:
340 340 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
341 341
342 342 if follow:
343 343 r = f.renamed()
344 344 if r:
345 345 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
346 346
347 347 return [getctx(p, n) for p, n in pl if n != nullrev]
348 348
349 349 # use linkrev to find the first changeset where self appeared
350 350 if self.rev() != self.linkrev():
351 351 base = self.filectx(self.filerev())
352 352 else:
353 353 base = self
354 354
355 355 # find all ancestors
356 356 needed = {base: 1}
357 357 visit = [base]
358 358 files = [base._path]
359 359 while visit:
360 360 f = visit.pop(0)
361 361 for p in parents(f):
362 362 if p not in needed:
363 363 needed[p] = 1
364 364 visit.append(p)
365 365 if p._path not in files:
366 366 files.append(p._path)
367 367 else:
368 368 # count how many times we'll use this
369 369 needed[p] += 1
370 370
371 371 # sort by revision (per file) which is a topological order
372 372 visit = []
373 373 for f in files:
374 374 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
375 375 visit.extend(fn)
376 376 visit.sort()
377 377 hist = {}
378 378
379 379 for r, f in visit:
380 380 curr = decorate(f.data(), f)
381 381 for p in parents(f):
382 382 if p != nullid:
383 383 curr = pair(hist[p], curr)
384 384 # trim the history of unneeded revs
385 385 needed[p] -= 1
386 386 if not needed[p]:
387 387 del hist[p]
388 388 hist[f] = curr
389 389
390 390 return zip(hist[f][0], hist[f][1].splitlines(1))
391 391
392 392 def ancestor(self, fc2):
393 393 """
394 394 find the common ancestor file context, if any, of self, and fc2
395 395 """
396 396
397 397 acache = {}
398 398
399 399 # prime the ancestor cache for the working directory
400 400 for c in (self, fc2):
401 401             if c._filerev is None:
402 402 pl = [(n.path(), n.filenode()) for n in c.parents()]
403 403 acache[(c._path, None)] = pl
404 404
405 405 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
406 406 def parents(vertex):
407 407 if vertex in acache:
408 408 return acache[vertex]
409 409 f, n = vertex
410 410 if f not in flcache:
411 411 flcache[f] = self._repo.file(f)
412 412 fl = flcache[f]
413 413 pl = [(f, p) for p in fl.parents(n) if p != nullid]
414 414 re = fl.renamed(n)
415 415 if re:
416 416 pl.append(re)
417 417 acache[vertex] = pl
418 418 return pl
419 419
420 420 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
421 421 v = ancestor.ancestor(a, b, parents)
422 422 if v:
423 423 f, n = v
424 424 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
425 425
426 426 return None
427 427
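
# A matching sketch for filectx: annotate a file as of a given changeset,
# following copies and renames. "repo" is again an assumed open repository
# and the helper name is illustrative.

def _example_annotate(repo, path, rev='tip'):
    fctx = changectx(repo, rev).filectx(path)
    for actx, line in fctx.annotate(follow=True):  # yields (filectx, line) pairs
        print "%4d: %s" % (actx.rev(), line),      # each line keeps its newline
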
428 428 class workingctx(changectx):
429 429 """A workingctx object makes access to data related to
430 430 the current working directory convenient."""
431 431 def __init__(self, repo):
432 432 self._repo = repo
433 433 self._rev = None
434 434 self._node = None
435 435
436 436 def __str__(self):
437 437 return str(self._parents[0]) + "+"
438 438
439 439 def __nonzero__(self):
440 440 return True
441 441
442 442 def __getattr__(self, name):
443 443 if name == '_parents':
444 444 self._parents = self._repo.parents()
445 445 return self._parents
446 446 if name == '_status':
447 447 self._status = self._repo.status()
448 448 return self._status
449 449 if name == '_manifest':
450 450 self._buildmanifest()
451 451 return self._manifest
452 452 else:
453 453 raise AttributeError, name
454 454
455 455 def _buildmanifest(self):
456 456 """generate a manifest corresponding to the working directory"""
457 457
458 458 man = self._parents[0].manifest().copy()
459 459 copied = self._repo.dirstate.copies()
460 460 is_exec = util.execfunc(self._repo.root,
461 461 lambda p: man.execf(copied.get(p,p)))
462 462 is_link = util.linkfunc(self._repo.root,
463 463 lambda p: man.linkf(copied.get(p,p)))
464 464 modified, added, removed, deleted, unknown = self._status[:5]
465 465 for i, l in (("a", added), ("m", modified), ("u", unknown)):
466 466 for f in l:
467 467 man[f] = man.get(copied.get(f, f), nullid) + i
468 468 try:
469 469 man.set(f, is_exec(f), is_link(f))
470 470 except OSError:
471 471 pass
472 472
473 473 for f in deleted + removed:
474 474 if f in man:
475 475 del man[f]
476 476
477 477 self._manifest = man
478 478
479 479 def manifest(self): return self._manifest
480 480
481 481 def user(self): return self._repo.ui.username()
482 482 def date(self): return util.makedate()
483 483 def description(self): return ""
484 484 def files(self):
485 485 f = self.modified() + self.added() + self.removed()
486 486 f.sort()
487 487 return f
488 488
489 489 def modified(self): return self._status[0]
490 490 def added(self): return self._status[1]
491 491 def removed(self): return self._status[2]
492 492 def deleted(self): return self._status[3]
493 493 def unknown(self): return self._status[4]
494 494 def clean(self): return self._status[5]
495 495 def branch(self): return self._repo.dirstate.branch()
496 496
497 497 def tags(self):
498 498 t = []
499 499 [t.extend(p.tags()) for p in self.parents()]
500 500 return t
501 501
502 502 def parents(self):
503 503 """return contexts for each parent changeset"""
504 504 return self._parents
505 505
506 506 def children(self):
507 507 return []
508 508
509 509 def fileflags(self, path):
510 510 if '_manifest' in self.__dict__:
511 511 try:
512 512 return self._manifest.flags(path)
513 513 except KeyError:
514 514 return ''
515 515
516 516 pnode = self._parents[0].changeset()[0]
517 517 orig = self._repo.dirstate.copies().get(path, path)
518 518 node, flag = self._repo.manifest.find(pnode, orig)
519 519 is_link = util.linkfunc(self._repo.root, lambda p: 'l' in flag)
520 520 is_exec = util.execfunc(self._repo.root, lambda p: 'x' in flag)
521 521 try:
522 522 return (is_link(path) and 'l' or '') + (is_exec(path) and 'e' or '')
523 523 except OSError:
524 524 pass
525 525
526 526 if not node or path in self.deleted() or path in self.removed():
527 527 return ''
528 528 return flag
529 529
530 530 def filectx(self, path, filelog=None):
531 531 """get a file context from the working directory"""
532 532 return workingfilectx(self._repo, path, workingctx=self,
533 533 filelog=filelog)
534 534
535 535 def ancestor(self, c2):
536 536 """return the ancestor context of self and c2"""
537 537 return self._parents[0].ancestor(c2) # punt on two parents for now
538 538
539 539 class workingfilectx(filectx):
540 540 """A workingfilectx object makes access to data related to a particular
541 541 file in the working directory convenient."""
542 542 def __init__(self, repo, path, filelog=None, workingctx=None):
543 543 """changeid can be a changeset revision, node, or tag.
544 544 fileid can be a file revision or node."""
545 545 self._repo = repo
546 546 self._path = path
547 547 self._changeid = None
548 548 self._filerev = self._filenode = None
549 549
550 550 if filelog:
551 551 self._filelog = filelog
552 552 if workingctx:
553 553 self._changectx = workingctx
554 554
555 555 def __getattr__(self, name):
556 556 if name == '_changectx':
557 557 self._changectx = workingctx(self._repo)
558 558 return self._changectx
559 559 elif name == '_repopath':
560 560 self._repopath = (self._repo.dirstate.copied(self._path)
561 561 or self._path)
562 562 return self._repopath
563 563 elif name == '_filelog':
564 564 self._filelog = self._repo.file(self._repopath)
565 565 return self._filelog
566 566 else:
567 567 raise AttributeError, name
568 568
569 569 def __nonzero__(self):
570 570 return True
571 571
572 572 def __str__(self):
573 573 return "%s@%s" % (self.path(), self._changectx)
574 574
575 575 def filectx(self, fileid):
576 576 '''opens an arbitrary revision of the file without
577 577 opening a new filelog'''
578 578 return filectx(self._repo, self._repopath, fileid=fileid,
579 579 filelog=self._filelog)
580 580
581 581 def rev(self):
582 582 if '_changectx' in self.__dict__:
583 583 return self._changectx.rev()
584 584 return self._filelog.linkrev(self._filenode)
585 585
586 586 def data(self): return self._repo.wread(self._path)
587 587 def renamed(self):
588 588 rp = self._repopath
589 589 if rp == self._path:
590 590 return None
591 591 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
592 592
593 593 def parents(self):
594 594 '''return parent filectxs, following copies if necessary'''
595 595 p = self._path
596 596 rp = self._repopath
597 597 pcl = self._changectx._parents
598 598 fl = self._filelog
599 599 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
600 600 if len(pcl) > 1:
601 601 if rp != p:
602 602 fl = None
603 603 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
604 604
605 605 return [filectx(self._repo, p, fileid=n, filelog=l)
606 606 for p,n,l in pl if n != nullid]
607 607
608 608 def children(self):
609 609 return []
610 610
611 611 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
612 612 def date(self):
613 613 t, tz = self._changectx.date()
614 614 try:
615 615 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
616 616 except OSError, err:
617 617 if err.errno != errno.ENOENT: raise
618 618 return (t, tz)
619 619
620 620 def cmp(self, text): return self._repo.wread(self._path) == text
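
The working-directory classes above mirror the stored-revision ones, with the dirstate and status of the working copy standing in for a recorded changeset. A short, purely illustrative sketch of how they fit together, again assuming an already-open "repo" object:

    from mercurial import context

    wctx = context.workingctx(repo)
    print "branch:", wctx.branch(), "parents:", [p.rev() for p in wctx.parents()]
    for f in wctx.modified() + wctx.added():
        wfctx = wctx.filectx(f)                # a workingfilectx
        print f, len(wfctx.data()), "bytes in the working copy"
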
@@ -1,598 +1,598 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 from node import *
10 from node import nullid
11 11 from i18n import _
12 12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 13 import cStringIO, osutil
14 14
15 15 _unknown = ('?', 0, 0, 0)
16 16 _format = ">cllll"
17 17
18 18 class dirstate(object):
19 19
20 20 def __init__(self, opener, ui, root):
21 21 self._opener = opener
22 22 self._root = root
23 23 self._dirty = False
24 24 self._dirtypl = False
25 25 self._ui = ui
26 26
27 27 def __getattr__(self, name):
28 28 if name == '_map':
29 29 self._read()
30 30 return self._map
31 31 elif name == '_copymap':
32 32 self._read()
33 33 return self._copymap
34 34 elif name == '_branch':
35 35 try:
36 36 self._branch = (self._opener("branch").read().strip()
37 37 or "default")
38 38 except IOError:
39 39 self._branch = "default"
40 40 return self._branch
41 41 elif name == '_pl':
42 42 self._pl = [nullid, nullid]
43 43 try:
44 44 st = self._opener("dirstate").read(40)
45 45 if len(st) == 40:
46 46 self._pl = st[:20], st[20:40]
47 47 except IOError, err:
48 48 if err.errno != errno.ENOENT: raise
49 49 return self._pl
50 50 elif name == '_dirs':
51 51 self._dirs = {}
52 52 for f in self._map:
53 53 if self[f] != 'r':
54 54 self._incpath(f)
55 55 return self._dirs
56 56 elif name == '_ignore':
57 57 files = [self._join('.hgignore')]
58 58 for name, path in self._ui.configitems("ui"):
59 59 if name == 'ignore' or name.startswith('ignore.'):
60 60 files.append(os.path.expanduser(path))
61 61 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
62 62 return self._ignore
63 63 elif name == '_slash':
64 64 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
65 65 return self._slash
66 66 else:
67 67 raise AttributeError, name
68 68
69 69 def _join(self, f):
70 70 return os.path.join(self._root, f)
71 71
72 72 def getcwd(self):
73 73 cwd = os.getcwd()
74 74 if cwd == self._root: return ''
75 75 # self._root ends with a path separator if self._root is '/' or 'C:\'
76 76 rootsep = self._root
77 77 if not util.endswithsep(rootsep):
78 78 rootsep += os.sep
79 79 if cwd.startswith(rootsep):
80 80 return cwd[len(rootsep):]
81 81 else:
82 82 # we're outside the repo. return an absolute path.
83 83 return cwd
84 84
85 85 def pathto(self, f, cwd=None):
86 86 if cwd is None:
87 87 cwd = self.getcwd()
88 88 path = util.pathto(self._root, cwd, f)
89 89 if self._slash:
90 90 return util.normpath(path)
91 91 return path
92 92
93 93 def __getitem__(self, key):
94 94 ''' current states:
95 95 n normal
96 96 m needs merging
97 97 r marked for removal
98 98 a marked for addition
99 99 ? not tracked'''
100 100 return self._map.get(key, ("?",))[0]
101 101
102 102 def __contains__(self, key):
103 103 return key in self._map
104 104
105 105 def __iter__(self):
106 106 a = self._map.keys()
107 107 a.sort()
108 108 for x in a:
109 109 yield x
110 110
111 111 def parents(self):
112 112 return self._pl
113 113
114 114 def branch(self):
115 115 return self._branch
116 116
117 117 def setparents(self, p1, p2=nullid):
118 118 self._dirty = self._dirtypl = True
119 119 self._pl = p1, p2
120 120
121 121 def setbranch(self, branch):
122 122 self._branch = branch
123 123 self._opener("branch", "w").write(branch + '\n')
124 124
125 125 def _read(self):
126 126 self._map = {}
127 127 self._copymap = {}
128 128 if not self._dirtypl:
129 129 self._pl = [nullid, nullid]
130 130 try:
131 131 st = self._opener("dirstate").read()
132 132 except IOError, err:
133 133 if err.errno != errno.ENOENT: raise
134 134 return
135 135 if not st:
136 136 return
137 137
138 138 if not self._dirtypl:
139 139 self._pl = [st[:20], st[20: 40]]
140 140
141 141 # deref fields so they will be local in loop
142 142 dmap = self._map
143 143 copymap = self._copymap
144 144 unpack = struct.unpack
145 145 e_size = struct.calcsize(_format)
146 146 pos1 = 40
147 147 l = len(st)
148 148
149 149 # the inner loop
150 150 while pos1 < l:
151 151 pos2 = pos1 + e_size
152 152 e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
153 153 pos1 = pos2 + e[4]
154 154 f = st[pos2:pos1]
155 155 if '\0' in f:
156 156 f, c = f.split('\0')
157 157 copymap[f] = c
158 158 dmap[f] = e # we hold onto e[4] because making a subtuple is slow
159 159
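
# For reference, a stand-alone sketch of decoding the on-disk layout that
# _read above parses (illustrative only, not used by this class; "struct"
# and "_format" come from the top of this module):

def _read_dirstate_file(path):
    data = open(path, 'rb').read()
    parents = (data[:20], data[20:40])         # two 20-byte parent node ids
    esize = struct.calcsize(_format)           # ">cllll": state, mode, size, mtime, namelen
    entries, copies, pos = {}, {}, 40
    while pos < len(data):
        state, mode, size, mtime, flen = struct.unpack(_format,
                                                       data[pos:pos + esize])
        pos += esize
        fname = data[pos:pos + flen]
        pos += flen
        if '\0' in fname:                      # the entry also records a copy
            fname, source = fname.split('\0')
            copies[fname] = source
        entries[fname] = (state, mode, size, mtime)
    return parents, entries, copies
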
160 160 def invalidate(self):
161 161 for a in "_map _copymap _branch _pl _dirs _ignore".split():
162 162 if a in self.__dict__:
163 163 delattr(self, a)
164 164 self._dirty = False
165 165
166 166 def copy(self, source, dest):
167 167 self._dirty = True
168 168 self._copymap[dest] = source
169 169
170 170 def copied(self, file):
171 171 return self._copymap.get(file, None)
172 172
173 173 def copies(self):
174 174 return self._copymap
175 175
176 176 def _incpath(self, path):
177 177 c = path.rfind('/')
178 178 if c >= 0:
179 179 dirs = self._dirs
180 180 base = path[:c]
181 181 if base not in dirs:
182 182 self._incpath(base)
183 183 dirs[base] = 1
184 184 else:
185 185 dirs[base] += 1
186 186
187 187 def _decpath(self, path):
188 188 c = path.rfind('/')
189 189 if c >= 0:
190 190 base = path[:c]
191 191 dirs = self._dirs
192 192 if dirs[base] == 1:
193 193 del dirs[base]
194 194 self._decpath(base)
195 195 else:
196 196 dirs[base] -= 1
197 197
198 198 def _incpathcheck(self, f):
199 199 if '\r' in f or '\n' in f:
200 200 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
201 201 % f)
202 202 # shadows
203 203 if f in self._dirs:
204 204 raise util.Abort(_('directory %r already in dirstate') % f)
205 205 for c in strutil.rfindall(f, '/'):
206 206 d = f[:c]
207 207 if d in self._dirs:
208 208 break
209 209 if d in self._map and self[d] != 'r':
210 210 raise util.Abort(_('file %r in dirstate clashes with %r') %
211 211 (d, f))
212 212 self._incpath(f)
213 213
214 214 def _changepath(self, f, newstate, relaxed=False):
215 215 # handle upcoming path changes
216 216 oldstate = self[f]
217 217 if oldstate not in "?r" and newstate in "?r":
218 218 if "_dirs" in self.__dict__:
219 219 self._decpath(f)
220 220 return
221 221 if oldstate in "?r" and newstate not in "?r":
222 222 if relaxed and oldstate == '?':
223 223 # XXX
224 224 # in relaxed mode we assume the caller knows
225 225 # what it is doing, workaround for updating
226 226 # dir-to-file revisions
227 227 if "_dirs" in self.__dict__:
228 228 self._incpath(f)
229 229 return
230 230 self._incpathcheck(f)
231 231 return
232 232
233 233 def normal(self, f):
234 234 'mark a file normal and clean'
235 235 self._dirty = True
236 236 self._changepath(f, 'n', True)
237 237 s = os.lstat(self._join(f))
238 238 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
239 239 if f in self._copymap:
240 240 del self._copymap[f]
241 241
242 242 def normallookup(self, f):
243 243 'mark a file normal, but possibly dirty'
244 244 self._dirty = True
245 245 self._changepath(f, 'n', True)
246 246 self._map[f] = ('n', 0, -1, -1, 0)
247 247 if f in self._copymap:
248 248 del self._copymap[f]
249 249
250 250 def normaldirty(self, f):
251 251 'mark a file normal, but dirty'
252 252 self._dirty = True
253 253 self._changepath(f, 'n', True)
254 254 self._map[f] = ('n', 0, -2, -1, 0)
255 255 if f in self._copymap:
256 256 del self._copymap[f]
257 257
258 258 def add(self, f):
259 259 'mark a file added'
260 260 self._dirty = True
261 261 self._changepath(f, 'a')
262 262 self._map[f] = ('a', 0, -1, -1, 0)
263 263 if f in self._copymap:
264 264 del self._copymap[f]
265 265
266 266 def remove(self, f):
267 267 'mark a file removed'
268 268 self._dirty = True
269 269 self._changepath(f, 'r')
270 270 self._map[f] = ('r', 0, 0, 0, 0)
271 271 if f in self._copymap:
272 272 del self._copymap[f]
273 273
274 274 def merge(self, f):
275 275 'mark a file merged'
276 276 self._dirty = True
277 277 s = os.lstat(self._join(f))
278 278 self._changepath(f, 'm', True)
279 279 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
280 280 if f in self._copymap:
281 281 del self._copymap[f]
282 282
283 283 def forget(self, f):
284 284 'forget a file'
285 285 self._dirty = True
286 286 try:
287 287 self._changepath(f, '?')
288 288 del self._map[f]
289 289 except KeyError:
290 290 self._ui.warn(_("not in dirstate: %s\n") % f)
291 291
292 292 def clear(self):
293 293 self._map = {}
294 294 if "_dirs" in self.__dict__:
295 295             delattr(self, "_dirs")
296 296 self._copymap = {}
297 297 self._pl = [nullid, nullid]
298 298 self._dirty = True
299 299
300 300 def rebuild(self, parent, files):
301 301 self.clear()
302 302 for f in files:
303 303 if files.execf(f):
304 304 self._map[f] = ('n', 0777, -1, 0, 0)
305 305 else:
306 306 self._map[f] = ('n', 0666, -1, 0, 0)
307 307 self._pl = (parent, nullid)
308 308 self._dirty = True
309 309
310 310 def write(self):
311 311 if not self._dirty:
312 312 return
313 313 cs = cStringIO.StringIO()
314 314 copymap = self._copymap
315 315 pack = struct.pack
316 316 write = cs.write
317 317 write("".join(self._pl))
318 318 for f, e in self._map.iteritems():
319 319 if f in copymap:
320 320 f = "%s\0%s" % (f, copymap[f])
321 321 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
322 322 write(e)
323 323 write(f)
324 324 st = self._opener("dirstate", "w", atomictemp=True)
325 325 st.write(cs.getvalue())
326 326 st.rename()
327 327 self._dirty = self._dirtypl = False
328 328
329 329 def _filter(self, files):
330 330 ret = {}
331 331 unknown = []
332 332
333 333 for x in files:
334 334 if x == '.':
335 335 return self._map.copy()
336 336 if x not in self._map:
337 337 unknown.append(x)
338 338 else:
339 339 ret[x] = self._map[x]
340 340
341 341 if not unknown:
342 342 return ret
343 343
344 344 b = self._map.keys()
345 345 b.sort()
346 346 blen = len(b)
347 347
348 348 for x in unknown:
349 349 bs = bisect.bisect(b, "%s%s" % (x, '/'))
350 350 while bs < blen:
351 351 s = b[bs]
352 352 if len(s) > len(x) and s.startswith(x):
353 353 ret[s] = self._map[s]
354 354 else:
355 355 break
356 356 bs += 1
357 357 return ret
358 358
359 359 def _supported(self, f, mode, verbose=False):
360 360 if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
361 361 return True
362 362 if verbose:
363 363 kind = 'unknown'
364 364 if stat.S_ISCHR(mode): kind = _('character device')
365 365 elif stat.S_ISBLK(mode): kind = _('block device')
366 366 elif stat.S_ISFIFO(mode): kind = _('fifo')
367 367 elif stat.S_ISSOCK(mode): kind = _('socket')
368 368 elif stat.S_ISDIR(mode): kind = _('directory')
369 369 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
370 370 % (self.pathto(f), kind))
371 371 return False
372 372
373 373 def _dirignore(self, f):
374 374 if self._ignore(f):
375 375 return True
376 376 for c in strutil.findall(f, '/'):
377 377 if self._ignore(f[:c]):
378 378 return True
379 379 return False
380 380
381 381 def walk(self, files=None, match=util.always, badmatch=None):
382 382 # filter out the stat
383 383 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
384 384 yield src, f
385 385
386 386 def statwalk(self, files=None, match=util.always, unknown=True,
387 387 ignored=False, badmatch=None, directories=False):
388 388 '''
389 389 walk recursively through the directory tree, finding all files
390 390 matched by the match function
391 391
392 392 results are yielded in a tuple (src, filename, st), where src
393 393 is one of:
394 394 'f' the file was found in the directory tree
395 395 'd' the file is a directory of the tree
396 396 'm' the file was only in the dirstate and not in the tree
397 397 'b' file was not found and matched badmatch
398 398
399 399 and st is the stat result if the file was found in the directory.
400 400 '''
401 401
402 402 # walk all files by default
403 403 if not files:
404 404 files = ['.']
405 405 dc = self._map.copy()
406 406 else:
407 407 files = util.unique(files)
408 408 dc = self._filter(files)
409 409
410 410 def imatch(file_):
411 411 if file_ not in dc and self._ignore(file_):
412 412 return False
413 413 return match(file_)
414 414
415 415 # TODO: don't walk unknown directories if unknown and ignored are False
416 416 ignore = self._ignore
417 417 dirignore = self._dirignore
418 418 if ignored:
419 419 imatch = match
420 420 ignore = util.never
421 421 dirignore = util.never
422 422
423 423 # self._root may end with a path separator when self._root == '/'
424 424 common_prefix_len = len(self._root)
425 425 if not util.endswithsep(self._root):
426 426 common_prefix_len += 1
427 427
428 428 normpath = util.normpath
429 429 listdir = osutil.listdir
430 430 lstat = os.lstat
431 431 bisect_left = bisect.bisect_left
432 432 isdir = os.path.isdir
433 433 pconvert = util.pconvert
434 434 join = os.path.join
435 435 s_isdir = stat.S_ISDIR
436 436 supported = self._supported
437 437 _join = self._join
438 438 known = {'.hg': 1}
439 439
440 440 # recursion free walker, faster than os.walk.
441 441 def findfiles(s):
442 442 work = [s]
443 443 wadd = work.append
444 444 found = []
445 445 add = found.append
446 446 if directories:
447 447 add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
448 448 while work:
449 449 top = work.pop()
450 450 entries = listdir(top, stat=True)
451 451 # nd is the top of the repository dir tree
452 452 nd = normpath(top[common_prefix_len:])
453 453 if nd == '.':
454 454 nd = ''
455 455 else:
456 456 # do not recurse into a repo contained in this
457 457                 # one. use bisect to find the .hg directory so this
458 458                 # stays fast even in big directories.
459 459 names = [e[0] for e in entries]
460 460 hg = bisect_left(names, '.hg')
461 461 if hg < len(names) and names[hg] == '.hg':
462 462 if isdir(join(top, '.hg')):
463 463 continue
464 464 for f, kind, st in entries:
465 465 np = pconvert(join(nd, f))
466 466 if np in known:
467 467 continue
468 468 known[np] = 1
469 469 p = join(top, f)
470 470 # don't trip over symlinks
471 471 if kind == stat.S_IFDIR:
472 472 if not ignore(np):
473 473 wadd(p)
474 474 if directories:
475 475 add((np, 'd', st))
476 476 if np in dc and match(np):
477 477 add((np, 'm', st))
478 478 elif imatch(np):
479 479 if supported(np, st.st_mode):
480 480 add((np, 'f', st))
481 481 elif np in dc:
482 482 add((np, 'm', st))
483 483 found.sort()
484 484 return found
485 485
486 486 # step one, find all files that match our criteria
487 487 files.sort()
488 488 for ff in files:
489 489 nf = normpath(ff)
490 490 f = _join(ff)
491 491 try:
492 492 st = lstat(f)
493 493 except OSError, inst:
494 494 found = False
495 495 for fn in dc:
496 496 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
497 497 found = True
498 498 break
499 499 if not found:
500 500 if inst.errno != errno.ENOENT or not badmatch:
501 501 self._ui.warn('%s: %s\n' %
502 502 (self.pathto(ff), inst.strerror))
503 503 elif badmatch and badmatch(ff) and imatch(nf):
504 504 yield 'b', ff, None
505 505 continue
506 506 if s_isdir(st.st_mode):
507 507 if not dirignore(nf):
508 508 for f, src, st in findfiles(f):
509 509 yield src, f, st
510 510 else:
511 511 if nf in known:
512 512 continue
513 513 known[nf] = 1
514 514 if match(nf):
515 515 if supported(ff, st.st_mode, verbose=True):
516 516 yield 'f', nf, st
517 517 elif ff in dc:
518 518 yield 'm', nf, st
519 519
520 520         # step two, run through anything left in the dc hash and yield
521 521 # if we haven't already seen it
522 522 ks = dc.keys()
523 523 ks.sort()
524 524 for k in ks:
525 525 if k in known:
526 526 continue
527 527 known[k] = 1
528 528 if imatch(k):
529 529 yield 'm', k, None
530 530
531 531 def status(self, files, match, list_ignored, list_clean, list_unknown=True):
532 532 lookup, modified, added, unknown, ignored = [], [], [], [], []
533 533 removed, deleted, clean = [], [], []
534 534
535 535 files = files or []
536 536 _join = self._join
537 537 lstat = os.lstat
538 538 cmap = self._copymap
539 539 dmap = self._map
540 540 ladd = lookup.append
541 541 madd = modified.append
542 542 aadd = added.append
543 543 uadd = unknown.append
544 544 iadd = ignored.append
545 545 radd = removed.append
546 546 dadd = deleted.append
547 547 cadd = clean.append
548 548
549 549 for src, fn, st in self.statwalk(files, match, unknown=list_unknown,
550 550 ignored=list_ignored):
551 551 if fn in dmap:
552 552 type_, mode, size, time, foo = dmap[fn]
553 553 else:
554 554 if (list_ignored or fn in files) and self._dirignore(fn):
555 555 if list_ignored:
556 556 iadd(fn)
557 557 elif list_unknown:
558 558 uadd(fn)
559 559 continue
560 560 if src == 'm':
561 561 nonexistent = True
562 562 if not st:
563 563 try:
564 564 st = lstat(_join(fn))
565 565 except OSError, inst:
566 566 if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
567 567 raise
568 568 st = None
569 569 # We need to re-check that it is a valid file
570 570 if st and self._supported(fn, st.st_mode):
571 571 nonexistent = False
572 572             # XXX: what to do with files no longer present in the fs
573 573             # that are not marked as removed in the dirstate?
574 574 if nonexistent and type_ in "nma":
575 575 dadd(fn)
576 576 continue
577 577 # check the common case first
578 578 if type_ == 'n':
579 579 if not st:
580 580 st = lstat(_join(fn))
581 581 if (size >= 0 and (size != st.st_size
582 582 or (mode ^ st.st_mode) & 0100)
583 583 or size == -2
584 584 or fn in self._copymap):
585 585 madd(fn)
586 586 elif time != int(st.st_mtime):
587 587 ladd(fn)
588 588 elif list_clean:
589 589 cadd(fn)
590 590 elif type_ == 'm':
591 591 madd(fn)
592 592 elif type_ == 'a':
593 593 aadd(fn)
594 594 elif type_ == 'r':
595 595 radd(fn)
596 596
597 597 return (lookup, modified, added, removed, deleted, unknown, ignored,
598 598 clean)
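
For orientation, the eight lists returned by status() above could be consumed as in this minimal sketch, where "ds" is assumed to be an existing dirstate instance and util.always is the permissive match function used as the default elsewhere in this module:

    (lookup, modified, added, removed,
     deleted, unknown, ignored, clean) = ds.status([], util.always,
                                                   list_ignored=False,
                                                   list_clean=False)
    for f in modified:
        print "M", f
    for f in lookup:
        print "content check needed:", f   # size and mode match, mtime differs
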
@@ -1,417 +1,416 b''
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
9 8 from i18n import _
10 9 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
11 10 import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
12 11 import cmdutil
13 12 import ui as _ui
14 13
15 14 class ParseError(Exception):
16 15 """Exception raised on errors in parsing the command line."""
17 16
18 17 def run():
19 18 "run the command in sys.argv"
20 19 sys.exit(dispatch(sys.argv[1:]))
21 20
22 21 def dispatch(args):
23 22 "run the command specified in args"
24 23 try:
25 24 u = _ui.ui(traceback='--traceback' in args)
26 25 except util.Abort, inst:
27 26 sys.stderr.write(_("abort: %s\n") % inst)
28 27 return -1
29 28 return _runcatch(u, args)
30 29
31 30 def _runcatch(ui, args):
32 31 def catchterm(*args):
33 32 raise util.SignalInterrupt
34 33
35 34 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
36 35 num = getattr(signal, name, None)
37 36 if num: signal.signal(num, catchterm)
38 37
39 38 try:
40 39 try:
41 40 # enter the debugger before command execution
42 41 if '--debugger' in args:
43 42 pdb.set_trace()
44 43 try:
45 44 return _dispatch(ui, args)
46 45 finally:
47 46 ui.flush()
48 47 except:
49 48 # enter the debugger when we hit an exception
50 49 if '--debugger' in args:
51 50 pdb.post_mortem(sys.exc_info()[2])
52 51 ui.print_exc()
53 52 raise
54 53
55 54 except ParseError, inst:
56 55 if inst.args[0]:
57 56 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
58 57 commands.help_(ui, inst.args[0])
59 58 else:
60 59 ui.warn(_("hg: %s\n") % inst.args[1])
61 60 commands.help_(ui, 'shortlist')
62 61 except cmdutil.AmbiguousCommand, inst:
63 62 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
64 63 (inst.args[0], " ".join(inst.args[1])))
65 64 except cmdutil.UnknownCommand, inst:
66 65 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
67 66 commands.help_(ui, 'shortlist')
68 67 except hg.RepoError, inst:
69 68 ui.warn(_("abort: %s!\n") % inst)
70 69 except lock.LockHeld, inst:
71 70 if inst.errno == errno.ETIMEDOUT:
72 71 reason = _('timed out waiting for lock held by %s') % inst.locker
73 72 else:
74 73 reason = _('lock held by %s') % inst.locker
75 74 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
76 75 except lock.LockUnavailable, inst:
77 76 ui.warn(_("abort: could not lock %s: %s\n") %
78 77 (inst.desc or inst.filename, inst.strerror))
79 78 except revlog.RevlogError, inst:
80 79 ui.warn(_("abort: %s!\n") % inst)
81 80 except util.SignalInterrupt:
82 81 ui.warn(_("killed!\n"))
83 82 except KeyboardInterrupt:
84 83 try:
85 84 ui.warn(_("interrupted!\n"))
86 85 except IOError, inst:
87 86 if inst.errno == errno.EPIPE:
88 87 if ui.debugflag:
89 88 ui.warn(_("\nbroken pipe\n"))
90 89 else:
91 90 raise
92 91 except socket.error, inst:
93 92 ui.warn(_("abort: %s\n") % inst[1])
94 93 except IOError, inst:
95 94 if hasattr(inst, "code"):
96 95 ui.warn(_("abort: %s\n") % inst)
97 96 elif hasattr(inst, "reason"):
98 97 try: # usually it is in the form (errno, strerror)
99 98 reason = inst.reason.args[1]
100 99 except: # it might be anything, for example a string
101 100 reason = inst.reason
102 101 ui.warn(_("abort: error: %s\n") % reason)
103 102 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
104 103 if ui.debugflag:
105 104 ui.warn(_("broken pipe\n"))
106 105 elif getattr(inst, "strerror", None):
107 106 if getattr(inst, "filename", None):
108 107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
109 108 else:
110 109 ui.warn(_("abort: %s\n") % inst.strerror)
111 110 else:
112 111 raise
113 112 except OSError, inst:
114 113 if getattr(inst, "filename", None):
115 114 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
116 115 else:
117 116 ui.warn(_("abort: %s\n") % inst.strerror)
118 117 except util.UnexpectedOutput, inst:
119 118 ui.warn(_("abort: %s") % inst[0])
120 119 if not isinstance(inst[1], basestring):
121 120 ui.warn(" %r\n" % (inst[1],))
122 121 elif not inst[1]:
123 122 ui.warn(_(" empty string\n"))
124 123 else:
125 124 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
126 125 except ImportError, inst:
127 126 m = str(inst).split()[-1]
128 127 ui.warn(_("abort: could not import module %s!\n") % m)
129 128 if m in "mpatch bdiff".split():
130 129 ui.warn(_("(did you forget to compile extensions?)\n"))
131 130 elif m in "zlib".split():
132 131 ui.warn(_("(is your Python install correct?)\n"))
133 132
134 133 except util.Abort, inst:
135 134 ui.warn(_("abort: %s\n") % inst)
136 135 except MemoryError:
137 136 ui.warn(_("abort: out of memory\n"))
138 137 except SystemExit, inst:
139 138 # Commands shouldn't sys.exit directly, but give a return code.
140 139         # Just in case, catch this and pass the exit code to the caller.
141 140 return inst.code
142 141 except:
143 142 ui.warn(_("** unknown exception encountered, details follow\n"))
144 143 ui.warn(_("** report bug details to "
145 144 "http://www.selenic.com/mercurial/bts\n"))
146 145 ui.warn(_("** or mercurial@selenic.com\n"))
147 146 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
148 147 % version.get_version())
149 148 raise
150 149
151 150 return -1
152 151
153 152 def _findrepo(p):
154 153 while not os.path.isdir(os.path.join(p, ".hg")):
155 154 oldp, p = p, os.path.dirname(p)
156 155 if p == oldp:
157 156 return None
158 157
159 158 return p
160 159
161 160 def _parse(ui, args):
162 161 options = {}
163 162 cmdoptions = {}
164 163
165 164 try:
166 165 args = fancyopts.fancyopts(args, commands.globalopts, options)
167 166 except fancyopts.getopt.GetoptError, inst:
168 167 raise ParseError(None, inst)
169 168
170 169 if args:
171 170 cmd, args = args[0], args[1:]
172 171 aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
173 172 cmd = aliases[0]
174 173 defaults = ui.config("defaults", cmd)
175 174 if defaults:
176 175 args = shlex.split(defaults) + args
177 176 c = list(i[1])
178 177 else:
179 178 cmd = None
180 179 c = []
181 180
182 181 # combine global options into local
183 182 for o in commands.globalopts:
184 183 c.append((o[0], o[1], options[o[1]], o[3]))
185 184
186 185 try:
187 186 args = fancyopts.fancyopts(args, c, cmdoptions)
188 187 except fancyopts.getopt.GetoptError, inst:
189 188 raise ParseError(cmd, inst)
190 189
191 190 # separate global options back out
192 191 for o in commands.globalopts:
193 192 n = o[1]
194 193 options[n] = cmdoptions[n]
195 194 del cmdoptions[n]
196 195
197 196 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
198 197
199 198 def _parseconfig(config):
200 199 """parse the --config options from the command line"""
201 200 parsed = []
202 201 for cfg in config:
203 202 try:
204 203 name, value = cfg.split('=', 1)
205 204 section, name = name.split('.', 1)
206 205 if not section or not name:
207 206 raise IndexError
208 207 parsed.append((section, name, value))
209 208 except (IndexError, ValueError):
210 209 raise util.Abort(_('malformed --config option: %s') % cfg)
211 210 return parsed
212 211
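
# Worked example for the --config parsing above (the values are made up):
#
#   _parseconfig(['ui.username=alice', 'diff.git=1'])
#     -> [('ui', 'username', 'alice'), ('diff', 'git', '1')]
#
# An entry without a section part, e.g. 'username=alice', raises util.Abort.
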
213 212 def _earlygetopt(aliases, args):
214 213 """Return list of values for an option (or aliases).
215 214
216 215 The values are listed in the order they appear in args.
217 216 The options and values are removed from args.
218 217 """
219 218 try:
220 219 argcount = args.index("--")
221 220 except ValueError:
222 221 argcount = len(args)
223 222 shortopts = [opt for opt in aliases if len(opt) == 2]
224 223 values = []
225 224 pos = 0
226 225 while pos < argcount:
227 226 if args[pos] in aliases:
228 227 if pos + 1 >= argcount:
229 228 # ignore and let getopt report an error if there is no value
230 229 break
231 230 del args[pos]
232 231 values.append(args.pop(pos))
233 232 argcount -= 2
234 233 elif args[pos][:2] in shortopts:
235 234 # short option can have no following space, e.g. hg log -Rfoo
236 235 values.append(args.pop(pos)[2:])
237 236 argcount -= 1
238 237 else:
239 238 pos += 1
240 239 return values
241 240
242 241 _loaded = {}
243 242 def _dispatch(ui, args):
244 243 # read --config before doing anything else
245 244 # (e.g. to change trust settings for reading .hg/hgrc)
246 245 config = _earlygetopt(['--config'], args)
247 246 if config:
248 247 ui.updateopts(config=_parseconfig(config))
249 248
250 249 # check for cwd
251 250 cwd = _earlygetopt(['--cwd'], args)
252 251 if cwd:
253 252 os.chdir(cwd[-1])
254 253
255 254 # read the local repository .hgrc into a local ui object
256 255 path = _findrepo(os.getcwd()) or ""
257 256 if not path:
258 257 lui = ui
259 258 if path:
260 259 try:
261 260 lui = _ui.ui(parentui=ui)
262 261 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
263 262 except IOError:
264 263 pass
265 264
266 265 # now we can expand paths, even ones in .hg/hgrc
267 266 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
268 267 if rpath:
269 268 path = lui.expandpath(rpath[-1])
270 269 lui = _ui.ui(parentui=ui)
271 270 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
272 271
273 272 extensions.loadall(lui)
274 273 for name, module in extensions.extensions():
275 274 if name in _loaded:
276 275 continue
277 276
278 277 # setup extensions
279 278 # TODO: this should be generalized to a scheme where extensions can
280 279 # depend on other extensions; then we should toposort them and
281 280 # do initialization in the correct order
282 281 extsetup = getattr(module, 'extsetup', None)
283 282 if extsetup:
284 283 extsetup()
285 284
286 285 cmdtable = getattr(module, 'cmdtable', {})
287 286 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
288 287 if overrides:
289 288 ui.warn(_("extension '%s' overrides commands: %s\n")
290 289 % (name, " ".join(overrides)))
291 290 commands.table.update(cmdtable)
292 291 _loaded[name] = 1
293 292 # check for fallback encoding
294 293 fallback = lui.config('ui', 'fallbackencoding')
295 294 if fallback:
296 295 util._fallbackencoding = fallback
297 296
298 297 fullargs = args
299 298 cmd, func, args, options, cmdoptions = _parse(lui, args)
300 299
301 300 if options["config"]:
302 301 raise util.Abort(_("Option --config may not be abbreviated!"))
303 302 if options["cwd"]:
304 303 raise util.Abort(_("Option --cwd may not be abbreviated!"))
305 304 if options["repository"]:
306 305 raise util.Abort(_(
307 306 "Option -R has to be separated from other options (i.e. not -qR) "
308 307 "and --repository may only be abbreviated as --repo!"))
309 308
310 309 if options["encoding"]:
311 310 util._encoding = options["encoding"]
312 311 if options["encodingmode"]:
313 312 util._encodingmode = options["encodingmode"]
314 313 if options["time"]:
315 314 def get_times():
316 315 t = os.times()
317 316 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
318 317 t = (t[0], t[1], t[2], t[3], time.clock())
319 318 return t
320 319 s = get_times()
321 320 def print_time():
322 321 t = get_times()
323 322 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
324 323 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
325 324 atexit.register(print_time)
326 325
327 326 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
328 327 not options["noninteractive"], options["traceback"])
329 328
330 329 if options['help']:
331 330 return commands.help_(ui, cmd, options['version'])
332 331 elif options['version']:
333 332 return commands.version_(ui)
334 333 elif not cmd:
335 334 return commands.help_(ui, 'shortlist')
336 335
337 336 repo = None
338 337 if cmd not in commands.norepo.split():
339 338 try:
340 339 repo = hg.repository(ui, path=path)
341 340 ui = repo.ui
342 341 if not repo.local():
343 342 raise util.Abort(_("repository '%s' is not local") % path)
344 343 ui.setconfig("bundle", "mainreporoot", repo.root)
345 344 except hg.RepoError:
346 345 if cmd not in commands.optionalrepo.split():
347 346 if args and not path: # try to infer -R from command args
348 347 repos = map(_findrepo, args)
349 348 guess = repos[0]
350 349 if guess and repos.count(guess) == len(repos):
351 350 return _dispatch(ui, ['--repository', guess] + fullargs)
352 351 if not path:
353 352 raise hg.RepoError(_("There is no Mercurial repository here"
354 353 " (.hg not found)"))
355 354 raise
356 355 d = lambda: func(ui, repo, *args, **cmdoptions)
357 356 else:
358 357 d = lambda: func(ui, *args, **cmdoptions)
359 358
360 359 # run pre-hook, and abort if it fails
361 360 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
362 361 if ret:
363 362 return ret
364 363 ret = _runcommand(ui, options, cmd, d)
365 364 # run post-hook, passing command result
366 365 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
367 366 result = ret)
368 367 return ret
369 368
370 369 def _runcommand(ui, options, cmd, cmdfunc):
371 370 def checkargs():
372 371 try:
373 372 return cmdfunc()
374 373 except TypeError, inst:
375 374 # was this an argument error?
376 375 tb = traceback.extract_tb(sys.exc_info()[2])
377 376 if len(tb) != 2: # no
378 377 raise
379 378 raise ParseError(cmd, _("invalid arguments"))
380 379
381 380 if options['profile']:
382 381 import hotshot, hotshot.stats
383 382 prof = hotshot.Profile("hg.prof")
384 383 try:
385 384 try:
386 385 return prof.runcall(checkargs)
387 386 except:
388 387 try:
389 388 ui.warn(_('exception raised - generating '
390 389 'profile anyway\n'))
391 390 except:
392 391 pass
393 392 raise
394 393 finally:
395 394 prof.close()
396 395 stats = hotshot.stats.load("hg.prof")
397 396 stats.strip_dirs()
398 397 stats.sort_stats('time', 'calls')
399 398 stats.print_stats(40)
400 399 elif options['lsprof']:
401 400 try:
402 401 from mercurial import lsprof
403 402 except ImportError:
404 403 raise util.Abort(_(
405 404 'lsprof not available - install from '
406 405 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
407 406 p = lsprof.Profiler()
408 407 p.enable(subcalls=True)
409 408 try:
410 409 return checkargs()
411 410 finally:
412 411 p.disable()
413 412 stats = lsprof.Stats(p.getstats())
414 413 stats.sort()
415 414 stats.pprint(top=10, file=sys.stderr, climit=5)
416 415 else:
417 416 return checkargs()
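
The _earlygetopt helper above is what lets --config, --cwd and -R take effect before the regular option parser runs: it scans the raw argument list, collects the values for the requested option names, and deletes both option and value in place, stopping at "--". A minimal standalone sketch of that contract (hypothetical function name; it skips the glued short form like -Rfoo that the real helper also accepts):

    def earlyopts(aliases, args):
        # collect values for the named options, removing option and value
        # from args in place; everything after "--" is left alone
        try:
            end = args.index("--")
        except ValueError:
            end = len(args)
        values = []
        pos = 0
        while pos < end:
            if args[pos] in aliases and pos + 1 < end:
                del args[pos]                 # drop the option name
                values.append(args.pop(pos))  # take (and drop) its value
                end -= 2
            else:
                pos += 1
        return values

    argv = ["--config", "ui.verbose=true", "log", "-l", "3"]
    assert earlyopts(["--config"], argv) == ["ui.verbose=true"]
    assert argv == ["log", "-l", "3"]   # option and value were removed
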
@@ -1,83 +1,84 b''
1 1 # filelog.py - file history class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from revlog import *
8 from node import bin, nullid
9 from revlog import revlog
9 10 import os
10 11
11 12 class filelog(revlog):
12 13 def __init__(self, opener, path):
13 14 revlog.__init__(self, opener,
14 15 "/".join(("data", self.encodedir(path + ".i"))))
15 16
16 17 # This avoids a collision between a file named foo and a dir named
17 18 # foo.i or foo.d
18 19 def encodedir(self, path):
19 20 return (path
20 21 .replace(".hg/", ".hg.hg/")
21 22 .replace(".i/", ".i.hg/")
22 23 .replace(".d/", ".d.hg/"))
23 24
24 25 def decodedir(self, path):
25 26 return (path
26 27 .replace(".d.hg/", ".d/")
27 28 .replace(".i.hg/", ".i/")
28 29 .replace(".hg.hg/", ".hg/"))
29 30
30 31 def read(self, node):
31 32 t = self.revision(node)
32 33 if not t.startswith('\1\n'):
33 34 return t
34 35 s = t.index('\1\n', 2)
35 36 return t[s+2:]
36 37
37 38 def _readmeta(self, node):
38 39 t = self.revision(node)
39 40 if not t.startswith('\1\n'):
40 41 return {}
41 42 s = t.index('\1\n', 2)
42 43 mt = t[2:s]
43 44 m = {}
44 45 for l in mt.splitlines():
45 46 k, v = l.split(": ", 1)
46 47 m[k] = v
47 48 return m
48 49
49 50 def add(self, text, meta, transaction, link, p1=None, p2=None):
50 51 if meta or text.startswith('\1\n'):
51 52 mt = ""
52 53 if meta:
53 54 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
54 55 text = "\1\n%s\1\n%s" % ("".join(mt), text)
55 56 return self.addrevision(text, transaction, link, p1, p2)
56 57
57 58 def renamed(self, node):
58 59 if self.parents(node)[0] != nullid:
59 60 return False
60 61 m = self._readmeta(node)
61 62 if m and "copy" in m:
62 63 return (m["copy"], bin(m["copyrev"]))
63 64 return False
64 65
65 66 def size(self, rev):
66 67 """return the size of a given revision"""
67 68
68 69 # for revisions with renames, we have to go the slow way
69 70 node = self.node(rev)
70 71 if self.renamed(node):
71 72 return len(self.read(node))
72 73
73 74 return revlog.size(self, rev)
74 75
75 76 def cmp(self, node, text):
76 77 """compare text with a given file revision"""
77 78
78 79 # for renames, we have to go the slow way
79 80 if self.renamed(node):
80 81 t2 = self.read(node)
81 82 return t2 != text
82 83
83 84 return revlog.cmp(self, node, text)
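
filelog stores copy metadata in-band: when add() is given metadata (or the text itself happens to start with the \1\n marker), the stored revision becomes \1\n<key: value lines>\1\n<data>, and read()/_readmeta() strip or parse that frame back off. A self-contained sketch of the framing, using only what the methods above show (the helper names are made up for the demo):

    def packmeta(meta, text):
        # frame the text the way filelog.add() does
        # (items sorted only to make the demo deterministic)
        if not meta and not text.startswith('\1\n'):
            return text
        lines = ["%s: %s\n" % (k, v) for k, v in sorted(meta.items())]
        return "\1\n%s\1\n%s" % ("".join(lines), text)

    def unpackmeta(stored):
        # split a stored revision back into (metadata dict, file data)
        if not stored.startswith('\1\n'):
            return {}, stored
        end = stored.index('\1\n', 2)
        meta = {}
        for line in stored[2:end].splitlines():
            k, v = line.split(": ", 1)
            meta[k] = v
        return meta, stored[end + 2:]

    meta = {"copy": "a.txt", "copyrev": "0" * 40}
    stored = packmeta(meta, "file contents\n")
    assert unpackmeta(stored) == (meta, "file contents\n")
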
@@ -1,217 +1,217 b''
1 1 # filemerge.py - file-level merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import nullrev
9 9 from i18n import _
10 10 import util, os, tempfile, context, simplemerge, re, filecmp
11 11
12 12 def _toolstr(ui, tool, part, default=""):
13 13 return ui.config("merge-tools", tool + "." + part, default)
14 14
15 15 def _toolbool(ui, tool, part, default=False):
16 16 return ui.configbool("merge-tools", tool + "." + part, default)
17 17
18 18 def _findtool(ui, tool):
19 19 k = _toolstr(ui, tool, "regkey")
20 20 if k:
21 21 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
22 22 if p:
23 23 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
24 24 if p:
25 25 return p
26 26 return util.find_exe(_toolstr(ui, tool, "executable", tool))
27 27
28 28 def _picktool(repo, ui, path, binary, symlink):
29 29 def check(tool, pat, symlink, binary):
30 30 tmsg = tool
31 31 if pat:
32 32 tmsg += " specified for " + pat
33 33 if pat and not _findtool(ui, tool): # skip search if not matching
34 34 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
35 35 elif symlink and not _toolbool(ui, tool, "symlink"):
36 36 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
37 37 elif binary and not _toolbool(ui, tool, "binary"):
38 38 ui.warn(_("tool %s can't handle binary\n") % tmsg)
39 39 elif not util.gui() and _toolbool(ui, tool, "gui"):
40 40 ui.warn(_("tool %s requires a GUI\n") % tmsg)
41 41 else:
42 42 return True
43 43 return False
44 44
45 45 # HGMERGE takes precedence
46 46 hgmerge = os.environ.get("HGMERGE")
47 47 if hgmerge:
48 48 return (hgmerge, hgmerge)
49 49
50 50 # then patterns
51 51 for pat, tool in ui.configitems("merge-patterns"):
52 52 mf = util.matcher(repo.root, "", [pat], [], [])[1]
53 53 if mf(path) and check(tool, pat, symlink, False):
54 54 toolpath = _findtool(ui, tool)
55 55 return (tool, '"' + toolpath + '"')
56 56
57 57 # then merge tools
58 58 tools = {}
59 59 for k,v in ui.configitems("merge-tools"):
60 60 t = k.split('.')[0]
61 61 if t not in tools:
62 62 tools[t] = int(_toolstr(ui, t, "priority", "0"))
63 63 names = tools.keys()
64 64 tools = [(-p,t) for t,p in tools.items()]
65 65 tools.sort()
66 66 uimerge = ui.config("ui", "merge")
67 67 if uimerge:
68 68 if uimerge not in names:
69 69 return (uimerge, uimerge)
70 70 tools.insert(0, (None, uimerge)) # highest priority
71 71 tools.append((None, "hgmerge")) # the old default, if found
72 72 for p,t in tools:
73 73 toolpath = _findtool(ui, t)
74 74 if toolpath and check(t, None, symlink, binary):
75 75 return (t, '"' + toolpath + '"')
76 76 # internal merge as last resort
77 77 return (not (symlink or binary) and "internal:merge" or None, None)
78 78
79 79 def _eoltype(data):
80 80 "Guess the EOL type of a file"
81 81 if '\0' in data: # binary
82 82 return None
83 83 if '\r\n' in data: # Windows
84 84 return '\r\n'
85 85 if '\r' in data: # Old Mac
86 86 return '\r'
87 87 if '\n' in data: # UNIX
88 88 return '\n'
89 89 return None # unknown
90 90
91 91 def _matcheol(file, origfile):
92 92 "Convert EOL markers in a file to match origfile"
93 93 tostyle = _eoltype(open(origfile, "rb").read())
94 94 if tostyle:
95 95 data = open(file, "rb").read()
96 96 style = _eoltype(data)
97 97 if style:
98 98 newdata = data.replace(style, tostyle)
99 99 if newdata != data:
100 100 open(file, "wb").write(newdata)
101 101
102 102 def filemerge(repo, fw, fd, fo, wctx, mctx):
103 103 """perform a 3-way merge in the working directory
104 104
105 105 fw = original filename in the working directory
106 106 fd = destination filename in the working directory
107 107 fo = filename in other parent
108 108 wctx, mctx = working and merge changecontexts
109 109 """
110 110
111 111 def temp(prefix, ctx):
112 112 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
113 113 (fd, name) = tempfile.mkstemp(prefix=pre)
114 114 data = repo.wwritedata(ctx.path(), ctx.data())
115 115 f = os.fdopen(fd, "wb")
116 116 f.write(data)
117 117 f.close()
118 118 return name
119 119
120 120 def isbin(ctx):
121 121 try:
122 122 return util.binary(ctx.data())
123 123 except IOError:
124 124 return False
125 125
126 126 fco = mctx.filectx(fo)
127 127 if not fco.cmp(wctx.filectx(fd).data()): # files identical?
128 128 return None
129 129
130 130 ui = repo.ui
131 131 fcm = wctx.filectx(fw)
132 132 fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
133 133 binary = isbin(fcm) or isbin(fco) or isbin(fca)
134 134 symlink = fcm.islink() or fco.islink()
135 135 tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
136 136 ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
137 137 (tool, fw, binary, symlink))
138 138
139 139 if not tool:
140 140 tool = "internal:local"
141 141 if ui.prompt(_(" no tool found to merge %s\n"
142 142 "keep (l)ocal or take (o)ther?") % fw,
143 143 _("[lo]"), _("l")) != _("l"):
144 144 tool = "internal:other"
145 145 if tool == "internal:local":
146 146 return 0
147 147 if tool == "internal:other":
148 148 repo.wwrite(fd, fco.data(), fco.fileflags())
149 149 return 0
150 150 if tool == "internal:fail":
151 151 return 1
152 152
153 153 # do the actual merge
154 154 a = repo.wjoin(fd)
155 155 b = temp("base", fca)
156 156 c = temp("other", fco)
157 157 out = ""
158 158 back = a + ".orig"
159 159 util.copyfile(a, back)
160 160
161 161 if fw != fo:
162 162 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
163 163 else:
164 164 repo.ui.status(_("merging %s\n") % fw)
165 165 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
166 166
167 167 # do we attempt to simplemerge first?
168 168 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
169 169 r = simplemerge.simplemerge(a, b, c, quiet=True)
170 170 if not r:
171 171 ui.debug(_(" premerge successful\n"))
172 172 os.unlink(back)
173 173 os.unlink(b)
174 174 os.unlink(c)
175 175 return 0
176 176 util.copyfile(back, a) # restore from backup and try again
177 177
178 178 env = dict(HG_FILE=fd,
179 179 HG_MY_NODE=str(wctx.parents()[0]),
180 180 HG_OTHER_NODE=str(mctx),
181 181 HG_MY_ISLINK=fcm.islink(),
182 182 HG_OTHER_ISLINK=fco.islink(),
183 183 HG_BASE_ISLINK=fca.islink())
184 184
185 185 if tool == "internal:merge":
186 186 r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
187 187 else:
188 188 args = _toolstr(ui, tool, "args", '$local $base $other')
189 189 if "$output" in args:
190 190 out, a = a, back # read input from backup, write to original
191 191 replace = dict(local=a, base=b, other=c, output=out)
192 192 args = re.sub("\$(local|base|other|output)",
193 193 lambda x: '"%s"' % replace[x.group()[1:]], args)
194 194 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
195 195
196 196 if not r and _toolbool(ui, tool, "checkconflicts"):
197 197 if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()):
198 198 r = 1
199 199
200 200 if not r and _toolbool(ui, tool, "checkchanged"):
201 201 if filecmp.cmp(repo.wjoin(fd), back):
202 202 if ui.prompt(_(" output file %s appears unchanged\n"
203 203 "was merge successful (yn)?") % fd,
204 204 _("[yn]"), _("n")) != _("y"):
205 205 r = 1
206 206
207 207 if _toolbool(ui, tool, "fixeol"):
208 208 _matcheol(repo.wjoin(fd), back)
209 209
210 210 if r:
211 211 repo.ui.warn(_("merging %s failed!\n") % fd)
212 212 else:
213 213 os.unlink(back)
214 214
215 215 os.unlink(b)
216 216 os.unlink(c)
217 217 return r
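
When an external tool is picked, filemerge() builds the command line by substituting the $local, $base, $other and $output placeholders in the tool's configured args template with quoted file paths, then hands the result to util.system(). A minimal sketch of just that substitution step (hypothetical helper name; the real code also swaps in the backup copy as input when $output is present):

    import re

    def expandargs(template, local, base, other, output):
        # replace $local/$base/$other/$output with quoted paths,
        # mirroring the re.sub near the end of filemerge() above
        replace = dict(local=local, base=base, other=other, output=output)
        return re.sub(r"\$(local|base|other|output)",
                      lambda m: '"%s"' % replace[m.group(1)], template)

    cmd = expandargs("$local $other -o $output",
                     "/tmp/a", "/tmp/a~base", "/tmp/a~other", "/tmp/a")
    assert cmd == '"/tmp/a" "/tmp/a~other" -o "/tmp/a"'
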
@@ -1,313 +1,313 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 from node import *
10 from repo import *
9 from node import bin, hex, nullid, nullrev, short
10 from repo import NoCapability, RepoError
11 11 from i18n import _
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 13 import errno, lock, os, shutil, util, extensions
14 14 import merge as _merge
15 15 import verify as _verify
16 16
17 17 def _local(path):
18 18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 19 bundlerepo or localrepo)
20 20
21 21 def parseurl(url, revs):
22 22 '''parse url#branch, returning url, branch + revs'''
23 23
24 24 if '#' not in url:
25 25 return url, (revs or None), None
26 26
27 27 url, rev = url.split('#', 1)
28 28 return url, revs + [rev], rev
29 29
30 30 schemes = {
31 31 'bundle': bundlerepo,
32 32 'file': _local,
33 33 'http': httprepo,
34 34 'https': httprepo,
35 35 'ssh': sshrepo,
36 36 'static-http': statichttprepo,
37 37 }
38 38
39 39 def _lookup(path):
40 40 scheme = 'file'
41 41 if path:
42 42 c = path.find(':')
43 43 if c > 0:
44 44 scheme = path[:c]
45 45 thing = schemes.get(scheme) or schemes['file']
46 46 try:
47 47 return thing(path)
48 48 except TypeError:
49 49 return thing
50 50
51 51 def islocal(repo):
52 52 '''return true if repo or path is local'''
53 53 if isinstance(repo, str):
54 54 try:
55 55 return _lookup(repo).islocal(repo)
56 56 except AttributeError:
57 57 return False
58 58 return repo.local()
59 59
60 60 def repository(ui, path='', create=False):
61 61 """return a repository object for the specified path"""
62 62 repo = _lookup(path).instance(ui, path, create)
63 63 ui = getattr(repo, "ui", ui)
64 64 for name, module in extensions.extensions():
65 65 hook = getattr(module, 'reposetup', None)
66 66 if hook:
67 67 hook(ui, repo)
68 68 return repo
69 69
70 70 def defaultdest(source):
71 71 '''return default destination of clone if none is given'''
72 72 return os.path.basename(os.path.normpath(source))
73 73
74 74 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
75 75 stream=False):
76 76 """Make a copy of an existing repository.
77 77
78 78 Create a copy of an existing repository in a new directory. The
79 79 source and destination are URLs, as passed to the repository
80 80 function. Returns a pair of repository objects, the source and
81 81 newly created destination.
82 82
83 83 The location of the source is added to the new repository's
84 84 .hg/hgrc file, as the default to be used for future pulls and
85 85 pushes.
86 86
87 87 If an exception is raised, the partly cloned/updated destination
88 88 repository will be deleted.
89 89
90 90 Arguments:
91 91
92 92 source: repository object or URL
93 93
94 94 dest: URL of destination repository to create (defaults to base
95 95 name of source repository)
96 96
97 97 pull: always pull from source repository, even in local case
98 98
99 99 stream: stream raw data uncompressed from repository (fast over
100 100 LAN, slow over WAN)
101 101
102 102 rev: revision to clone up to (implies pull=True)
103 103
104 104 update: update working directory after clone completes, if
105 105 destination is local repository
106 106 """
107 107
108 108 if isinstance(source, str):
109 109 origsource = ui.expandpath(source)
110 110 source, rev, checkout = parseurl(origsource, rev)
111 111 src_repo = repository(ui, source)
112 112 else:
113 113 src_repo = source
114 114 origsource = source = src_repo.url()
115 115 checkout = None
116 116
117 117 if dest is None:
118 118 dest = defaultdest(source)
119 119 ui.status(_("destination directory: %s\n") % dest)
120 120
121 121 def localpath(path):
122 122 if path.startswith('file://localhost/'):
123 123 return path[16:]
124 124 if path.startswith('file://'):
125 125 return path[7:]
126 126 if path.startswith('file:'):
127 127 return path[5:]
128 128 return path
129 129
130 130 dest = localpath(dest)
131 131 source = localpath(source)
132 132
133 133 if os.path.exists(dest):
134 134 raise util.Abort(_("destination '%s' already exists") % dest)
135 135
136 136 class DirCleanup(object):
137 137 def __init__(self, dir_):
138 138 self.rmtree = shutil.rmtree
139 139 self.dir_ = dir_
140 140 def close(self):
141 141 self.dir_ = None
142 142 def __del__(self):
143 143 if self.dir_:
144 144 self.rmtree(self.dir_, True)
145 145
146 146 src_lock = dest_lock = dir_cleanup = None
147 147 try:
148 148 if islocal(dest):
149 149 dir_cleanup = DirCleanup(dest)
150 150
151 151 abspath = origsource
152 152 copy = False
153 153 if src_repo.local() and islocal(dest):
154 154 abspath = os.path.abspath(util.drop_scheme('file', origsource))
155 155 copy = not pull and not rev
156 156
157 157 if copy:
158 158 try:
159 159 # we use a lock here because if we race with commit, we
160 160 # can end up with extra data in the cloned revlogs that's
161 161 # not pointed to by changesets, thus causing verify to
162 162 # fail
163 163 src_lock = src_repo.lock()
164 164 except lock.LockException:
165 165 copy = False
166 166
167 167 if copy:
168 168 def force_copy(src, dst):
169 169 if not os.path.exists(src):
170 170 # Tolerate empty source repository and optional files
171 171 return
172 172 util.copyfiles(src, dst)
173 173
174 174 src_store = os.path.realpath(src_repo.spath)
175 175 if not os.path.exists(dest):
176 176 os.mkdir(dest)
177 177 try:
178 178 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
179 179 os.mkdir(dest_path)
180 180 except OSError, inst:
181 181 if inst.errno == errno.EEXIST:
182 182 dir_cleanup.close()
183 183 raise util.Abort(_("destination '%s' already exists")
184 184 % dest)
185 185 raise
186 186 if src_repo.spath != src_repo.path:
187 187 # XXX racy
188 188 dummy_changelog = os.path.join(dest_path, "00changelog.i")
189 189 # copy the dummy changelog
190 190 force_copy(src_repo.join("00changelog.i"), dummy_changelog)
191 191 dest_store = os.path.join(dest_path, "store")
192 192 os.mkdir(dest_store)
193 193 else:
194 194 dest_store = dest_path
195 195 # copy the requires file
196 196 force_copy(src_repo.join("requires"),
197 197 os.path.join(dest_path, "requires"))
198 198 # we lock here to avoid premature writing to the target
199 199 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
200 200
201 201 files = ("data",
202 202 "00manifest.d", "00manifest.i",
203 203 "00changelog.d", "00changelog.i")
204 204 for f in files:
205 205 src = os.path.join(src_store, f)
206 206 dst = os.path.join(dest_store, f)
207 207 force_copy(src, dst)
208 208
209 209 # we need to re-init the repo after manually copying the data
210 210 # into it
211 211 dest_repo = repository(ui, dest)
212 212
213 213 else:
214 214 try:
215 215 dest_repo = repository(ui, dest, create=True)
216 216 except OSError, inst:
217 217 if inst.errno == errno.EEXIST:
218 218 dir_cleanup.close()
219 219 raise util.Abort(_("destination '%s' already exists")
220 220 % dest)
221 221 raise
222 222
223 223 revs = None
224 224 if rev:
225 225 if 'lookup' not in src_repo.capabilities:
226 226 raise util.Abort(_("src repository does not support revision "
227 227 "lookup and so doesn't support clone by "
228 228 "revision"))
229 229 revs = [src_repo.lookup(r) for r in rev]
230 230
231 231 if dest_repo.local():
232 232 dest_repo.clone(src_repo, heads=revs, stream=stream)
233 233 elif src_repo.local():
234 234 src_repo.push(dest_repo, revs=revs)
235 235 else:
236 236 raise util.Abort(_("clone from remote to remote not supported"))
237 237
238 238 if dir_cleanup:
239 239 dir_cleanup.close()
240 240
241 241 if dest_repo.local():
242 242 fp = dest_repo.opener("hgrc", "w", text=True)
243 243 fp.write("[paths]\n")
244 244 fp.write("default = %s\n" % abspath)
245 245 fp.close()
246 246
247 247 if update:
248 248 if not checkout:
249 249 try:
250 250 checkout = dest_repo.lookup("default")
251 251 except:
252 252 checkout = dest_repo.changelog.tip()
253 253 _update(dest_repo, checkout)
254 254
255 255 return src_repo, dest_repo
256 256 finally:
257 257 del src_lock, dest_lock, dir_cleanup
258 258
259 259 def _showstats(repo, stats):
260 260 stats = ((stats[0], _("updated")),
261 261 (stats[1], _("merged")),
262 262 (stats[2], _("removed")),
263 263 (stats[3], _("unresolved")))
264 264 note = ", ".join([_("%d files %s") % s for s in stats])
265 265 repo.ui.status("%s\n" % note)
266 266
267 267 def _update(repo, node): return update(repo, node)
268 268
269 269 def update(repo, node):
270 270 """update the working directory to node, merging linear changes"""
271 271 pl = repo.parents()
272 272 stats = _merge.update(repo, node, False, False, None)
273 273 _showstats(repo, stats)
274 274 if stats[3]:
275 275 repo.ui.status(_("There are unresolved merges with"
276 276 " locally modified files.\n"))
277 277 if stats[1]:
278 278 repo.ui.status(_("You can finish the partial merge using:\n"))
279 279 else:
280 280 repo.ui.status(_("You can redo the full merge using:\n"))
281 281 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
282 282 repo.ui.status(_(" hg update %s\n hg update %s\n")
283 283 % (pl[0].rev(), repo.changectx(node).rev()))
284 284 return stats[3] > 0
285 285
286 286 def clean(repo, node, show_stats=True):
287 287 """forcibly switch the working directory to node, clobbering changes"""
288 288 stats = _merge.update(repo, node, False, True, None)
289 289 if show_stats: _showstats(repo, stats)
290 290 return stats[3] > 0
291 291
292 292 def merge(repo, node, force=None, remind=True):
293 293 """branch merge with node, resolving changes"""
294 294 stats = _merge.update(repo, node, True, force, False)
295 295 _showstats(repo, stats)
296 296 if stats[3]:
297 297 pl = repo.parents()
298 298 repo.ui.status(_("There are unresolved merges,"
299 299 " you can redo the full merge using:\n"
300 300 " hg update -C %s\n"
301 301 " hg merge %s\n")
302 302 % (pl[0].rev(), pl[1].rev()))
303 303 elif remind:
304 304 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
305 305 return stats[3] > 0
306 306
307 307 def revert(repo, node, choose):
308 308 """revert changes to revision in node without updating dirstate"""
309 309 return _merge.update(repo, node, False, True, choose)[3] > 0
310 310
311 311 def verify(repo):
312 312 """verify the consistency of a repository"""
313 313 return _verify.verify(repo)
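
parseurl() above folds an optional #rev fragment on the source URL into the revision list, which is why a clone source written as http://host/repo#stable pulls only up to that revision and checks it out. The same function restated as a self-contained demo:

    def parseurl(url, revs):
        '''parse url#branch, returning url, branch + revs (copied from above)'''
        if '#' not in url:
            return url, (revs or None), None
        url, rev = url.split('#', 1)
        return url, revs + [rev], rev

    url, revs, checkout = parseurl("http://example.com/repo#stable", [])
    assert url == "http://example.com/repo"
    assert revs == ["stable"] and checkout == "stable"

    # no fragment: the revision list is left untouched (None when empty)
    assert parseurl("http://example.com/repo", []) == \
           ("http://example.com/repo", None, None)
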
@@ -1,951 +1,951 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, mimetypes, re
10 from mercurial.node import *
10 from mercurial.node import hex, nullid, short
11 11 from mercurial import mdiff, ui, hg, util, archival, patch, hook
12 12 from mercurial import revlog, templater, templatefilters, changegroup
13 13 from common import get_mtime, style_map, paritygen, countgen, get_contact
14 14 from common import ErrorResponse
15 15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
16 16 from request import wsgirequest
17 17 import webcommands, protocol
18 18
19 19 shortcuts = {
20 20 'cl': [('cmd', ['changelog']), ('rev', None)],
21 21 'sl': [('cmd', ['shortlog']), ('rev', None)],
22 22 'cs': [('cmd', ['changeset']), ('node', None)],
23 23 'f': [('cmd', ['file']), ('filenode', None)],
24 24 'fl': [('cmd', ['filelog']), ('filenode', None)],
25 25 'fd': [('cmd', ['filediff']), ('node', None)],
26 26 'fa': [('cmd', ['annotate']), ('filenode', None)],
27 27 'mf': [('cmd', ['manifest']), ('manifest', None)],
28 28 'ca': [('cmd', ['archive']), ('node', None)],
29 29 'tags': [('cmd', ['tags'])],
30 30 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
31 31 'static': [('cmd', ['static']), ('file', None)]
32 32 }
33 33
34 34 def _up(p):
35 35 if p[0] != "/":
36 36 p = "/" + p
37 37 if p[-1] == "/":
38 38 p = p[:-1]
39 39 up = os.path.dirname(p)
40 40 if up == "/":
41 41 return "/"
42 42 return up + "/"
43 43
44 44 def revnavgen(pos, pagelen, limit, nodefunc):
45 45 def seq(factor, limit=None):
46 46 if limit:
47 47 yield limit
48 48 if limit >= 20 and limit <= 40:
49 49 yield 50
50 50 else:
51 51 yield 1 * factor
52 52 yield 3 * factor
53 53 for f in seq(factor * 10):
54 54 yield f
55 55
56 56 def nav(**map):
57 57 l = []
58 58 last = 0
59 59 for f in seq(1, pagelen):
60 60 if f < pagelen or f <= last:
61 61 continue
62 62 if f > limit:
63 63 break
64 64 last = f
65 65 if pos + f < limit:
66 66 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
67 67 if pos - f >= 0:
68 68 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
69 69
70 70 try:
71 71 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
72 72
73 73 for label, node in l:
74 74 yield {"label": label, "node": node}
75 75
76 76 yield {"label": "tip", "node": "tip"}
77 77 except hg.RepoError:
78 78 pass
79 79
80 80 return nav
81 81
82 82 class hgweb(object):
83 83 def __init__(self, repo, name=None):
84 84 if isinstance(repo, str):
85 85 parentui = ui.ui(report_untrusted=False, interactive=False)
86 86 self.repo = hg.repository(parentui, repo)
87 87 else:
88 88 self.repo = repo
89 89
90 90 hook.redirect(True)
91 91 self.mtime = -1
92 92 self.reponame = name
93 93 self.archives = 'zip', 'gz', 'bz2'
94 94 self.stripecount = 1
95 95 self._capabilities = None
96 96 # a repo owner may set web.templates in .hg/hgrc to get any file
97 97 # readable by the user running the CGI script
98 98 self.templatepath = self.config("web", "templates",
99 99 templater.templatepath(),
100 100 untrusted=False)
101 101
102 102 # The CGI scripts are often run by a user different from the repo owner.
103 103 # Trust the settings from the .hg/hgrc files by default.
104 104 def config(self, section, name, default=None, untrusted=True):
105 105 return self.repo.ui.config(section, name, default,
106 106 untrusted=untrusted)
107 107
108 108 def configbool(self, section, name, default=False, untrusted=True):
109 109 return self.repo.ui.configbool(section, name, default,
110 110 untrusted=untrusted)
111 111
112 112 def configlist(self, section, name, default=None, untrusted=True):
113 113 return self.repo.ui.configlist(section, name, default,
114 114 untrusted=untrusted)
115 115
116 116 def refresh(self):
117 117 mtime = get_mtime(self.repo.root)
118 118 if mtime != self.mtime:
119 119 self.mtime = mtime
120 120 self.repo = hg.repository(self.repo.ui, self.repo.root)
121 121 self.maxchanges = int(self.config("web", "maxchanges", 10))
122 122 self.stripecount = int(self.config("web", "stripes", 1))
123 123 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
124 124 self.maxfiles = int(self.config("web", "maxfiles", 10))
125 125 self.allowpull = self.configbool("web", "allowpull", True)
126 126 self.encoding = self.config("web", "encoding", util._encoding)
127 127 self._capabilities = None
128 128
129 129 def capabilities(self):
130 130 if self._capabilities is not None:
131 131 return self._capabilities
132 132 caps = ['lookup', 'changegroupsubset']
133 133 if self.configbool('server', 'uncompressed'):
134 134 caps.append('stream=%d' % self.repo.changelog.version)
135 135 if changegroup.bundlepriority:
136 136 caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
137 137 self._capabilities = caps
138 138 return caps
139 139
140 140 def run(self):
141 141 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
142 142 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
143 143 import mercurial.hgweb.wsgicgi as wsgicgi
144 144 wsgicgi.launch(self)
145 145
146 146 def __call__(self, env, respond):
147 147 req = wsgirequest(env, respond)
148 148 self.run_wsgi(req)
149 149 return req
150 150
151 151 def run_wsgi(self, req):
152 152
153 153 self.refresh()
154 154
155 155 # expand form shortcuts
156 156
157 157 for k in shortcuts.iterkeys():
158 158 if k in req.form:
159 159 for name, value in shortcuts[k]:
160 160 if value is None:
161 161 value = req.form[k]
162 162 req.form[name] = value
163 163 del req.form[k]
164 164
165 165 # work with CGI variables to create coherent structure
166 166 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
167 167
168 168 req.url = req.env['SCRIPT_NAME']
169 169 if not req.url.endswith('/'):
170 170 req.url += '/'
171 171 if 'REPO_NAME' in req.env:
172 172 req.url += req.env['REPO_NAME'] + '/'
173 173
174 174 if req.env.get('PATH_INFO'):
175 175 parts = req.env.get('PATH_INFO').strip('/').split('/')
176 176 repo_parts = req.env.get('REPO_NAME', '').split('/')
177 177 if parts[:len(repo_parts)] == repo_parts:
178 178 parts = parts[len(repo_parts):]
179 179 query = '/'.join(parts)
180 180 else:
181 181 query = req.env['QUERY_STRING'].split('&', 1)[0]
182 182 query = query.split(';', 1)[0]
183 183
184 184 # translate user-visible url structure to internal structure
185 185
186 186 args = query.split('/', 2)
187 187 if 'cmd' not in req.form and args and args[0]:
188 188
189 189 cmd = args.pop(0)
190 190 style = cmd.rfind('-')
191 191 if style != -1:
192 192 req.form['style'] = [cmd[:style]]
193 193 cmd = cmd[style+1:]
194 194
195 195 # avoid accepting e.g. style parameter as command
196 196 if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
197 197 req.form['cmd'] = [cmd]
198 198
199 199 if args and args[0]:
200 200 node = args.pop(0)
201 201 req.form['node'] = [node]
202 202 if args:
203 203 req.form['file'] = args
204 204
205 205 if cmd == 'static':
206 206 req.form['file'] = req.form['node']
207 207 elif cmd == 'archive':
208 208 fn = req.form['node'][0]
209 209 for type_, spec in self.archive_specs.iteritems():
210 210 ext = spec[2]
211 211 if fn.endswith(ext):
212 212 req.form['node'] = [fn[:-len(ext)]]
213 213 req.form['type'] = [type_]
214 214
215 215 # process this if it's a protocol request
216 216
217 217 cmd = req.form.get('cmd', [''])[0]
218 218 if cmd in protocol.__all__:
219 219 method = getattr(protocol, cmd)
220 220 method(self, req)
221 221 return
222 222
223 223 # process the web interface request
224 224
225 225 try:
226 226
227 227 tmpl = self.templater(req)
228 228 ctype = tmpl('mimetype', encoding=self.encoding)
229 229 ctype = templater.stringify(ctype)
230 230
231 231 if cmd == '':
232 232 req.form['cmd'] = [tmpl.cache['default']]
233 233 cmd = req.form['cmd'][0]
234 234
235 235 if cmd not in webcommands.__all__:
236 236 msg = 'No such method: %s' % cmd
237 237 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
238 238 elif cmd == 'file' and 'raw' in req.form.get('style', []):
239 239 self.ctype = ctype
240 240 content = webcommands.rawfile(self, req, tmpl)
241 241 else:
242 242 content = getattr(webcommands, cmd)(self, req, tmpl)
243 243 req.respond(HTTP_OK, ctype)
244 244
245 245 req.write(content)
246 246 del tmpl
247 247
248 248 except revlog.LookupError, err:
249 249 req.respond(HTTP_NOT_FOUND, ctype)
250 250 req.write(tmpl('error', error='revision not found: %s' % err.name))
251 251 except (hg.RepoError, revlog.RevlogError), inst:
252 252 req.respond(HTTP_SERVER_ERROR, ctype)
253 253 req.write(tmpl('error', error=str(inst)))
254 254 except ErrorResponse, inst:
255 255 req.respond(inst.code, ctype)
256 256 req.write(tmpl('error', error=inst.message))
257 257
258 258 def templater(self, req):
259 259
260 260 # determine scheme, port and server name
261 261 # this is needed to create absolute urls
262 262
263 263 proto = req.env.get('wsgi.url_scheme')
264 264 if proto == 'https':
265 265 proto = 'https'
266 266 default_port = "443"
267 267 else:
268 268 proto = 'http'
269 269 default_port = "80"
270 270
271 271 port = req.env["SERVER_PORT"]
272 272 port = port != default_port and (":" + port) or ""
273 273 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
274 274 staticurl = self.config("web", "staticurl") or req.url + 'static/'
275 275 if not staticurl.endswith('/'):
276 276 staticurl += '/'
277 277
278 278 # some functions for the templater
279 279
280 280 def header(**map):
281 281 yield tmpl('header', encoding=self.encoding, **map)
282 282
283 283 def footer(**map):
284 284 yield tmpl("footer", **map)
285 285
286 286 def motd(**map):
287 287 yield self.config("web", "motd", "")
288 288
289 289 def sessionvars(**map):
290 290 fields = []
291 291 if 'style' in req.form:
292 292 style = req.form['style'][0]
293 293 if style != self.config('web', 'style', ''):
294 294 fields.append(('style', style))
295 295
296 296 separator = req.url[-1] == '?' and ';' or '?'
297 297 for name, value in fields:
298 298 yield dict(name=name, value=value, separator=separator)
299 299 separator = ';'
300 300
301 301 # figure out which style to use
302 302
303 303 style = self.config("web", "style", "")
304 304 if 'style' in req.form:
305 305 style = req.form['style'][0]
306 306 mapfile = style_map(self.templatepath, style)
307 307
308 308 if not self.reponame:
309 309 self.reponame = (self.config("web", "name")
310 310 or req.env.get('REPO_NAME')
311 311 or req.url.strip('/') or self.repo.root)
312 312
313 313 # create the templater
314 314
315 315 tmpl = templater.templater(mapfile, templatefilters.filters,
316 316 defaults={"url": req.url,
317 317 "staticurl": staticurl,
318 318 "urlbase": urlbase,
319 319 "repo": self.reponame,
320 320 "header": header,
321 321 "footer": footer,
322 322 "motd": motd,
323 323 "sessionvars": sessionvars
324 324 })
325 325 return tmpl
326 326
327 327 def archivelist(self, nodeid):
328 328 allowed = self.configlist("web", "allow_archive")
329 329 for i, spec in self.archive_specs.iteritems():
330 330 if i in allowed or self.configbool("web", "allow" + i):
331 331 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
332 332
333 333 def listfilediffs(self, tmpl, files, changeset):
334 334 for f in files[:self.maxfiles]:
335 335 yield tmpl("filedifflink", node=hex(changeset), file=f)
336 336 if len(files) > self.maxfiles:
337 337 yield tmpl("fileellipses")
338 338
339 339 def siblings(self, siblings=[], hiderev=None, **args):
340 340 siblings = [s for s in siblings if s.node() != nullid]
341 341 if len(siblings) == 1 and siblings[0].rev() == hiderev:
342 342 return
343 343 for s in siblings:
344 344 d = {'node': hex(s.node()), 'rev': s.rev()}
345 345 if hasattr(s, 'path'):
346 346 d['file'] = s.path()
347 347 d.update(args)
348 348 yield d
349 349
350 350 def renamelink(self, fl, node):
351 351 r = fl.renamed(node)
352 352 if r:
353 353 return [dict(file=r[0], node=hex(r[1]))]
354 354 return []
355 355
356 356 def nodetagsdict(self, node):
357 357 return [{"name": i} for i in self.repo.nodetags(node)]
358 358
359 359 def nodebranchdict(self, ctx):
360 360 branches = []
361 361 branch = ctx.branch()
362 362 # If this is an empty repo, ctx.node() == nullid,
363 363 # ctx.branch() == 'default', but branchtags() is
364 364 # an empty dict. Using dict.get avoids a traceback.
365 365 if self.repo.branchtags().get(branch) == ctx.node():
366 366 branches.append({"name": branch})
367 367 return branches
368 368
369 369 def showtag(self, tmpl, t1, node=nullid, **args):
370 370 for t in self.repo.nodetags(node):
371 371 yield tmpl(t1, tag=t, **args)
372 372
373 373 def diff(self, tmpl, node1, node2, files):
374 374 def filterfiles(filters, files):
375 375 l = [x for x in files if x in filters]
376 376
377 377 for t in filters:
378 378 if t and t[-1] != os.sep:
379 379 t += os.sep
380 380 l += [x for x in files if x.startswith(t)]
381 381 return l
382 382
383 383 parity = paritygen(self.stripecount)
384 384 def diffblock(diff, f, fn):
385 385 yield tmpl("diffblock",
386 386 lines=prettyprintlines(diff),
387 387 parity=parity.next(),
388 388 file=f,
389 389 filenode=hex(fn or nullid))
390 390
391 391 blockcount = countgen()
392 392 def prettyprintlines(diff):
393 393 blockno = blockcount.next()
394 394 for lineno, l in enumerate(diff.splitlines(1)):
395 395 if blockno == 0:
396 396 lineno = lineno + 1
397 397 else:
398 398 lineno = "%d.%d" % (blockno, lineno + 1)
399 399 if l.startswith('+'):
400 400 ltype = "difflineplus"
401 401 elif l.startswith('-'):
402 402 ltype = "difflineminus"
403 403 elif l.startswith('@'):
404 404 ltype = "difflineat"
405 405 else:
406 406 ltype = "diffline"
407 407 yield tmpl(ltype,
408 408 line=l,
409 409 lineid="l%s" % lineno,
410 410 linenumber="% 8s" % lineno)
411 411
412 412 r = self.repo
413 413 c1 = r.changectx(node1)
414 414 c2 = r.changectx(node2)
415 415 date1 = util.datestr(c1.date())
416 416 date2 = util.datestr(c2.date())
417 417
418 418 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
419 419 if files:
420 420 modified, added, removed = map(lambda x: filterfiles(files, x),
421 421 (modified, added, removed))
422 422
423 423 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
424 424 for f in modified:
425 425 to = c1.filectx(f).data()
426 426 tn = c2.filectx(f).data()
427 427 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
428 428 opts=diffopts), f, tn)
429 429 for f in added:
430 430 to = None
431 431 tn = c2.filectx(f).data()
432 432 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
433 433 opts=diffopts), f, tn)
434 434 for f in removed:
435 435 to = c1.filectx(f).data()
436 436 tn = None
437 437 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
438 438 opts=diffopts), f, tn)
439 439
440 440 def changelog(self, tmpl, ctx, shortlog=False):
441 441 def changelist(limit=0,**map):
442 442 cl = self.repo.changelog
443 443 l = [] # build a list in forward order for efficiency
444 444 for i in xrange(start, end):
445 445 ctx = self.repo.changectx(i)
446 446 n = ctx.node()
447 447 showtags = self.showtag(tmpl, 'changelogtag', n)
448 448
449 449 l.insert(0, {"parity": parity.next(),
450 450 "author": ctx.user(),
451 451 "parent": self.siblings(ctx.parents(), i - 1),
452 452 "child": self.siblings(ctx.children(), i + 1),
453 453 "changelogtag": showtags,
454 454 "desc": ctx.description(),
455 455 "date": ctx.date(),
456 456 "files": self.listfilediffs(tmpl, ctx.files(), n),
457 457 "rev": i,
458 458 "node": hex(n),
459 459 "tags": self.nodetagsdict(n),
460 460 "branches": self.nodebranchdict(ctx)})
461 461
462 462 if limit > 0:
463 463 l = l[:limit]
464 464
465 465 for e in l:
466 466 yield e
467 467
468 468 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
469 469 cl = self.repo.changelog
470 470 count = cl.count()
471 471 pos = ctx.rev()
472 472 start = max(0, pos - maxchanges + 1)
473 473 end = min(count, start + maxchanges)
474 474 pos = end - 1
475 475 parity = paritygen(self.stripecount, offset=start-end)
476 476
477 477 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
478 478
479 479 return tmpl(shortlog and 'shortlog' or 'changelog',
480 480 changenav=changenav,
481 481 node=hex(cl.tip()),
482 482 rev=pos, changesets=count,
483 483 entries=lambda **x: changelist(limit=0,**x),
484 484 latestentry=lambda **x: changelist(limit=1,**x),
485 485 archives=self.archivelist("tip"))
486 486
487 487 def search(self, tmpl, query):
488 488
489 489 def changelist(**map):
490 490 cl = self.repo.changelog
491 491 count = 0
492 492 qw = query.lower().split()
493 493
494 494 def revgen():
495 495 for i in xrange(cl.count() - 1, 0, -100):
496 496 l = []
497 497 for j in xrange(max(0, i - 100), i + 1):
498 498 ctx = self.repo.changectx(j)
499 499 l.append(ctx)
500 500 l.reverse()
501 501 for e in l:
502 502 yield e
503 503
504 504 for ctx in revgen():
505 505 miss = 0
506 506 for q in qw:
507 507 if not (q in ctx.user().lower() or
508 508 q in ctx.description().lower() or
509 509 q in " ".join(ctx.files()).lower()):
510 510 miss = 1
511 511 break
512 512 if miss:
513 513 continue
514 514
515 515 count += 1
516 516 n = ctx.node()
517 517 showtags = self.showtag(tmpl, 'changelogtag', n)
518 518
519 519 yield tmpl('searchentry',
520 520 parity=parity.next(),
521 521 author=ctx.user(),
522 522 parent=self.siblings(ctx.parents()),
523 523 child=self.siblings(ctx.children()),
524 524 changelogtag=showtags,
525 525 desc=ctx.description(),
526 526 date=ctx.date(),
527 527 files=self.listfilediffs(tmpl, ctx.files(), n),
528 528 rev=ctx.rev(),
529 529 node=hex(n),
530 530 tags=self.nodetagsdict(n),
531 531 branches=self.nodebranchdict(ctx))
532 532
533 533 if count >= self.maxchanges:
534 534 break
535 535
536 536 cl = self.repo.changelog
537 537 parity = paritygen(self.stripecount)
538 538
539 539 return tmpl('search',
540 540 query=query,
541 541 node=hex(cl.tip()),
542 542 entries=changelist,
543 543 archives=self.archivelist("tip"))
544 544
545 545 def changeset(self, tmpl, ctx):
546 546 n = ctx.node()
547 547 showtags = self.showtag(tmpl, 'changesettag', n)
548 548 parents = ctx.parents()
549 549 p1 = parents[0].node()
550 550
551 551 files = []
552 552 parity = paritygen(self.stripecount)
553 553 for f in ctx.files():
554 554 files.append(tmpl("filenodelink",
555 555 node=hex(n), file=f,
556 556 parity=parity.next()))
557 557
558 558 def diff(**map):
559 559 yield self.diff(tmpl, p1, n, None)
560 560
561 561 return tmpl('changeset',
562 562 diff=diff,
563 563 rev=ctx.rev(),
564 564 node=hex(n),
565 565 parent=self.siblings(parents),
566 566 child=self.siblings(ctx.children()),
567 567 changesettag=showtags,
568 568 author=ctx.user(),
569 569 desc=ctx.description(),
570 570 date=ctx.date(),
571 571 files=files,
572 572 archives=self.archivelist(hex(n)),
573 573 tags=self.nodetagsdict(n),
574 574 branches=self.nodebranchdict(ctx))
575 575
576 576 def filelog(self, tmpl, fctx):
577 577 f = fctx.path()
578 578 fl = fctx.filelog()
579 579 count = fl.count()
580 580 pagelen = self.maxshortchanges
581 581 pos = fctx.filerev()
582 582 start = max(0, pos - pagelen + 1)
583 583 end = min(count, start + pagelen)
584 584 pos = end - 1
585 585 parity = paritygen(self.stripecount, offset=start-end)
586 586
587 587 def entries(limit=0, **map):
588 588 l = []
589 589
590 590 for i in xrange(start, end):
591 591 ctx = fctx.filectx(i)
592 592 n = fl.node(i)
593 593
594 594 l.insert(0, {"parity": parity.next(),
595 595 "filerev": i,
596 596 "file": f,
597 597 "node": hex(ctx.node()),
598 598 "author": ctx.user(),
599 599 "date": ctx.date(),
600 600 "rename": self.renamelink(fl, n),
601 601 "parent": self.siblings(fctx.parents()),
602 602 "child": self.siblings(fctx.children()),
603 603 "desc": ctx.description()})
604 604
605 605 if limit > 0:
606 606 l = l[:limit]
607 607
608 608 for e in l:
609 609 yield e
610 610
611 611 nodefunc = lambda x: fctx.filectx(fileid=x)
612 612 nav = revnavgen(pos, pagelen, count, nodefunc)
613 613 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
614 614 entries=lambda **x: entries(limit=0, **x),
615 615 latestentry=lambda **x: entries(limit=1, **x))
616 616
617 617 def filerevision(self, tmpl, fctx):
618 618 f = fctx.path()
619 619 text = fctx.data()
620 620 fl = fctx.filelog()
621 621 n = fctx.filenode()
622 622 parity = paritygen(self.stripecount)
623 623
624 624 if util.binary(text):
625 625 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
626 626 text = '(binary:%s)' % mt
627 627
628 628 def lines():
629 629 for lineno, t in enumerate(text.splitlines(1)):
630 630 yield {"line": t,
631 631 "lineid": "l%d" % (lineno + 1),
632 632 "linenumber": "% 6d" % (lineno + 1),
633 633 "parity": parity.next()}
634 634
635 635 return tmpl("filerevision",
636 636 file=f,
637 637 path=_up(f),
638 638 text=lines(),
639 639 rev=fctx.rev(),
640 640 node=hex(fctx.node()),
641 641 author=fctx.user(),
642 642 date=fctx.date(),
643 643 desc=fctx.description(),
644 644 parent=self.siblings(fctx.parents()),
645 645 child=self.siblings(fctx.children()),
646 646 rename=self.renamelink(fl, n),
647 647 permissions=fctx.manifest().flags(f))
648 648
649 649 def fileannotate(self, tmpl, fctx):
650 650 f = fctx.path()
651 651 n = fctx.filenode()
652 652 fl = fctx.filelog()
653 653 parity = paritygen(self.stripecount)
654 654
655 655 def annotate(**map):
656 656 last = None
657 657 if util.binary(fctx.data()):
658 658 mt = (mimetypes.guess_type(fctx.path())[0]
659 659 or 'application/octet-stream')
660 660 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
661 661 '(binary:%s)' % mt)])
662 662 else:
663 663 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
664 664 for lineno, ((f, targetline), l) in lines:
665 665 fnode = f.filenode()
666 666 name = self.repo.ui.shortuser(f.user())
667 667
668 668 if last != fnode:
669 669 last = fnode
670 670
671 671 yield {"parity": parity.next(),
672 672 "node": hex(f.node()),
673 673 "rev": f.rev(),
674 674 "author": name,
675 675 "file": f.path(),
676 676 "targetline": targetline,
677 677 "line": l,
678 678 "lineid": "l%d" % (lineno + 1),
679 679 "linenumber": "% 6d" % (lineno + 1)}
680 680
681 681 return tmpl("fileannotate",
682 682 file=f,
683 683 annotate=annotate,
684 684 path=_up(f),
685 685 rev=fctx.rev(),
686 686 node=hex(fctx.node()),
687 687 author=fctx.user(),
688 688 date=fctx.date(),
689 689 desc=fctx.description(),
690 690 rename=self.renamelink(fl, n),
691 691 parent=self.siblings(fctx.parents()),
692 692 child=self.siblings(fctx.children()),
693 693 permissions=fctx.manifest().flags(f))
694 694
695 695 def manifest(self, tmpl, ctx, path):
696 696 mf = ctx.manifest()
697 697 node = ctx.node()
698 698
699 699 files = {}
700 700 parity = paritygen(self.stripecount)
701 701
702 702 if path and path[-1] != "/":
703 703 path += "/"
704 704 l = len(path)
705 705 abspath = "/" + path
706 706
707 707 for f, n in mf.items():
708 708 if f[:l] != path:
709 709 continue
710 710 remain = f[l:]
711 711 if "/" in remain:
712 712 short = remain[:remain.index("/") + 1] # bleah
713 713 files[short] = (f, None)
714 714 else:
715 715 short = os.path.basename(remain)
716 716 files[short] = (f, n)
717 717
718 718 if not files:
719 719 raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: ' + path)
720 720
721 721 def filelist(**map):
722 722 fl = files.keys()
723 723 fl.sort()
724 724 for f in fl:
725 725 full, fnode = files[f]
726 726 if not fnode:
727 727 continue
728 728
729 729 fctx = ctx.filectx(full)
730 730 yield {"file": full,
731 731 "parity": parity.next(),
732 732 "basename": f,
733 733 "date": fctx.changectx().date(),
734 734 "size": fctx.size(),
735 735 "permissions": mf.flags(full)}
736 736
737 737 def dirlist(**map):
738 738 fl = files.keys()
739 739 fl.sort()
740 740 for f in fl:
741 741 full, fnode = files[f]
742 742 if fnode:
743 743 continue
744 744
745 745 yield {"parity": parity.next(),
746 746 "path": "%s%s" % (abspath, f),
747 747 "basename": f[:-1]}
748 748
749 749 return tmpl("manifest",
750 750 rev=ctx.rev(),
751 751 node=hex(node),
752 752 path=abspath,
753 753 up=_up(abspath),
754 754 upparity=parity.next(),
755 755 fentries=filelist,
756 756 dentries=dirlist,
757 757 archives=self.archivelist(hex(node)),
758 758 tags=self.nodetagsdict(node),
759 759 branches=self.nodebranchdict(ctx))
760 760
761 761 def tags(self, tmpl):
762 762 i = self.repo.tagslist()
763 763 i.reverse()
764 764 parity = paritygen(self.stripecount)
765 765
766 766 def entries(notip=False,limit=0, **map):
767 767 count = 0
768 768 for k, n in i:
769 769 if notip and k == "tip":
770 770 continue
771 771 if limit > 0 and count >= limit:
772 772 continue
773 773 count = count + 1
774 774 yield {"parity": parity.next(),
775 775 "tag": k,
776 776 "date": self.repo.changectx(n).date(),
777 777 "node": hex(n)}
778 778
779 779 return tmpl("tags",
780 780 node=hex(self.repo.changelog.tip()),
781 781 entries=lambda **x: entries(False,0, **x),
782 782 entriesnotip=lambda **x: entries(True,0, **x),
783 783 latestentry=lambda **x: entries(True,1, **x))
784 784
785 785 def summary(self, tmpl):
786 786 i = self.repo.tagslist()
787 787 i.reverse()
788 788
789 789 def tagentries(**map):
790 790 parity = paritygen(self.stripecount)
791 791 count = 0
792 792 for k, n in i:
793 793 if k == "tip": # skip tip
794 794 continue
795 795
796 796 count += 1
797 797 if count > 10: # limit to 10 tags
798 798 break
799 799
800 800 yield tmpl("tagentry",
801 801 parity=parity.next(),
802 802 tag=k,
803 803 node=hex(n),
804 804 date=self.repo.changectx(n).date())
805 805
806 806
807 807 def branches(**map):
808 808 parity = paritygen(self.stripecount)
809 809
810 810 b = self.repo.branchtags()
811 811 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
812 812 l.sort()
813 813
814 814 for r,n,t in l:
815 815 ctx = self.repo.changectx(n)
816 816
817 817 yield {'parity': parity.next(),
818 818 'branch': t,
819 819 'node': hex(n),
820 820 'date': ctx.date()}
821 821
822 822 def changelist(**map):
823 823 parity = paritygen(self.stripecount, offset=start-end)
824 824 l = [] # build a list in forward order for efficiency
825 825 for i in xrange(start, end):
826 826 ctx = self.repo.changectx(i)
827 827 n = ctx.node()
828 828 hn = hex(n)
829 829
830 830 l.insert(0, tmpl(
831 831 'shortlogentry',
832 832 parity=parity.next(),
833 833 author=ctx.user(),
834 834 desc=ctx.description(),
835 835 date=ctx.date(),
836 836 rev=i,
837 837 node=hn,
838 838 tags=self.nodetagsdict(n),
839 839 branches=self.nodebranchdict(ctx)))
840 840
841 841 yield l
842 842
843 843 cl = self.repo.changelog
844 844 count = cl.count()
845 845 start = max(0, count - self.maxchanges)
846 846 end = min(count, start + self.maxchanges)
847 847
848 848 return tmpl("summary",
849 849 desc=self.config("web", "description", "unknown"),
850 850 owner=get_contact(self.config) or "unknown",
851 851 lastchange=cl.read(cl.tip())[2],
852 852 tags=tagentries,
853 853 branches=branches,
854 854 shortlog=changelist,
855 855 node=hex(cl.tip()),
856 856 archives=self.archivelist("tip"))
857 857
858 858 def filediff(self, tmpl, fctx):
859 859 n = fctx.node()
860 860 path = fctx.path()
861 861 parents = fctx.parents()
862 862 p1 = parents and parents[0].node() or nullid
863 863
864 864 def diff(**map):
865 865 yield self.diff(tmpl, p1, n, [path])
866 866
867 867 return tmpl("filediff",
868 868 file=path,
869 869 node=hex(n),
870 870 rev=fctx.rev(),
871 871 parent=self.siblings(parents),
872 872 child=self.siblings(fctx.children()),
873 873 diff=diff)
874 874
875 875 archive_specs = {
876 876 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
877 877 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
878 878 'zip': ('application/zip', 'zip', '.zip', None),
879 879 }
880 880
881 881 def archive(self, tmpl, req, key, type_):
882 882 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
883 883 cnode = self.repo.lookup(key)
884 884 arch_version = key
885 885 if cnode == key or key == 'tip':
886 886 arch_version = short(cnode)
887 887 name = "%s-%s" % (reponame, arch_version)
888 888 mimetype, artype, extension, encoding = self.archive_specs[type_]
889 889 headers = [
890 890 ('Content-Type', mimetype),
891 891 ('Content-Disposition', 'attachment; filename=%s%s' %
892 892 (name, extension))
893 893 ]
894 894 if encoding:
895 895 headers.append(('Content-Encoding', encoding))
896 896 req.header(headers)
897 897 req.respond(HTTP_OK)
898 898 archival.archive(self.repo, req, cnode, artype, prefix=name)
899 899
900 900 # add tags to things
901 901 # tags -> list of changesets corresponding to tags
902 902 # find tag, changeset, file
903 903
904 904 def cleanpath(self, path):
905 905 path = path.lstrip('/')
906 906 return util.canonpath(self.repo.root, '', path)
907 907
908 908 def changectx(self, req):
909 909 if 'node' in req.form:
910 910 changeid = req.form['node'][0]
911 911 elif 'manifest' in req.form:
912 912 changeid = req.form['manifest'][0]
913 913 else:
914 914 changeid = self.repo.changelog.count() - 1
915 915
916 916 try:
917 917 ctx = self.repo.changectx(changeid)
918 918 except hg.RepoError:
919 919 man = self.repo.manifest
920 920 mn = man.lookup(changeid)
921 921 ctx = self.repo.changectx(man.linkrev(mn))
922 922
923 923 return ctx
924 924
925 925 def filectx(self, req):
926 926 path = self.cleanpath(req.form['file'][0])
927 927 if 'node' in req.form:
928 928 changeid = req.form['node'][0]
929 929 else:
930 930 changeid = req.form['filenode'][0]
931 931 try:
932 932 ctx = self.repo.changectx(changeid)
933 933 fctx = ctx.filectx(path)
934 934 except hg.RepoError:
935 935 fctx = self.repo.filectx(path, fileid=changeid)
936 936
937 937 return fctx
938 938
939 939 def check_perm(self, req, op, default):
940 940 '''check permission for operation based on user auth.
941 941 return true if op allowed, else false.
942 942 default is policy to use if no config given.'''
943 943
944 944 user = req.env.get('REMOTE_USER')
945 945
946 946 deny = self.configlist('web', 'deny_' + op)
947 947 if deny and (not user or deny == ['*'] or user in deny):
948 948 return False
949 949
950 950 allow = self.configlist('web', 'allow_' + op)
951 951 return (allow and (allow == ['*'] or user in allow)) or default
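
The check_perm hunk above evaluates the deny list before the allow list, with '*' acting as a wildcard; an unset REMOTE_USER is rejected whenever any deny list is configured. A minimal sketch of that evaluation order, assuming plain Python lists for the configured values (the function name and sample values below are illustrative, not part of the patch):

# hedged sketch of the deny-before-allow policy shown in check_perm above
def allowed(user, deny, allow, default):
    # a deny entry always wins; '*' denies everyone, and an anonymous
    # user (None) is denied whenever a deny list is configured
    if deny and (not user or deny == ['*'] or user in deny):
        return False
    # otherwise the allow list decides, falling back to the caller's default
    return bool(allow and (allow == ['*'] or user in allow)) or default

print(allowed(None, deny=['guest'], allow=['*'], default=False))   # False
print(allowed('bob', deny=['guest'], allow=['*'], default=False))  # True
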
@@ -1,221 +1,221 b''
1 1 #
2 2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import cStringIO, zlib, bz2, tempfile, errno, os, sys
9 9 from mercurial import util, streamclone
10 10 from mercurial.i18n import gettext as _
11 from mercurial.node import *
11 from mercurial.node import bin, hex
12 12 from mercurial import changegroup as changegroupmod
13 13 from common import HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 14
15 15 # __all__ is populated with the allowed commands. Be sure to add to it if
16 16 # you're adding a new command, or the new command won't work.
17 17
18 18 __all__ = [
19 19 'lookup', 'heads', 'branches', 'between', 'changegroup',
20 20 'changegroupsubset', 'capabilities', 'unbundle', 'stream_out',
21 21 ]
22 22
23 23 HGTYPE = 'application/mercurial-0.1'
24 24
25 25 def lookup(web, req):
26 26 try:
27 27 r = hex(web.repo.lookup(req.form['key'][0]))
28 28 success = 1
29 29 except Exception,inst:
30 30 r = str(inst)
31 31 success = 0
32 32 resp = "%s %s\n" % (success, r)
33 33 req.respond(HTTP_OK, HGTYPE, length=len(resp))
34 34 req.write(resp)
35 35
36 36 def heads(web, req):
37 37 resp = " ".join(map(hex, web.repo.heads())) + "\n"
38 38 req.respond(HTTP_OK, HGTYPE, length=len(resp))
39 39 req.write(resp)
40 40
41 41 def branches(web, req):
42 42 nodes = []
43 43 if 'nodes' in req.form:
44 44 nodes = map(bin, req.form['nodes'][0].split(" "))
45 45 resp = cStringIO.StringIO()
46 46 for b in web.repo.branches(nodes):
47 47 resp.write(" ".join(map(hex, b)) + "\n")
48 48 resp = resp.getvalue()
49 49 req.respond(HTTP_OK, HGTYPE, length=len(resp))
50 50 req.write(resp)
51 51
52 52 def between(web, req):
53 53 if 'pairs' in req.form:
54 54 pairs = [map(bin, p.split("-"))
55 55 for p in req.form['pairs'][0].split(" ")]
56 56 resp = cStringIO.StringIO()
57 57 for b in web.repo.between(pairs):
58 58 resp.write(" ".join(map(hex, b)) + "\n")
59 59 resp = resp.getvalue()
60 60 req.respond(HTTP_OK, HGTYPE, length=len(resp))
61 61 req.write(resp)
62 62
63 63 def changegroup(web, req):
64 64 req.respond(HTTP_OK, HGTYPE)
65 65 nodes = []
66 66 if not web.allowpull:
67 67 return
68 68
69 69 if 'roots' in req.form:
70 70 nodes = map(bin, req.form['roots'][0].split(" "))
71 71
72 72 z = zlib.compressobj()
73 73 f = web.repo.changegroup(nodes, 'serve')
74 74 while 1:
75 75 chunk = f.read(4096)
76 76 if not chunk:
77 77 break
78 78 req.write(z.compress(chunk))
79 79
80 80 req.write(z.flush())
81 81
82 82 def changegroupsubset(web, req):
83 83 req.respond(HTTP_OK, HGTYPE)
84 84 bases = []
85 85 heads = []
86 86 if not web.allowpull:
87 87 return
88 88
89 89 if 'bases' in req.form:
90 90 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
91 91 if 'heads' in req.form:
92 92 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
93 93
94 94 z = zlib.compressobj()
95 95 f = web.repo.changegroupsubset(bases, heads, 'serve')
96 96 while 1:
97 97 chunk = f.read(4096)
98 98 if not chunk:
99 99 break
100 100 req.write(z.compress(chunk))
101 101
102 102 req.write(z.flush())
103 103
104 104 def capabilities(web, req):
105 105 resp = ' '.join(web.capabilities())
106 106 req.respond(HTTP_OK, HGTYPE, length=len(resp))
107 107 req.write(resp)
108 108
109 109 def unbundle(web, req):
110 110 def bail(response, headers={}):
111 111 length = int(req.env['CONTENT_LENGTH'])
112 112 for s in util.filechunkiter(req, limit=length):
113 113 # drain incoming bundle, else client will not see
114 114 # response when run outside cgi script
115 115 pass
116 116 req.header(headers.items())
117 117 req.respond(HTTP_OK, HGTYPE)
118 118 req.write('0\n')
119 119 req.write(response)
120 120
121 121 # require ssl by default, auth info cannot be sniffed and
122 122 # replayed
123 123 ssl_req = web.configbool('web', 'push_ssl', True)
124 124 if ssl_req:
125 125 if req.env.get('wsgi.url_scheme') != 'https':
126 126 bail('ssl required\n')
127 127 return
128 128 proto = 'https'
129 129 else:
130 130 proto = 'http'
131 131
132 132 # do not allow push unless explicitly allowed
133 133 if not web.check_perm(req, 'push', False):
134 134 bail('push not authorized\n',
135 135 headers={'status': '401 Unauthorized'})
136 136 return
137 137
138 138 their_heads = req.form['heads'][0].split(' ')
139 139
140 140 def check_heads():
141 141 heads = map(hex, web.repo.heads())
142 142 return their_heads == [hex('force')] or their_heads == heads
143 143
144 144 # fail early if possible
145 145 if not check_heads():
146 146 bail('unsynced changes\n')
147 147 return
148 148
149 149 req.respond(HTTP_OK, HGTYPE)
150 150
151 151 # do not lock repo until all changegroup data is
152 152 # streamed. save to temporary file.
153 153
154 154 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
155 155 fp = os.fdopen(fd, 'wb+')
156 156 try:
157 157 length = int(req.env['CONTENT_LENGTH'])
158 158 for s in util.filechunkiter(req, limit=length):
159 159 fp.write(s)
160 160
161 161 try:
162 162 lock = web.repo.lock()
163 163 try:
164 164 if not check_heads():
165 165 req.write('0\n')
166 166 req.write('unsynced changes\n')
167 167 return
168 168
169 169 fp.seek(0)
170 170 header = fp.read(6)
171 171 if header.startswith('HG') and not header.startswith('HG10'):
172 172 raise ValueError('unknown bundle version')
173 173 elif header not in changegroupmod.bundletypes:
174 174 raise ValueError('unknown bundle compression type')
175 175 gen = changegroupmod.unbundle(header, fp)
176 176
177 177 # send addchangegroup output to client
178 178
179 179 old_stdout = sys.stdout
180 180 sys.stdout = cStringIO.StringIO()
181 181
182 182 try:
183 183 url = 'remote:%s:%s' % (proto,
184 184 req.env.get('REMOTE_HOST', ''))
185 185 try:
186 186 ret = web.repo.addchangegroup(gen, 'serve', url)
187 187 except util.Abort, inst:
188 188 sys.stdout.write("abort: %s\n" % inst)
189 189 ret = 0
190 190 finally:
191 191 val = sys.stdout.getvalue()
192 192 sys.stdout = old_stdout
193 193 req.write('%d\n' % ret)
194 194 req.write(val)
195 195 finally:
196 196 del lock
197 197 except ValueError, inst:
198 198 req.write('0\n')
199 199 req.write(str(inst) + '\n')
200 200 except (OSError, IOError), inst:
201 201 req.write('0\n')
202 202 filename = getattr(inst, 'filename', '')
203 203 # Don't send our filesystem layout to the client
204 204 if filename.startswith(web.repo.root):
205 205 filename = filename[len(web.repo.root)+1:]
206 206 else:
207 207 filename = ''
208 208 error = getattr(inst, 'strerror', 'Unknown error')
209 209 if inst.errno == errno.ENOENT:
210 210 code = HTTP_NOT_FOUND
211 211 else:
212 212 code = HTTP_SERVER_ERROR
213 213 req.respond(code)
214 214 req.write('%s: %s\n' % (error, filename))
215 215 finally:
216 216 fp.close()
217 217 os.unlink(tempname)
218 218
219 219 def stream_out(web, req):
220 220 req.respond(HTTP_OK, HGTYPE)
221 221 streamclone.stream_out(web.repo, req, untrusted=True)
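
Before unpacking an uploaded bundle, unbundle() above sniffs the first six bytes of the spooled upload: an 'HG'-prefixed header that is not 'HG10' is rejected as an unknown version, and anything not listed in changegroupmod.bundletypes is rejected as an unknown compression type. Below is a hedged sketch of that check as a standalone helper; known_types stands in for changegroup.bundletypes, whose concrete keys are not reproduced here:

# illustrative helper mirroring the header validation in unbundle() above
def check_bundle_header(fp, known_types):
    fp.seek(0)
    header = fp.read(6)   # the bundle type is encoded in the first six bytes
    if header.startswith('HG') and not header.startswith('HG10'):
        raise ValueError('unknown bundle version')
    elif header not in known_types:
        raise ValueError('unknown bundle compression type')
    return header
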
@@ -1,458 +1,458 b''
1 1 # httprepo.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 from node import *
10 from remoterepo import *
9 from node import bin, hex
10 from remoterepo import remoterepository
11 11 from i18n import _
12 12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
13 13 import errno, keepalive, tempfile, socket, changegroup
14 14
15 15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
16 16 def __init__(self, ui):
17 17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
18 18 self.ui = ui
19 19
20 20 def find_user_password(self, realm, authuri):
21 21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
22 22 self, realm, authuri)
23 23 user, passwd = authinfo
24 24 if user and passwd:
25 25 return (user, passwd)
26 26
27 27 if not self.ui.interactive:
28 28 raise util.Abort(_('http authorization required'))
29 29
30 30 self.ui.write(_("http authorization required\n"))
31 31 self.ui.status(_("realm: %s\n") % realm)
32 32 if user:
33 33 self.ui.status(_("user: %s\n") % user)
34 34 else:
35 35 user = self.ui.prompt(_("user:"), default=None)
36 36
37 37 if not passwd:
38 38 passwd = self.ui.getpass()
39 39
40 40 self.add_password(realm, authuri, user, passwd)
41 41 return (user, passwd)
42 42
43 43 def netlocsplit(netloc):
44 44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
45 45
46 46 a = netloc.find('@')
47 47 if a == -1:
48 48 user, passwd = None, None
49 49 else:
50 50 userpass, netloc = netloc[:a], netloc[a+1:]
51 51 c = userpass.find(':')
52 52 if c == -1:
53 53 user, passwd = urllib.unquote(userpass), None
54 54 else:
55 55 user = urllib.unquote(userpass[:c])
56 56 passwd = urllib.unquote(userpass[c+1:])
57 57 c = netloc.find(':')
58 58 if c == -1:
59 59 host, port = netloc, None
60 60 else:
61 61 host, port = netloc[:c], netloc[c+1:]
62 62 return host, port, user, passwd
63 63
64 64 def netlocunsplit(host, port, user=None, passwd=None):
65 65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
66 66 if port:
67 67 hostport = host + ':' + port
68 68 else:
69 69 hostport = host
70 70 if user:
71 71 if passwd:
72 72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
73 73 else:
74 74 userpass = urllib.quote(user)
75 75 return userpass + '@' + hostport
76 76 return hostport
77 77
78 78 # work around a bug in Python < 2.4.2
79 79 # (it leaves a "\n" at the end of Proxy-authorization headers)
80 80 class request(urllib2.Request):
81 81 def add_header(self, key, val):
82 82 if key.lower() == 'proxy-authorization':
83 83 val = val.strip()
84 84 return urllib2.Request.add_header(self, key, val)
85 85
86 86 class httpsendfile(file):
87 87 def __len__(self):
88 88 return os.fstat(self.fileno()).st_size
89 89
90 90 def _gen_sendfile(connection):
91 91 def _sendfile(self, data):
92 92 # send a file
93 93 if isinstance(data, httpsendfile):
94 94 # if auth required, some data sent twice, so rewind here
95 95 data.seek(0)
96 96 for chunk in util.filechunkiter(data):
97 97 connection.send(self, chunk)
98 98 else:
99 99 connection.send(self, data)
100 100 return _sendfile
101 101
102 102 class httpconnection(keepalive.HTTPConnection):
103 103 # must be able to send big bundle as stream.
104 104 send = _gen_sendfile(keepalive.HTTPConnection)
105 105
106 106 class httphandler(keepalive.HTTPHandler):
107 107 def http_open(self, req):
108 108 return self.do_open(httpconnection, req)
109 109
110 110 def __del__(self):
111 111 self.close_all()
112 112
113 113 has_https = hasattr(urllib2, 'HTTPSHandler')
114 114 if has_https:
115 115 class httpsconnection(httplib.HTTPSConnection):
116 116 response_class = keepalive.HTTPResponse
117 117 # must be able to send big bundle as stream.
118 118 send = _gen_sendfile(httplib.HTTPSConnection)
119 119
120 120 class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
121 121 def https_open(self, req):
122 122 return self.do_open(httpsconnection, req)
123 123
124 124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
125 125 # it doesn't know about the auth type requested. This can happen if
126 126 # somebody is using BasicAuth and types a bad password.
127 127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
128 128 def http_error_auth_reqed(self, auth_header, host, req, headers):
129 129 try:
130 130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
131 131 self, auth_header, host, req, headers)
132 132 except ValueError, inst:
133 133 arg = inst.args[0]
134 134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
135 135 return
136 136 raise
137 137
138 138 def zgenerator(f):
139 139 zd = zlib.decompressobj()
140 140 try:
141 141 for chunk in util.filechunkiter(f):
142 142 yield zd.decompress(chunk)
143 143 except httplib.HTTPException, inst:
144 144 raise IOError(None, _('connection ended unexpectedly'))
145 145 yield zd.flush()
146 146
147 147 _safe = ('abcdefghijklmnopqrstuvwxyz'
148 148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
149 149 '0123456789' '_.-/')
150 150 _safeset = None
151 151 _hex = None
152 152 def quotepath(path):
153 153 '''quote the path part of a URL
154 154
155 155 This is similar to urllib.quote, but it also tries to avoid
156 156 quoting things twice (inspired by wget):
157 157
158 158 >>> quotepath('abc def')
159 159 'abc%20def'
160 160 >>> quotepath('abc%20def')
161 161 'abc%20def'
162 162 >>> quotepath('abc%20 def')
163 163 'abc%20%20def'
164 164 >>> quotepath('abc def%20')
165 165 'abc%20def%20'
166 166 >>> quotepath('abc def%2')
167 167 'abc%20def%252'
168 168 >>> quotepath('abc def%')
169 169 'abc%20def%25'
170 170 '''
171 171 global _safeset, _hex
172 172 if _safeset is None:
173 173 _safeset = util.set(_safe)
174 174 _hex = util.set('abcdefABCDEF0123456789')
175 175 l = list(path)
176 176 for i in xrange(len(l)):
177 177 c = l[i]
178 178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
179 179 pass
180 180 elif c not in _safeset:
181 181 l[i] = '%%%02X' % ord(c)
182 182 return ''.join(l)
183 183
184 184 class httprepository(remoterepository):
185 185 def __init__(self, ui, path):
186 186 self.path = path
187 187 self.caps = None
188 188 self.handler = None
189 189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
190 190 if query or frag:
191 191 raise util.Abort(_('unsupported URL component: "%s"') %
192 192 (query or frag))
193 193 if not urlpath:
194 194 urlpath = '/'
195 195 urlpath = quotepath(urlpath)
196 196 host, port, user, passwd = netlocsplit(netloc)
197 197
198 198 # urllib cannot handle URLs with embedded user or passwd
199 199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
200 200 urlpath, '', ''))
201 201 self.ui = ui
202 202 self.ui.debug(_('using %s\n') % self._url)
203 203
204 204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
205 205 # XXX proxyauthinfo = None
206 206 handlers = [httphandler()]
207 207 if has_https:
208 208 handlers.append(httpshandler())
209 209
210 210 if proxyurl:
211 211 # proxy can be proper url or host[:port]
212 212 if not (proxyurl.startswith('http:') or
213 213 proxyurl.startswith('https:')):
214 214 proxyurl = 'http://' + proxyurl + '/'
215 215 snpqf = urlparse.urlsplit(proxyurl)
216 216 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
217 217 hpup = netlocsplit(proxynetloc)
218 218
219 219 proxyhost, proxyport, proxyuser, proxypasswd = hpup
220 220 if not proxyuser:
221 221 proxyuser = ui.config("http_proxy", "user")
222 222 proxypasswd = ui.config("http_proxy", "passwd")
223 223
224 224 # see if we should use a proxy for this url
225 225 no_list = [ "localhost", "127.0.0.1" ]
226 226 no_list.extend([p.lower() for
227 227 p in ui.configlist("http_proxy", "no")])
228 228 no_list.extend([p.strip().lower() for
229 229 p in os.getenv("no_proxy", '').split(',')
230 230 if p.strip()])
231 231 # "http_proxy.always" config is for running tests on localhost
232 232 if (not ui.configbool("http_proxy", "always") and
233 233 host.lower() in no_list):
234 234 # avoid auto-detection of proxy settings by appending
235 235 # a ProxyHandler with no proxies defined.
236 236 handlers.append(urllib2.ProxyHandler({}))
237 237 ui.debug(_('disabling proxy for %s\n') % host)
238 238 else:
239 239 proxyurl = urlparse.urlunsplit((
240 240 proxyscheme, netlocunsplit(proxyhost, proxyport,
241 241 proxyuser, proxypasswd or ''),
242 242 proxypath, proxyquery, proxyfrag))
243 243 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
244 244 ui.debug(_('proxying through http://%s:%s\n') %
245 245 (proxyhost, proxyport))
246 246
247 247 # urllib2 takes proxy values from the environment and those
248 248 # will take precedence if found, so drop them
249 249 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
250 250 try:
251 251 if env in os.environ:
252 252 del os.environ[env]
253 253 except OSError:
254 254 pass
255 255
256 256 passmgr = passwordmgr(ui)
257 257 if user:
258 258 ui.debug(_('http auth: user %s, password %s\n') %
259 259 (user, passwd and '*' * len(passwd) or 'not set'))
260 260 netloc = host
261 261 if port:
262 262 netloc += ':' + port
263 263 # Python < 2.4.3 uses only the netloc to search for a password
264 264 passmgr.add_password(None, (self._url, netloc), user, passwd or '')
265 265
266 266 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
267 267 httpdigestauthhandler(passmgr)))
268 268 opener = urllib2.build_opener(*handlers)
269 269
270 270 # 1.0 here is the _protocol_ version
271 271 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
272 272 urllib2.install_opener(opener)
273 273
274 274 def url(self):
275 275 return self.path
276 276
277 277 # look up capabilities only when needed
278 278
279 279 def get_caps(self):
280 280 if self.caps is None:
281 281 try:
282 282 self.caps = util.set(self.do_read('capabilities').split())
283 283 except repo.RepoError:
284 284 self.caps = util.set()
285 285 self.ui.debug(_('capabilities: %s\n') %
286 286 (' '.join(self.caps or ['none'])))
287 287 return self.caps
288 288
289 289 capabilities = property(get_caps)
290 290
291 291 def lock(self):
292 292 raise util.Abort(_('operation not supported over http'))
293 293
294 294 def do_cmd(self, cmd, **args):
295 295 data = args.pop('data', None)
296 296 headers = args.pop('headers', {})
297 297 self.ui.debug(_("sending %s command\n") % cmd)
298 298 q = {"cmd": cmd}
299 299 q.update(args)
300 300 qs = '?%s' % urllib.urlencode(q)
301 301 cu = "%s%s" % (self._url, qs)
302 302 try:
303 303 if data:
304 304 self.ui.debug(_("sending %s bytes\n") % len(data))
305 305 resp = urllib2.urlopen(request(cu, data, headers))
306 306 except urllib2.HTTPError, inst:
307 307 if inst.code == 401:
308 308 raise util.Abort(_('authorization failed'))
309 309 raise
310 310 except httplib.HTTPException, inst:
311 311 self.ui.debug(_('http error while sending %s command\n') % cmd)
312 312 self.ui.print_exc()
313 313 raise IOError(None, inst)
314 314 except IndexError:
315 315 # this only happens with Python 2.3, later versions raise URLError
316 316 raise util.Abort(_('http error, possibly caused by proxy setting'))
317 317 # record the url we got redirected to
318 318 resp_url = resp.geturl()
319 319 if resp_url.endswith(qs):
320 320 resp_url = resp_url[:-len(qs)]
321 321 if self._url != resp_url:
322 322 self.ui.status(_('real URL is %s\n') % resp_url)
323 323 self._url = resp_url
324 324 try:
325 325 proto = resp.getheader('content-type')
326 326 except AttributeError:
327 327 proto = resp.headers['content-type']
328 328
329 329 # accept old "text/plain" and "application/hg-changegroup" for now
330 330 if not (proto.startswith('application/mercurial-') or
331 331 proto.startswith('text/plain') or
332 332 proto.startswith('application/hg-changegroup')):
333 333 self.ui.debug(_("Requested URL: '%s'\n") % cu)
334 334 raise repo.RepoError(_("'%s' does not appear to be an hg repository")
335 335 % self._url)
336 336
337 337 if proto.startswith('application/mercurial-'):
338 338 try:
339 339 version = proto.split('-', 1)[1]
340 340 version_info = tuple([int(n) for n in version.split('.')])
341 341 except ValueError:
342 342 raise repo.RepoError(_("'%s' sent a broken Content-Type "
343 343 "header (%s)") % (self._url, proto))
344 344 if version_info > (0, 1):
345 345 raise repo.RepoError(_("'%s' uses newer protocol %s") %
346 346 (self._url, version))
347 347
348 348 return resp
349 349
350 350 def do_read(self, cmd, **args):
351 351 fp = self.do_cmd(cmd, **args)
352 352 try:
353 353 return fp.read()
354 354 finally:
355 355 # if using keepalive, allow connection to be reused
356 356 fp.close()
357 357
358 358 def lookup(self, key):
359 359 self.requirecap('lookup', _('look up remote revision'))
360 360 d = self.do_cmd("lookup", key = key).read()
361 361 success, data = d[:-1].split(' ', 1)
362 362 if int(success):
363 363 return bin(data)
364 364 raise repo.RepoError(data)
365 365
366 366 def heads(self):
367 367 d = self.do_read("heads")
368 368 try:
369 369 return map(bin, d[:-1].split(" "))
370 370 except:
371 371 raise util.UnexpectedOutput(_("unexpected response:"), d)
372 372
373 373 def branches(self, nodes):
374 374 n = " ".join(map(hex, nodes))
375 375 d = self.do_read("branches", nodes=n)
376 376 try:
377 377 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
378 378 return br
379 379 except:
380 380 raise util.UnexpectedOutput(_("unexpected response:"), d)
381 381
382 382 def between(self, pairs):
383 383 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
384 384 d = self.do_read("between", pairs=n)
385 385 try:
386 386 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
387 387 return p
388 388 except:
389 389 raise util.UnexpectedOutput(_("unexpected response:"), d)
390 390
391 391 def changegroup(self, nodes, kind):
392 392 n = " ".join(map(hex, nodes))
393 393 f = self.do_cmd("changegroup", roots=n)
394 394 return util.chunkbuffer(zgenerator(f))
395 395
396 396 def changegroupsubset(self, bases, heads, source):
397 397 self.requirecap('changegroupsubset', _('look up remote changes'))
398 398 baselst = " ".join([hex(n) for n in bases])
399 399 headlst = " ".join([hex(n) for n in heads])
400 400 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
401 401 return util.chunkbuffer(zgenerator(f))
402 402
403 403 def unbundle(self, cg, heads, source):
404 404 # have to stream bundle to a temp file because we do not have
405 405 # http 1.1 chunked transfer.
406 406
407 407 type = ""
408 408 types = self.capable('unbundle')
409 409 # servers older than d1b16a746db6 will send 'unbundle' as a
410 410 # boolean capability
411 411 try:
412 412 types = types.split(',')
413 413 except AttributeError:
414 414 types = [""]
415 415 if types:
416 416 for x in types:
417 417 if x in changegroup.bundletypes:
418 418 type = x
419 419 break
420 420
421 421 tempname = changegroup.writebundle(cg, None, type)
422 422 fp = httpsendfile(tempname, "rb")
423 423 try:
424 424 try:
425 425 rfp = self.do_cmd(
426 426 'unbundle', data=fp,
427 427 headers={'Content-Type': 'application/octet-stream'},
428 428 heads=' '.join(map(hex, heads)))
429 429 try:
430 430 ret = int(rfp.readline())
431 431 self.ui.write(rfp.read())
432 432 return ret
433 433 finally:
434 434 rfp.close()
435 435 except socket.error, err:
436 436 if err[0] in (errno.ECONNRESET, errno.EPIPE):
437 437 raise util.Abort(_('push failed: %s') % err[1])
438 438 raise util.Abort(err[1])
439 439 finally:
440 440 fp.close()
441 441 os.unlink(tempname)
442 442
443 443 def stream_out(self):
444 444 return self.do_cmd('stream_out')
445 445
446 446 class httpsrepository(httprepository):
447 447 def __init__(self, ui, path):
448 448 if not has_https:
449 449 raise util.Abort(_('Python support for SSL and HTTPS '
450 450 'is not installed'))
451 451 httprepository.__init__(self, ui, path)
452 452
453 453 def instance(ui, path, create):
454 454 if create:
455 455 raise util.Abort(_('cannot create new http repository'))
456 456 if path.startswith('https:'):
457 457 return httpsrepository(ui, path)
458 458 return httprepository(ui, path)
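
httprepository relies on netlocsplit()/netlocunsplit() above to peel credentials out of the network location before the URL is handed to urllib2, and to re-attach them only where they are needed (the password manager and the rebuilt proxy URL). A quick round-trip illustration, assuming the definitions above are in scope; the host, port and credentials are made-up sample values:

# illustration only: exercises netlocsplit/netlocunsplit on made-up values
host, port, user, passwd = netlocsplit('alice:s3cret@hg.example.com:8000')
# -> ('hg.example.com', '8000', 'alice', 's3cret')

bare = netlocunsplit(host, port)            # 'hg.example.com:8000', no credentials
full = netlocunsplit(host, port, user, passwd)
# -> 'alice:s3cret@hg.example.com:8000'
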